#!/usr/bin/env python3
#
# Bookmark management utility
#
# Copyright (C) 2015-2016 Arun Prakash Jana <engineerarun@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with buku. If not, see <http://www.gnu.org/licenses/>.

import sys
import os
import sqlite3
import argparse
import readline
import webbrowser
import json  # needed by format_json()
import html.parser as HTMLParser
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urljoin, quote, unquote
import gzip
import io
import signal

# Import libraries needed for encryption
try:
    import getpass
    import hashlib
    from Crypto.Cipher import AES
    from Crypto import Random
    import struct

    no_crypto = False
    BLOCKSIZE = 65536
    SALT_SIZE = 32
    CHUNKSIZE = 0x80000  # Read/write 512 KB chunks

except ImportError:
    no_crypto = True

# Globals
update = False          # Update a bookmark in DB
tagManual = None        # Tags for update command
titleManual = None      # Manually add a title offline
description = None      # Description of the bookmark
tagsearch = False       # Search bookmarks by tag
titleData = None        # Title fetched from a page
jsonOutput = False      # Output json formatted result
showOpt = 0             # Modify show. 1: show only URL, 2: show URL and tag
debug = False           # Enable debug logs
pipeargs = []           # Holds arguments piped to the program
noninteractive = False  # Show the prompt or not
DELIMITER = ','         # Delimiter used to store tags in DB
_VERSION_ = '2.1'       # Program version
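
# Note (added for clarity): tags are stored delimiter-wrapped in the DB, e.g. a
# record tagged 'news' and 'tech' holds ',news,tech,' in its tags column, so a
# whole-tag match is a substring search for ',news,'.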


class BMHTMLParser(HTMLParser.HTMLParser):
    """Class to parse and fetch the title from an HTML page, if available"""

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.inTitle = False
        self.data = ''
        self.lasttag = None

    def handle_starttag(self, tag, attrs):
        self.inTitle = False
        if tag == 'title':
            self.inTitle = True
            self.lasttag = tag

    def handle_endtag(self, tag):
        global titleData

        if tag == 'title':
            self.inTitle = False
            if self.data != '':
                titleData = self.data
                self.reset()  # We have received title data, exit parsing

    def handle_data(self, data):
        if self.lasttag == 'title' and self.inTitle:
            self.data += data

    def error(self, message):
        pass


class BukuDb:

    def __init__(self, *args, **kwargs):
        conn, cur = BukuDb.initdb()
        self.conn = conn
        self.cur = cur
        self.noninteractive = noninteractive

    @staticmethod
    def get_dbdir_path():
        """Determine the directory path where dbfile will be stored:
        if $XDG_DATA_HOME is defined, use it
        else if $HOME exists, use it
        else use the current directory
        """

        data_home = os.environ.get('XDG_DATA_HOME')
        if data_home is None:
            if os.environ.get('HOME') is None:
                data_home = '.'
            else:
                data_home = os.path.join(os.environ.get('HOME'), '.local', 'share')

        return os.path.join(data_home, 'buku')
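
    # For illustration (hypothetical values): with XDG_DATA_HOME=/home/u/.local/share
    # the DB is looked up at /home/u/.local/share/buku/bookmarks.db; with neither
    # XDG_DATA_HOME nor HOME set, it falls back to ./buku/bookmarks.db.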

    @staticmethod
    def move_legacy_dbfile():
        """Move database file from earlier path used in versions <= 1.8
        to new path. Errors out if both the old and new DB files exist.
        """

        olddbpath = os.path.join(os.environ.get('HOME'), '.cache', 'buku')
        olddbfile = os.path.join(olddbpath, 'bookmarks.db')

        if not os.path.exists(olddbfile):
            return

        newdbpath = BukuDb.get_dbdir_path()
        newdbfile = os.path.join(newdbpath, 'bookmarks.db')

        if os.path.exists(newdbfile):
            print('Both old (%s) and new (%s) databases exist, need manual action' % (olddbfile, newdbfile))
            sys.exit(1)

        if not os.path.exists(newdbpath):
            os.makedirs(newdbpath)

        os.rename(olddbfile, newdbfile)
        print('Database was moved from old (%s) to new (%s) location.\n' % (olddbfile, newdbfile))

        os.rmdir(olddbpath)

    @staticmethod
    def initdb():
        """Initialize the database connection. Create DB file and/or bookmarks table
        if they don't exist. Alert on encryption options on first execution.

        Returns: connection, cursor
        """

        dbpath = BukuDb.get_dbdir_path()
        if not os.path.exists(dbpath):
            os.makedirs(dbpath)

        dbfile = os.path.join(dbpath, 'bookmarks.db')

        encpath = os.path.join(dbpath, 'bookmarks.db.enc')
        # Notify if DB file needs to be decrypted first
        if os.path.exists(encpath) and not os.path.exists(dbfile):
            print('Unlock database first')
            sys.exit(1)

        # Show info on first creation
        if not os.path.exists(dbfile):
            print('DB file is being created. You may want to encrypt it later.')

        try:
            # Create a connection
            conn = sqlite3.connect(dbfile)
            cur = conn.cursor()

            # Create table if it doesn't exist
            cur.execute("CREATE TABLE if not exists bookmarks \
                        (id integer PRIMARY KEY, URL text NOT NULL UNIQUE, \
                        metadata text default '', tags text default ',', desc text default '')")
            conn.commit()
        except Exception as e:
            print('\x1b[1mEXCEPTION\x1b[21m [initdb]: (%s) %s' % (type(e).__name__, e))
            sys.exit(1)

        # Add description column in existing DB (from version 2.1)
        try:
            cur.execute("ALTER TABLE bookmarks ADD COLUMN desc text default ''")
            conn.commit()
        except Exception:
            pass

        return (conn, cur)

    def get_bookmark_index(self, url):
        """Check if URL already exists in DB

        Params: URL to search
        Returns: DB index if URL found, else -1
        """

        self.cur.execute('SELECT id FROM bookmarks WHERE URL = ?', (url,))
        resultset = self.cur.fetchall()
        if len(resultset) == 0:
            return -1

        return resultset[0][0]

    def add_bookmark(self, url, title_manual=None, tag_manual=None, desc=None):
        """Add a new bookmark

        :param url: url to bookmark
        :param tag_manual: string of comma-separated tags to add manually
        :param title_manual: string title to add manually
        :param desc: string description
        """

        # Ensure that the URL does not exist in DB already
        id = self.get_bookmark_index(url)
        if id != -1:
            print('URL [%s] already exists at index %d' % (url, id))
            return

        # Process title
        if title_manual is not None:
            meta = title_manual
        else:
            meta = network_handler(url)
            if meta == '':
                print('\x1B[91mTitle: []\x1B[0m\n')
            elif debug:
                print('Title: [%s]\n' % meta)

        # Process tags
        if tag_manual is None:
            tag_manual = DELIMITER
        else:
            if tag_manual[0] != DELIMITER:
                tag_manual = DELIMITER + tag_manual
            if tag_manual[-1] != DELIMITER:
                tag_manual = tag_manual + DELIMITER

        # Process description
        if desc is None:
            desc = ''

        try:
            self.cur.execute('INSERT INTO bookmarks(URL, metadata, tags, desc) VALUES (?, ?, ?, ?)', (url, meta, tag_manual, desc))
            self.conn.commit()
            self.print_bookmark(self.cur.lastrowid)
        except Exception as e:
            print('\x1b[1mEXCEPTION\x1b[21m [add_bookmark]: (%s) %s' % (type(e).__name__, e))

    def update_bookmark(self, index, url='', title_manual=None, tag_manual=None, desc=None):
        """Update an existing record at index

        :param index: int position to update
        :param url: address
        :param tag_manual: string of comma-separated tags to add manually
        :param title_manual: string title to add manually
        :param desc: string description
        :return:
        """

        arguments = []
        query = 'UPDATE bookmarks SET'
        to_update = False

        # Update URL if passed as argument
        if url != '':
            query += ' URL = ?,'
            arguments.append(url)
            to_update = True

        # Update tags if passed as argument
        if tag_manual is not None:
            query += ' tags = ?,'
            arguments.append(tag_manual)
            to_update = True

        # Update description if passed as an argument
        if desc is not None:
            query += ' desc = ?,'
            arguments.append(desc)
            to_update = True

        # Update title
        #
        # 1. if -t has no arguments, delete existing title
        # 2. if -t has arguments, update existing title
        # 3. if -t option is omitted at cmdline:
        #        if URL is passed, update the title from web using the URL
        # 4. if no other argument (url, tag, comment) is passed, update title from web using DB URL
        meta = None
        if title_manual is not None:
            meta = title_manual
        elif url != '':
            meta = network_handler(url)
            if meta == '':
                print('\x1B[91mTitle: []\x1B[0m')
            elif debug:
                print('Title: [%s]' % meta)
        elif not to_update:
            self.refreshdb(index)
            self.print_bookmark(index)

        if meta is not None:
            query += ' metadata = ?,'
            arguments.append(meta)
            to_update = True

        if not to_update:  # Nothing to update
            return

        query = query[:-1] + ' WHERE id = ?'
        arguments.append(index)
        if debug:
            print('query: [%s], args: [%s]' % (query, arguments))

        try:
            self.cur.execute(query, arguments)
            self.conn.commit()
            if self.cur.rowcount == 1:
                self.print_bookmark(index)
            else:
                print('No matching index')
        except sqlite3.IntegrityError:
            print('URL already exists')
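
    # For illustration (hypothetical call): update_bookmark(3, tag_manual=',tech,')
    # builds 'UPDATE bookmarks SET tags = ? WHERE id = ?' with arguments [',tech,', 3].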

    def refreshdb(self, index, title_manual=None):
        """Refresh records in the database. Fetch title for each
        bookmark from the web and update the records. Doesn't update
        the record if title is empty.
        This API doesn't change DB index, URL or tags of a bookmark.

        :param index: index of record to update, or 0 for all records
        :param title_manual: custom title
        """

        if index == 0:
            self.cur.execute('SELECT id, url FROM bookmarks ORDER BY id ASC')
        else:
            self.cur.execute('SELECT id, url FROM bookmarks WHERE id = ?', (index,))

        resultset = self.cur.fetchall()
        if title_manual is None:
            for row in resultset:
                title = network_handler(row[1])
                if title == '':
                    print('\x1b[1mIndex %d: empty title\x1b[21m\x1B[0m\n' % row[0])
                    continue
                else:
                    print('Title: [%s]' % title)

                self.cur.execute('UPDATE bookmarks SET metadata = ? WHERE id = ?', (title, row[0],))
                self.conn.commit()
                print('Index %d updated\n' % row[0])
        else:
            title = title_manual

            for row in resultset:
                self.cur.execute('UPDATE bookmarks SET metadata = ? WHERE id = ?', (title, row[0],))
                self.conn.commit()
                print('Index %d updated\n' % row[0])

    def searchdb(self, keywords, all_keywords=False, json=False):
        """Search the database for entries with tags, URL
        or title info matching keywords and list them.

        :param keywords: keywords to search
        :param all_keywords: search any or all keywords
        :param json: json formatted output
        """

        arguments = []
        placeholder = "'%' || ? || '%'"
        query = "SELECT id, url, metadata, tags, desc FROM bookmarks WHERE"

        if all_keywords:  # Match all keywords in URL or title
            for token in keywords:
                query += " (tags LIKE (%s) OR URL LIKE (%s) OR metadata LIKE (%s) OR desc LIKE (%s)) AND" % (placeholder, placeholder, placeholder, placeholder)
                arguments.append(token)
                arguments.append(token)
                arguments.append(token)
                arguments.append(token)
            query = query[:-4]
        else:  # Match any keyword in URL or title
            for token in keywords:
                query += " tags LIKE (%s) OR URL LIKE (%s) OR metadata LIKE (%s) OR desc LIKE (%s) OR" % (placeholder, placeholder, placeholder, placeholder)
                arguments.append(token)
                arguments.append(token)
                arguments.append(token)
                arguments.append(token)
            query = query[:-3]

        if debug:
            print("\"%s\", (%s)" % (query, arguments))

        self.cur.execute(query, arguments)
        results = self.cur.fetchall()
        if len(results) == 0:
            return

        if json == False:
            prompt(results, self.noninteractive)
        else:
            print(format_json(results))
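
    # For illustration: searchdb(['kernel', 'debug'], all_keywords=True) builds
    # WHERE (tags LIKE ... OR URL LIKE ... OR metadata LIKE ... OR desc LIKE ...) AND (...),
    # one parenthesised group per keyword, with each ? bound via '%' || ? || '%'.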

    def search_by_tag(self, tag, json=False):
        """Search and list bookmarks with a tag

        :param tag: tag to search
        :param json: print in json format
        """

        self.cur.execute("SELECT id, url, metadata, tags, desc FROM bookmarks WHERE tags LIKE '%' || ? || '%'", (tag,))
        results = self.cur.fetchall()
        if len(results) == 0:
            return

        if json == False:
            prompt(results, self.noninteractive)
        else:
            print(format_json(results))

    def compactdb(self, index):
        """When an entry at index is deleted, move the last
        entry in DB to that index, if the deleted index is smaller.

        Params: index of deleted entry
        """

        self.cur.execute('SELECT MAX(id) from bookmarks')
        results = self.cur.fetchall()
        if len(results) == 1 and results[0][0] is None:  # Return if the last index was just deleted
            return

        for row in results:
            if row[0] > index:
                self.cur.execute('SELECT id, URL, metadata, tags, desc FROM bookmarks WHERE id = ?', (row[0],))
                results = self.cur.fetchall()
                for row in results:
                    self.cur.execute('DELETE FROM bookmarks WHERE id = ?', (row[0],))
                    self.conn.commit()
                    self.cur.execute('INSERT INTO bookmarks(id, URL, metadata, tags, desc) VALUES (?, ?, ?, ?, ?)', (index, row[1], row[2], row[3], row[4],))
                    self.conn.commit()
                    print('Index %d moved to %d' % (row[0], index))
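
    # Note (added for clarity): compactdb keeps indices contiguous by moving the
    # record with the highest id into the slot freed by a deletion, e.g. deleting
    # index 2 of 5 re-inserts record 5 at index 2.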

    def delete_bookmark(self, index):
        """Delete a single record or remove the table if index is 0

        Params: index to delete
        """

        if index == 0:  # Remove the table
            resp = input('ALL bookmarks will be removed. Enter \x1b[1my\x1b[21m to confirm: ')
            if resp != 'y':
                print('No bookmarks deleted')
                return

            self.cur.execute('DROP TABLE if exists bookmarks')
            self.conn.commit()
            print('All bookmarks deleted')
        else:  # Remove a single entry
            try:
                self.cur.execute('DELETE FROM bookmarks WHERE id = ?', (index,))
                self.conn.commit()
                if self.cur.rowcount == 1:
                    print('Removed index %d' % index)
                    self.compactdb(index)
                else:
                    print('No matching index')
            except IndexError:
                print('Index out of bound')

    def print_bookmark(self, index, empty=False):
        """Print bookmark details at index or all bookmarks if index is 0
        Print only bookmarks with blank title or tag if empty is True
        Note: URL is printed on top because title may be blank

        Params: index to print, flag to show only bookmarks with no title or tags
        """

        global showOpt
        global jsonOutput

        resultset = None
        if index == 0:  # Show all entries
            if empty == False:
                self.cur.execute('SELECT * FROM bookmarks')
                resultset = self.cur.fetchall()
            else:
                self.cur.execute("SELECT * FROM bookmarks WHERE metadata = '' OR tags = ?", (DELIMITER,))
                resultset = self.cur.fetchall()
                print('\x1b[1m%d records found\x1b[21m\n' % len(resultset))

            if jsonOutput == False:
                if showOpt == 0:
                    for row in resultset:
                        print_record(row)
                elif showOpt == 1:
                    for row in resultset:
                        print('%s %s' % (row[0], row[1]))
                elif showOpt == 2:
                    for row in resultset:
                        print('%s %s %s' % (row[0], row[1], row[3][1:-1]))
            else:
                print(format_json(resultset))
        else:  # Show record at index
            try:
                self.cur.execute('SELECT * FROM bookmarks WHERE id = ?', (index,))
                results = self.cur.fetchall()
                if len(results) == 0:
                    print('No matching index')
                    return
            except IndexError:
                print('Index out of bound')
                return

            if jsonOutput == False:
                for row in results:
                    if showOpt == 0:
                        print_record(row)
                    elif showOpt == 1:
                        print('%s %s' % (row[0], row[1]))
                    elif showOpt == 2:
                        print('%s %s %s' % (row[0], row[1], row[3][1:-1]))
            else:
                print(format_json(results, True))

    def list_tags(self):
        """Print all unique tags ordered alphabetically"""

        count = 1
        Tags = []
        uniqueTags = []
        for row in self.cur.execute('SELECT DISTINCT tags FROM bookmarks'):
            if row[0] == DELIMITER:
                continue

            Tags.extend(row[0].strip(DELIMITER).split(DELIMITER))

        for tag in Tags:
            if tag not in uniqueTags:
                uniqueTags.append(tag)

        Tags = sorted(uniqueTags, key=str.lower)
        for tag in Tags:
            print('%6d. %s' % (count, tag))
            count += 1

    def replace_tag(self, orig, new=None):
        """Replace orig tags with new tags in DB for all records.
        Remove orig tag if new tag is empty.

        Params: original and new tags
        """

        update = False
        delete = False
        newtags = DELIMITER

        orig = DELIMITER + orig + DELIMITER
        if new is None:
            delete = True
        else:
            newtags = parse_tags(new)
            if newtags == DELIMITER:
                delete = True

        if orig == newtags:
            print('Tags are same.')
            return

        self.cur.execute('SELECT id, tags FROM bookmarks WHERE tags LIKE ?', ('%' + orig + '%',))
        results = self.cur.fetchall()

        for row in results:
            if delete == False:
                # Check if tag newtags is already added
                if row[1].find(newtags) >= 0:
                    newtags = DELIMITER

            tags = row[1].replace(orig, newtags)
            self.cur.execute('UPDATE bookmarks SET tags = ? WHERE id = ?', (tags, row[0],))
            print('Index %d updated' % row[0])
            update = True

        if update:
            self.conn.commit()

    def browse_by_index(self, index):
        """Open URL at index in browser

        Params: index
        """

        try:
            for row in self.cur.execute('SELECT URL FROM bookmarks WHERE id = ?', (index,)):
                url = unquote(row[0])
                browser_open(url)
                return
            print('No matching index')
        except IndexError:
            print('Index out of bound')

    def close_quit(self, exitval=0):
        """Close a DB connection and exit"""

        if self.conn is not None:
            try:
                self.cur.close()
                self.conn.close()
            except Exception:  # we don't really care about errors, we're closing down anyway
                pass
        sys.exit(exitval)

    def import_bookmark(self, fp):
        """Import bookmarks from an HTML file.
        Supports Firefox, Google Chrome and IE imports

        Params: Path to file to import
        """

        if not os.path.exists(fp):
            printmsg((fp + ' not found'), 'ERROR')
            self.close_quit(1)

        try:
            import bs4
        except ImportError:
            printmsg('Beautiful Soup not found', 'ERROR')
            self.close_quit(1)

        with open(fp, encoding='utf-8') as f:
            soup = bs4.BeautifulSoup(f, 'html.parser')

        html_tags = soup.findAll('a')
        for tag in html_tags:
            # Extract comment from <dd> tag
            desc = None
            comment_tag = tag.findNextSibling('dd')
            if comment_tag:
                desc = comment_tag.text[0:comment_tag.text.find('\n')]

            self.add_bookmark(tag['href'],
                              tag.string,
                              (DELIMITER + tag['tags'] + DELIMITER) if tag.has_attr('tags') else None,
                              desc)

    def mergedb(self, fp):
        """Merge bookmarks from another Buku database file

        Params: Path to file to merge
        """

        if not os.path.exists(fp):
            printmsg((fp + ' not found'), 'ERROR')
            self.close_quit(1)

        try:
            # Create a connection
            connfp = sqlite3.connect(fp)
            curfp = connfp.cursor()
        except Exception as e:
            print('\x1b[1mEXCEPTION\x1b[21m [mergedb]: (%s) %s' % (type(e).__name__, e))
            sys.exit(1)

        curfp.execute('SELECT * FROM bookmarks')
        resultset = curfp.fetchall()
        for row in resultset:
            self.add_bookmark(row[1], row[2], row[3], row[4])

        try:
            curfp.close()
            connfp.close()
        except Exception:
            pass


# Generic functions

def connect_server(url, fullurl=False, forced=False):
    """Connect to a server and fetch the requested page data.
    Supports gzip compression.

    If forced is True, for URLs like http://www.domain.com
    or http://www.domain.com/, the path is www.domain.com or
    www.domain.com/ correspondingly.

    If fullurl is False, for URLs like http://www.domain.com/,
    path is /, else www.domain.com/.

    Params: URL to fetch, use complete url as path, force flag
    Returns: connection, HTTP(S) GET response
    """

    if url.find('%20') != -1:
        url = unquote(url).replace(' ', '%20')
    else:
        url = unquote(url)

    if debug:
        print('unquoted: %s' % url)

    if url.find('https://') >= 0:  # Secure connection
        server = url[8:]
        marker = server.find('/')
        if marker > 0:
            if fullurl == False and forced == False:
                url = server[marker:]
            server = server[:marker]
        elif forced == False:  # Handle domain name without trailing /
            url = '/'
        urlconn = HTTPSConnection(server, timeout=30)
    elif url.find('http://') >= 0:  # Insecure connection
        server = url[7:]
        marker = server.find('/')
        if marker > 0:
            if fullurl == False and forced == False:
                url = server[marker:]
            server = server[:marker]
        elif forced == False:
            url = '/'
        urlconn = HTTPConnection(server, timeout=30)
    else:
        printmsg('Not a valid HTTP(S) url', 'WARNING')
        if url.find(':') == -1:
            printmsg("Doesn't appear to be a valid url either", 'WARNING')
        return (None, None)

    if debug:
        print('server [%s] url [%s]' % (server, url))

    # Handle URLs passed with %xx escape
    try:
        url.encode('ascii')
    except Exception:
        url = quote(url)

    urlconn.request('GET', url, None, {
        'Accept-encoding': 'gzip',
    })
    return (urlconn, urlconn.getresponse())
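
# For illustration (hypothetical input): connect_server('https://example.com/a/b')
# yields server='example.com' and request path '/a/b'; non-ASCII paths are
# re-quoted with %xx escapes before the GET is issued.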

def get_page_title(resp):
    """Invoke HTML parser and extract title from HTTP response

    Params: GET response
    """

    data = None
    charset = resp.headers.get_content_charset()

    if resp.headers.get('Content-Encoding') == 'gzip':
        if debug:
            print('gzip response')
        data = gzip.GzipFile(fileobj=io.BytesIO(resp.read())).read()
    else:
        data = resp.read()

    if charset is None:
        charset = 'utf-8'
        if debug:
            printmsg('Charset missing in response', 'WARNING')

    if debug:
        print('charset: %s' % charset)

    parser = BMHTMLParser()
    try:
        if charset == 'utf-8':
            parser.feed(data.decode(charset, 'replace'))
        else:
            parser.feed(data.decode(charset))
    except Exception as e:
        if debug and str(e) != 'we should not get here!':
            # Suppress Exception due to intentional self.reset() in HTMLParser
            print('\x1b[1mEXCEPTION\x1b[21m [get_page_title]: (%s) %s' % (type(e).__name__, e))

def network_handler(url):
    """Handle server connection and redirections

    Params: URL to fetch
    Returns: page title or empty string, if not found
    """

    global titleData
    titleData = None
    urlconn = None
    retry = False

    try:
        urlconn, resp = connect_server(url, False)

        while True:
            if resp is None:
                break
            elif resp.status == 200:
                get_page_title(resp)
                break
            elif resp.status in [301, 302]:
                redirurl = urljoin(url, resp.getheader('location', ''))
                if debug:
                    printmsg(redirurl, 'REDIRECTION')
                retry = False  # Reset retry, start fresh on redirection

                if redirurl.find('sorry/IndexRedirect?') >= 0:  # gracefully handle Google blocks
                    printmsg('Connection blocked due to unusual activity', 'ERROR')
                    break

                marker = redirurl.find('redirectUrl=')
                if marker != -1:
                    redirurl = redirurl[marker + 12:]

                # break same URL redirection loop
                if url == redirurl:
                    printmsg('Detected repeated redirection to same URL', 'ERROR')
                    break

                url = redirurl
                urlconn.close()
                # Try with complete URL on redirection
                urlconn, resp = connect_server(url, True)
            elif resp.status == 403 and retry == False:
                """Handle URLs of the form https://www.domain.com or
                https://www.domain.com/ which fail when trying to fetch
                resource '/', retry with full path.
                """
                urlconn.close()
                if debug:
                    print('Received status 403: retrying.')
                # Remove trailing /
                if url[-1] == '/':
                    url = url[:-1]
                urlconn, resp = connect_server(url, False, True)
                retry = True
            elif resp.status == 500 and retry == False:
                """Retry on status 500 (Internal Server Error) with truncated
                URL. Some servers support truncated request URL on redirection.
                """
                urlconn.close()
                if debug:
                    print('Received status 500: retrying.')
                urlconn, resp = connect_server(url, False)
                retry = True
            else:
                printmsg(('[' + str(resp.status) + '] ' + resp.reason), 'ERROR')
                break
    except Exception as e:
        print('\x1b[1mEXCEPTION\x1b[21m [network_handler]: (%s) %s' % (type(e).__name__, e))
    finally:
        if urlconn is not None:
            urlconn.close()
        if titleData is None:
            return ''
        return titleData.strip().replace('\n', '')

def parse_tags(keywords=[]):
    """Format and get tag string from tokens"""

    tags = DELIMITER
    origTags = []
    uniqueTags = []

    # Cleanse and get the tags
    tagstr = ' '.join(keywords)
    marker = tagstr.find(',')

    while marker >= 0:
        token = tagstr[0:marker]
        tagstr = tagstr[marker + 1:]
        marker = tagstr.find(',')
        token = token.strip()
        if token == '':
            continue

        tags += token + ','

    tagstr = tagstr.strip()
    if tagstr != '':
        tags += tagstr + ','

    if debug:
        print(keywords)
        print('tags: [%s]' % tags)

    if tags == DELIMITER:
        return tags

    origTags.extend(tags.strip(DELIMITER).split(DELIMITER))
    for tag in origTags:
        if tag not in uniqueTags:
            uniqueTags.append(tag)  # Select unique tags

    # Sort the tags
    sortedTags = sorted(uniqueTags, key=str.lower)

    # Wrap with delimiter
    return DELIMITER + DELIMITER.join(sortedTags) + DELIMITER
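
# For illustration: parse_tags(['linux,news']) returns ',linux,news,' -- tokens
# are split on commas, stripped, deduplicated, sorted case-insensitively and
# wrapped with DELIMITER.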

def prompt(results, noninteractive=False):
    """Show each matching result from a search and prompt"""

    count = 0
    for row in results:
        count += 1
        print_record(row, count)

    if noninteractive:
        return

    while True:
        try:
            nav = input('Result number to open: ')
            if not nav:
                nav = input('Result number to open: ')
                if not nav:
                    # Quit on double enter
                    break
        except EOFError:
            return

        if is_int(nav):
            index = int(nav) - 1
            if index < 0 or index >= count:
                print('Index out of bound')
                continue

            try:
                browser_open(unquote(results[index][1]))
            except Exception as e:
                print('\x1b[1mEXCEPTION\x1b[21m [prompt]: (%s) %s' % (type(e).__name__, e))
        else:
            break

def print_record(row, count=0):
    """Print a single DB record
    Handled differently for search results (count > 0) and plain print (count = 0)
    """

    # Print index and URL
    if count != 0:
        print('\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m\t[%d]' % (count, row[1], row[0]))
    else:
        print('\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m' % (row[0], row[1]))

    # Print title
    if row[2] != '':
        print(' \x1B[91m>\x1B[0m %s' % row[2])

    # Print description
    if row[4] != '':
        print(' \x1B[91m+\x1B[0m %s' % row[4])

    # Print tags IF not default (DELIMITER)
    if row[3] != DELIMITER:
        print(' \x1B[91m#\x1B[0m %s' % row[3][1:-1])

    print('')

def format_json(resultset, single=False):
    """Return results in JSON format"""

    global showOpt

    if single == False:
        marks = []
        for row in resultset:
            if showOpt == 1:
                record = {'uri': row[1]}
            elif showOpt == 2:
                record = {'uri': row[1], 'tags': row[3][1:-1]}
            else:
                record = {'uri': row[1], 'title': row[2], 'description': row[4], 'tags': row[3][1:-1]}

            marks.append(record)
    else:
        marks = {}
        for row in resultset:
            if showOpt == 1:
                marks['uri'] = row[1]
            elif showOpt == 2:
                marks['uri'] = row[1]
                marks['tags'] = row[3][1:-1]
            else:
                marks['uri'] = row[1]
                marks['title'] = row[2]
                marks['description'] = row[4]
                marks['tags'] = row[3][1:-1]

    return json.dumps(marks, sort_keys=True, indent=4)

def is_int(string):
    """Check if a string is a digit

    Params: string
    """

    try:
        int(string)
        return True
    except Exception:
        return False

def browser_open(url):
    """Duplicate stdout and stderr (to suppress errors showing
    on the terminal) and open URL in default browser

    Params: url to open
    """

    url = url.replace('%22', '"')

    _stderr = os.dup(2)
    os.close(2)
    _stdout = os.dup(1)
    os.close(1)
    fd = os.open(os.devnull, os.O_RDWR)
    os.dup2(fd, 2)
    os.dup2(fd, 1)
    try:
        webbrowser.open(url)
    except Exception as e:
        print('\x1b[1mEXCEPTION\x1b[21m [browser_open]: (%s) %s' % (type(e).__name__, e))
    finally:
        os.close(fd)
        os.dup2(_stderr, 2)
        os.dup2(_stdout, 1)

def get_filehash(filepath):
    """Get the SHA256 hash of a file

    Params: path to the file
    """

    with open(filepath, 'rb') as f:
        hasher = hashlib.sha256()
        buf = f.read(BLOCKSIZE)
        while len(buf) > 0:
            hasher.update(buf)
            buf = f.read(BLOCKSIZE)

    return hasher.digest()

def encrypt_file(iterations):
    """Encrypt the bookmarks database file"""

    dbpath = os.path.join(BukuDb.get_dbdir_path(), 'bookmarks.db')
    encpath = dbpath + '.enc'
    if not os.path.exists(dbpath):
        print('%s missing. Already encrypted?' % dbpath)
        sys.exit(1)

    # If both encrypted file and flat file exist, error out
    if os.path.exists(dbpath) and os.path.exists(encpath):
        printmsg('Both encrypted and flat DB files exist!', 'ERROR')
        sys.exit(1)

    password = getpass.getpass()
    passconfirm = getpass.getpass()
    if password == '':
        print('Empty password')
        sys.exit(1)
    if password != passconfirm:
        print("Passwords don't match")
        sys.exit(1)

    # Get SHA256 hash of DB file
    dbhash = get_filehash(dbpath)

    # Generate random 256-bit salt and key
    salt = Random.get_random_bytes(SALT_SIZE)
    key = (password + salt.decode('utf-8', 'replace')).encode('utf-8')
    for i in range(iterations):
        key = hashlib.sha256(key).digest()

    iv = Random.get_random_bytes(16)
    cipher = AES.new(key, AES.MODE_CBC, iv)
    filesize = os.path.getsize(dbpath)

    with open(dbpath, 'rb') as infile:
        with open(encpath, 'wb') as outfile:
            outfile.write(struct.pack('<Q', filesize))
            outfile.write(salt)
            outfile.write(iv)

            # Embed DB file hash in encrypted file
            outfile.write(dbhash)

            while True:
                chunk = infile.read(CHUNKSIZE)
                if len(chunk) == 0:
                    break
                elif len(chunk) % 16 != 0:
                    # Pad the last chunk to the AES block size (must be bytes, not str)
                    chunk += b' ' * (16 - len(chunk) % 16)

                outfile.write(cipher.encrypt(chunk))

    os.remove(dbpath)
    print('File encrypted')
    sys.exit(0)
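
# Note (added for clarity): the encrypted file layout written above is
# <8-byte original size><32-byte salt><16-byte IV><32-byte SHA256 of plain DB>
# followed by the AES-CBC ciphertext; the key is SHA256 iterated `iterations`
# times over password + salt.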

def decrypt_file(iterations):
    """Decrypt the bookmarks database file"""

    dbpath = os.path.join(BukuDb.get_dbdir_path(), 'bookmarks.db')
    encpath = dbpath + '.enc'
    if not os.path.exists(encpath):
        printmsg((encpath + ' missing'), 'ERROR')
        sys.exit(1)

    # If both encrypted file and flat file exist, error out
    if os.path.exists(dbpath) and os.path.exists(encpath):
        printmsg('Both encrypted and flat DB files exist!', 'ERROR')
        sys.exit(1)

    password = getpass.getpass()
    if password == '':
        printmsg('Decryption failed', 'ERROR')
        sys.exit(1)

    with open(encpath, 'rb') as infile:
        origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]

        # Read 256-bit salt and generate key
        salt = infile.read(32)
        key = (password + salt.decode('utf-8', 'replace')).encode('utf-8')
        for i in range(iterations):
            key = hashlib.sha256(key).digest()

        iv = infile.read(16)
        cipher = AES.new(key, AES.MODE_CBC, iv)

        # Get original DB file's SHA256 hash from encrypted file
        enchash = infile.read(32)

        with open(dbpath, 'wb') as outfile:
            while True:
                chunk = infile.read(CHUNKSIZE)
                if len(chunk) == 0:
                    break

                outfile.write(cipher.decrypt(chunk))

            outfile.truncate(origsize)

    # Match hash of generated file with that of original DB file
    dbhash = get_filehash(dbpath)
    if dbhash != enchash:
        os.remove(dbpath)
        printmsg('Decryption failed', 'ERROR')
        sys.exit(1)
    else:
        os.remove(encpath)
        print('File decrypted')


def sigint_handler(signum, frame):
    """Custom SIGINT handler"""

    print('\nInterrupted.', file=sys.stderr)
    sys.exit(1)

signal.signal(signal.SIGINT, sigint_handler)
2016-04-01 09:56:00 -05:00
|
|
|
def printmsg(msg, level=None):
|
2016-04-05 07:55:29 -05:00
|
|
|
"""Print a message in 2 parts, with the level in bold
|
|
|
|
|
|
|
|
Params: msg, level
|
|
|
|
"""
|
|
|
|
|
2016-04-01 09:56:00 -05:00
|
|
|
if level is not None:
|
2016-05-24 12:51:38 -05:00
|
|
|
print('\x1b[1m%s: \x1b[22m%s\x1b[0m' % (level, msg))  # SGR 22 turns bold off
|
2016-04-01 09:56:00 -05:00
|
|
|
else:
|
2016-05-24 12:51:38 -05:00
|
|
|
print(msg)
|
2016-04-01 09:56:00 -05:00
|
|
|
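# Example usage (illustrative): the level prefix is emphasized, the
# message itself is printed normally.
#   printmsg('PyCrypto missing', 'ERROR')  # -> "ERROR: PyCrypto missing"
#   printmsg('File decrypted')             # -> "File decrypted"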
|
2016-04-05 07:55:29 -05:00
|
|
|
|
2016-05-24 11:00:54 -05:00
|
|
|
# Custom Action classes for argparse
|
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomUpdateAction(argparse.Action):
|
2016-05-18 12:23:08 -05:00
|
|
|
"""Class to capture if optional param 'update'
|
2016-04-25 12:57:01 -05:00
|
|
|
is actually used, even without arguments
|
|
|
|
"""
|
|
|
|
|
2016-04-25 11:23:03 -05:00
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global update
|
|
|
|
|
|
|
|
update = True
|
|
|
|
# NOTE: when the option is passed without arguments, this stores an empty list [] instead of None
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
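# Note on the custom Action classes in this section (illustrative): with
# nargs='*', argparse by itself cannot distinguish an option passed without
# values (dest == []) from an option not passed at all (dest is None).
# Each Action's __call__ runs only when the option actually appears on the
# command line, so a global flag records its presence. A hypothetical
# check, not part of buku:
#   argparser.parse_args(['-u'])  # update becomes True, args.update == []
#   argparser.parse_args([])      # update stays False, args.update is None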
|
2016-04-26 12:23:48 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTagAction(argparse.Action):
|
2016-05-18 22:24:46 -05:00
|
|
|
"""Class to capture if optional param 'tag'
|
2016-05-18 12:23:08 -05:00
|
|
|
is actually used, even without arguments
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
2016-05-18 22:24:46 -05:00
|
|
|
global tagManual
|
2016-05-18 12:23:08 -05:00
|
|
|
|
2016-05-29 08:33:58 -05:00
|
|
|
tagManual = [DELIMITER]
|
2016-05-18 12:23:08 -05:00
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTitleAction(argparse.Action):
|
2016-05-18 12:23:08 -05:00
|
|
|
"""Class to capture if optional param 'title'
|
2016-04-27 12:56:57 -05:00
|
|
|
is actually used, even without arguments
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global titleManual
|
|
|
|
|
|
|
|
titleManual = ''
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomDescAction(argparse.Action):
|
2016-05-18 12:23:08 -05:00
|
|
|
"""Class to capture if optional param 'comment'
|
2016-05-17 15:11:31 -05:00
|
|
|
is actually used, even without arguments
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global description
|
|
|
|
|
|
|
|
description = ''
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTagSearchAction(argparse.Action):
|
2016-05-18 22:24:46 -05:00
|
|
|
"""Class to capture if optional param 'stag'
|
|
|
|
is actually used, even without arguments
|
|
|
|
"""
|
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global tagsearch
|
|
|
|
|
|
|
|
tagsearch = True
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
|
|
|
|
2016-04-24 14:19:32 -05:00
|
|
|
class ExtendedArgumentParser(argparse.ArgumentParser):
|
2016-04-25 12:57:01 -05:00
|
|
|
"""Extend classic argument parser"""
|
2016-04-24 14:19:32 -05:00
|
|
|
|
2016-04-25 15:45:10 -05:00
|
|
|
# Print additional help and info
|
|
|
|
@staticmethod
|
|
|
|
def print_extended_help(file=None):
|
2016-05-14 10:33:21 -05:00
|
|
|
if file is None:
    file = sys.stderr  # guard: default to stderr, as callers in this file do
file.write('''
|
|
|
|
prompt keys:
|
|
|
|
1-N open the Nth search result in web browser
|
2016-05-24 11:09:59 -05:00
|
|
|
double Enter exit buku
|
2016-05-14 10:33:21 -05:00
|
|
|
|
2016-05-22 12:50:19 -05:00
|
|
|
Version %s
|
2016-05-14 10:33:21 -05:00
|
|
|
Copyright (C) 2015-2016 Arun Prakash Jana <engineerarun@gmail.com>
|
|
|
|
License: GPLv3
|
|
|
|
Webpage: https://github.com/jarun/buku
|
|
|
|
''' % _VERSION_)
|
2016-04-25 15:45:10 -05:00
|
|
|
|
|
|
|
# Help
|
2016-04-24 14:19:32 -05:00
|
|
|
def print_help(self, file=None):
|
|
|
|
super().print_help(file)
|
2016-04-25 15:45:10 -05:00
|
|
|
self.print_extended_help(file)
|
2016-04-05 23:55:25 -05:00
|
|
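# Illustrative: since -h is declared with add_help=False further below and
# handled manually, `buku -h` runs this print_help(), which emits the
# standard argparse sections followed by the prompt keys and version block
# from print_extended_help().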
|
|
|
|
|
|
|
|
|
"""main starts here"""
|
2016-04-26 12:23:48 -05:00
|
|
|
|
|
|
|
# Handle piped input
|
2016-03-26 10:59:07 -05:00
|
|
|
def main(argv=sys.argv):
|
|
|
|
if not sys.stdin.isatty():
|
|
|
|
pipeargs.extend(argv)  # use the passed-in argv, not sys.argv directly
|
|
|
|
for s in sys.stdin.readlines():
|
|
|
|
pipeargs.extend(s.split())
|
|
|
|
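# Illustrative: `echo "-s kernel" | buku` makes stdin non-interactive, so
# main() collects "-s kernel" into pipeargs; the __main__ block below then
# swaps pipeargs in as sys.argv before the parser runs.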
|
2016-05-24 12:51:38 -05:00
|
|
|
if __name__ == '__main__':
|
2016-03-26 10:59:07 -05:00
|
|
|
try:
|
|
|
|
main(sys.argv)
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
pass
|
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# If piped input, set argument vector
|
|
|
|
if len(pipeargs) > 0:
|
|
|
|
sys.argv = pipeargs
|
|
|
|
|
|
|
|
# Setup custom argument parser
|
|
|
|
argparser = ExtendedArgumentParser(
|
|
|
|
description='A private command-line bookmark manager. Your mini web!',
|
|
|
|
formatter_class=argparse.RawTextHelpFormatter,
|
2016-05-29 02:19:11 -05:00
|
|
|
usage='''buku [-a URL [tags ...]] [-u [N]] [-i file] [-d [N]]
|
2016-05-25 12:15:52 -05:00
|
|
|
[--url keyword] [--tag [...]] [-t [...]] [-c [...]]
|
|
|
|
[-s keyword [...]] [-S keyword [...]] [--st [...]]
|
2016-05-29 02:19:11 -05:00
|
|
|
[-k [N]] [-l [N]] [-p [N]] [-f N] [-r oldtag [newtag ...]]
|
|
|
|
[-j] [--merge file] [--noprompt] [-o N] [-z] [-h]''',
|
2016-05-25 06:00:14 -05:00
|
|
|
add_help=False
|
|
|
|
)
|
|
|
|
|
|
|
|
# General options
|
|
|
|
general_group = argparser.add_argument_group(title='general options',
|
|
|
|
description='''-a, --add URL [tags ...]
|
2016-05-25 12:15:52 -05:00
|
|
|
bookmark URL with comma-separated tags
|
|
|
|
-u, --update [N] update fields of bookmark at DB index N
|
|
|
|
refresh all titles, if no arguments
|
|
|
|
refresh title of bookmark at N, if only
|
|
|
|
N is specified without any edit options
|
|
|
|
-d, --delete [N] delete bookmark at DB index N
|
|
|
|
delete all bookmarks, if no arguments
|
2016-05-29 02:19:11 -05:00
|
|
|
-i, --import file import bookmarks from HTML file; Firefox,
|
2016-05-25 12:15:52 -05:00
|
|
|
Google Chrome and IE formats supported
|
|
|
|
-h, --help show this information''')
|
2016-05-25 06:00:14 -05:00
|
|
|
general_group.add_argument('-a', '--add', nargs='+', dest='addurl', metavar=('URL', 'tags'), help=argparse.SUPPRESS)
|
|
|
|
general_group.add_argument('-u', '--update', nargs='*', dest='update', action=CustomUpdateAction, metavar=('N', 'URL tags'), help=argparse.SUPPRESS)
|
|
|
|
general_group.add_argument('-d', '--delete', nargs='?', dest='delete', type=int, const=0, metavar='N', help=argparse.SUPPRESS)
|
2016-05-29 02:19:11 -05:00
|
|
|
general_group.add_argument('-i', '--import', nargs=1, dest='imports', metavar='file', help=argparse.SUPPRESS)
|
2016-05-25 06:00:14 -05:00
|
|
|
general_group.add_argument('-h', '--help', dest='help', action='store_true', help=argparse.SUPPRESS)
|
|
|
|
|
|
|
|
# Edit options
|
|
|
|
edit_group = argparser.add_argument_group(title='edit options',
|
|
|
|
description='''--url keyword specify url, works with -u only
|
2016-05-25 12:15:52 -05:00
|
|
|
--tag [...] set comma-separated tags, works with -a, -u
|
|
|
|
clears tags, if no arguments
|
|
|
|
-t, --title [...] manually set title, works with -a, -u
|
|
|
|
if no arguments:
|
|
|
|
-a: do not set title, -u: clear title
|
|
|
|
-c, --comment [...] description of the bookmark, works with
|
|
|
|
-a, -u; clears comment, if no arguments''')
|
2016-05-25 06:00:14 -05:00
|
|
|
edit_group.add_argument('--url', nargs=1, dest='url', metavar='url', help=argparse.SUPPRESS)
|
|
|
|
edit_group.add_argument('--tag', nargs='*', dest='tag', action=CustomTagAction, metavar='tag', help=argparse.SUPPRESS)
|
|
|
|
edit_group.add_argument('-t', '--title', nargs='*', dest='title', action=CustomTitleAction, metavar='title', help=argparse.SUPPRESS)
|
|
|
|
edit_group.add_argument('-c', '--comment', nargs='*', dest='desc', type=str, action=CustomDescAction, metavar='desc', help=argparse.SUPPRESS)
|
|
|
|
|
|
|
|
# Search options
|
|
|
|
search_group = argparser.add_argument_group(title='search options',
|
|
|
|
description='''-s, --sany keyword [...]
|
2016-05-25 12:15:52 -05:00
|
|
|
search bookmarks for ANY matching keyword
|
|
|
|
-S, --sall keyword [...]
|
|
|
|
search bookmarks with ALL keywords
|
|
|
|
special keyword -
|
|
|
|
"blank": list entries with empty title/tag
|
|
|
|
--st, --stag [...] search bookmarks by tag
|
|
|
|
list all tags alphabetically, if no arguments''')
|
2016-05-25 06:00:14 -05:00
|
|
|
search_group.add_argument('-s', '--sany', nargs='+', metavar='keyword', help=argparse.SUPPRESS)
|
|
|
|
search_group.add_argument('-S', '--sall', nargs='+', metavar='keyword', help=argparse.SUPPRESS)
|
|
|
|
search_group.add_argument('--st', '--stag', nargs='*', dest='stag', action=CustomTagSearchAction, metavar='keyword', help=argparse.SUPPRESS)
|
|
|
|
|
|
|
|
# Encryption options
|
|
|
|
crypto_group = argparser.add_argument_group(title='encryption options',
|
|
|
|
description='''-l, --lock [N] encrypt DB file with N (> 0, default 8)
|
2016-05-25 12:15:52 -05:00
|
|
|
hash iterations to generate key
|
|
|
|
-k, --unlock [N] decrypt DB file with N (> 0, default 8)
|
|
|
|
hash iterations to generate key''')
|
2016-05-25 06:00:14 -05:00
|
|
|
crypto_group.add_argument('-k', '--unlock', nargs='?', dest='decrypt', type=int, const=8, metavar='N', help=argparse.SUPPRESS)
|
|
|
|
crypto_group.add_argument('-l', '--lock', nargs='?', dest='encrypt', type=int, const=8, metavar='N', help=argparse.SUPPRESS)
|
|
|
|
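# Example (illustrative): `buku -l 10` derives the key with 10 SHA-256
# rounds; the same count must be supplied to unlock, e.g. `buku -k 10`,
# or the embedded hash check in decrypt_file() fails.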
|
|
|
|
# Power toys
|
|
|
|
power_group = argparser.add_argument_group(title='power toys',
|
|
|
|
description='''-p, --print [N] show details of bookmark at DB index N
|
2016-05-25 12:15:52 -05:00
|
|
|
show all bookmarks, if no arguments
|
|
|
|
-f, --format N modify -p output
|
|
|
|
N=1: show only URL, N=2: show URL and tag
|
|
|
|
-r, --replace oldtag [newtag ...]
|
|
|
|
replace oldtag with newtag everywhere
|
|
|
|
delete oldtag, if no newtag
|
|
|
|
-j, --json JSON formatted output for -p, -s, -S, --st
|
2016-05-29 02:19:11 -05:00
|
|
|
--merge file merge bookmarks from another buku database
|
2016-05-28 08:15:03 -05:00
|
|
|
--noprompt do not show the prompt, run and exit
|
2016-05-25 12:15:52 -05:00
|
|
|
-o, --open N open bookmark at DB index N in web browser
|
|
|
|
-z, --debug show debug information and additional logs''')
|
2016-05-25 06:00:14 -05:00
|
|
|
power_group.add_argument('-p', '--print', nargs='?', dest='printindex', type=int, const=0, metavar='N', help=argparse.SUPPRESS)
|
|
|
|
power_group.add_argument('-f', '--format', dest='showOpt', type=int, choices=[1, 2], metavar='N', help=argparse.SUPPRESS)
|
|
|
|
power_group.add_argument('-r', '--replace', nargs='+', dest='replace', metavar=('oldtag', 'newtag'), help=argparse.SUPPRESS)
|
|
|
|
power_group.add_argument('-j', '--json', dest='jsonOutput', action='store_true', help=argparse.SUPPRESS)
|
2016-05-29 02:19:11 -05:00
|
|
|
power_group.add_argument('--merge', nargs=1, dest='merge', metavar='file', help=argparse.SUPPRESS)
|
2016-05-28 08:15:03 -05:00
|
|
|
power_group.add_argument('--noprompt', dest='noninteractive', action='store_true', help=argparse.SUPPRESS)
|
2016-05-25 06:00:14 -05:00
|
|
|
power_group.add_argument('-o', '--open', dest='openurl', type=int, metavar='N', help=argparse.SUPPRESS)
|
|
|
|
power_group.add_argument('-z', '--debug', dest='debug', action='store_true', help=argparse.SUPPRESS)
|
|
|
|
|
|
|
|
# Show help and exit if no arguments
|
|
|
|
if len(sys.argv) < 2:
|
|
|
|
argparser.print_help(sys.stderr)
|
|
|
|
sys.exit(1)
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Parse the arguments
|
|
|
|
args = argparser.parse_args()
|
|
|
|
|
|
|
|
# Show help and exit if help requested
|
|
|
|
if args.help:
|
|
|
|
argparser.print_help(sys.stderr)
|
|
|
|
sys.exit(0)
|
|
|
|
|
|
|
|
# Assign the values to globals
|
|
|
|
if args.showOpt is not None:
|
|
|
|
showOpt = args.showOpt
|
|
|
|
if tagManual is not None and len(args.tag) > 0:
|
|
|
|
tagManual = args.tag
|
|
|
|
if titleManual is not None and len(args.title) > 0:
|
|
|
|
titleManual = ' '.join(args.title)
|
|
|
|
if description is not None and len(args.desc) > 0:
|
|
|
|
description = ' '.join(args.desc)
|
|
|
|
if args.jsonOutput:
|
|
|
|
import json
|
|
|
|
jsonOutput = args.jsonOutput
|
2016-05-28 08:15:03 -05:00
|
|
|
noninteractive = args.noninteractive
|
2016-05-25 06:00:14 -05:00
|
|
|
debug = args.debug
|
|
|
|
|
|
|
|
# Show version in debug logs
|
|
|
|
if debug:
|
|
|
|
print('Version %s' % _VERSION_)
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Move pre-1.9 database to new location
|
|
|
|
BukuDb.move_legacy_dbfile()
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Handle encrypt/decrypt options at top priority
|
|
|
|
if args.encrypt is not None:
|
|
|
|
if no_crypto:
|
|
|
|
printmsg('PyCrypto missing', 'ERROR')
|
|
|
|
sys.exit(1)
|
|
|
|
if args.encrypt < 1:
|
|
|
|
printmsg('Iterations must be >= 1', 'ERROR')
|
|
|
|
sys.exit(1)
|
|
|
|
encrypt_file(args.encrypt)
|
2015-12-22 12:10:24 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
if args.decrypt is not None:
|
|
|
|
if no_crypto:
|
|
|
|
printmsg('PyCrypto missing', 'ERROR')
|
|
|
|
sys.exit(1)
|
|
|
|
if args.decrypt < 1:
|
|
|
|
printmsg('Iterations must be >= 1', 'ERROR')
|
|
|
|
sys.exit(1)
|
|
|
|
decrypt_file(args.decrypt)
|
2015-12-22 12:10:24 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Initialize the database and get handles
|
|
|
|
bdb = BukuDb()
|
2016-05-22 15:24:24 -05:00
|
|
|
|
2016-05-29 01:09:51 -05:00
|
|
|
# Import bookmarks
|
|
|
|
if args.imports is not None:
|
|
|
|
bdb.import_bookmark(args.imports[0])
|
|
|
|
|
|
|
|
# Merge a database file and exit
|
|
|
|
if args.merge is not None:
|
|
|
|
bdb.mergedb(args.merge[0])
|
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Add a record
|
|
|
|
if args.addurl is not None:
|
2016-05-22 15:24:24 -05:00
|
|
|
# Parse tags into a comma-separated string
|
2016-05-29 08:33:58 -05:00
|
|
|
tags = DELIMITER
|
2016-05-25 06:00:14 -05:00
|
|
|
keywords = args.addurl
|
2016-05-29 08:33:58 -05:00
|
|
|
if tagManual is not None and not (tagManual[0] == DELIMITER and len(tagManual) == 1):
|
|
|
|
keywords = args.addurl + [DELIMITER] + tagManual
|
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|
|
if len(keywords) > 1:
|
|
|
|
tags = parse_tags(keywords[1:])
|
|
|
|
|
2016-05-29 01:09:51 -05:00
|
|
|
bdb.add_bookmark(args.addurl[0], titleManual, tags, description)
|
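# Example (assumed tag storage format): `buku -a http://example.com linux news`
# sends ['linux', 'news'] through parse_tags(), which appears to store the
# set DELIMITER-wrapped as ',linux,news,' (compare the search-by-tag
# handling further below).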
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|
|
# Update record
|
|
|
|
if update:
|
|
|
|
if len(args.update) == 0:
|
|
|
|
bdb.refreshdb(0, titleManual)
|
|
|
|
elif not args.update[0].isdigit():
|
|
|
|
printmsg('Index must be a number >= 0', 'ERROR')
|
|
|
|
bdb.close_quit(1)
|
|
|
|
elif int(args.update[0]) == 0:
|
|
|
|
bdb.refreshdb(0, titleManual)
|
|
|
|
else:
|
|
|
|
if args.url is not None:
|
|
|
|
new_url = args.url[0]
|
|
|
|
else:
|
|
|
|
new_url = ''
|
|
|
|
|
|
|
|
# Parse tags into a comma-separated string
|
|
|
|
tags = None
|
2016-05-29 08:33:58 -05:00
|
|
|
if tagManual is not None and not (tagManual[0] == DELIMITER and len(tagManual) == 1):
|
2016-05-25 06:00:14 -05:00
|
|
|
tags = parse_tags(tagManual)
|
|
|
|
|
2016-05-29 01:44:14 -05:00
|
|
|
bdb.update_bookmark(int(args.update[0]), new_url, titleManual, tags, description)
|
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|
|
# Delete record(s)
|
|
|
|
if args.delete is not None:
|
|
|
|
if args.delete < 0:
|
|
|
|
printmsg('Index must be >= 0', 'ERROR')
|
|
|
|
bdb.close_quit(1)
|
|
|
|
bdb.delete_bookmark(args.delete)
|
|
|
|
|
|
|
|
# Search URLs, titles, tags for any keyword
|
|
|
|
if args.sany is not None:
|
|
|
|
bdb.searchdb(args.sany, False, jsonOutput)
|
|
|
|
|
|
|
|
# Search URLs, titles, tags with all keywords
|
|
|
|
if args.sall is not None:
|
|
|
|
if args.sall[0] == 'blank' and len(args.sall) == 1:
|
|
|
|
bdb.print_bookmark(0, True)
|
|
|
|
else:
|
|
|
|
bdb.searchdb(args.sall, True, jsonOutput)
|
2015-11-05 09:18:51 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Search bookmarks by tag
|
|
|
|
if tagsearch:
|
|
|
|
if len(args.stag) > 0:
|
2016-05-29 08:33:58 -05:00
|
|
|
tag = DELIMITER + ' '.join(args.stag) + DELIMITER
|
2016-05-25 06:00:14 -05:00
|
|
|
bdb.search_by_tag(tag, jsonOutput)
|
|
|
|
else:
|
|
|
|
bdb.list_tags()
|
|
|
|
|
|
|
|
# Print all records
|
|
|
|
if args.printindex is not None:
|
|
|
|
if args.printindex < 0:
|
|
|
|
printmsg('Index must be >= 0', 'ERROR')
|
|
|
|
bdb.close_quit(1)
|
|
|
|
bdb.print_bookmark(args.printindex)
|
|
|
|
|
|
|
|
# Replace a tag in DB
|
|
|
|
if args.replace is not None:
|
|
|
|
if len(args.replace) == 1:
|
|
|
|
bdb.replace_tag(args.replace[0])
|
|
|
|
else:
|
|
|
|
bdb.replace_tag(args.replace[0], args.replace[1:])
|
2016-03-25 02:52:52 -05:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Open URL in browser
|
|
|
|
if args.openurl is not None:
|
|
|
|
if args.openurl < 1:
|
|
|
|
printmsg('Index must be >= 1', 'ERROR')
|
|
|
|
bdb.close_quit(1)
|
|
|
|
bdb.browse_by_index(args.openurl)
|
2016-03-25 02:52:52 -05:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Close DB connection and quit
|
|
|
|
bdb.close_quit(0)
|