#!/usr/bin/env python3
#
# Bookmark management utility
#
# Copyright (C) 2015-2016 Arun Prakash Jana <engineerarun@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Buku. If not, see <http://www.gnu.org/licenses/>.

import sys
import os
import sqlite3
import re
import argparse
import webbrowser
import html.parser as HTMLParser
import urllib3
import requests
from urllib.parse import urlparse, unquote
import signal
import json
import logging
import inspect
import atexit

try:
    import readline
    readline
except ImportError:
    pass

__version__ = '2.6'
__author__ = 'Arun Prakash Jana <engineerarun@gmail.com>'
__license__ = 'GPLv3'

# Globals
update = False       # Update a bookmark in DB
title_in = None      # Input title specified at cmdline
tags_in = None       # Input tags specified at cmdline
desc_in = None       # Description of the bookmark
tagsearch = False    # Search bookmarks by tag
interrupted = False  # Received SIGINT
DELIM = ','          # Delimiter used to store tags in DB
SKIP_MIMES = {'.pdf', '.txt'}
http_handler = None  # urllib3 PoolManager handler
htmlparser = None    # Use a single HTML Parser instance

# Disguise as Firefox on Ubuntu
USER_AGENT = 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:48.0) Gecko/20100101 \
Firefox/48.0'

# Crypto globals
BLOCKSIZE = 65536
SALT_SIZE = 32
CHUNKSIZE = 0x80000  # Read/write 512 KB chunks

# Set up logging
logging.basicConfig(format='[%(levelname)s] %(message)s')
logger = logging.getLogger()
logdbg = logger.debug
logerr = logger.error
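
# Note on tag storage: a bookmark's tags are saved as one string wrapped in
# DELIM, e.g. ',news,tech,' for the tags 'news' and 'tech'. Matching ',tag,'
# as a substring then finds whole tags only.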


class BMHTMLParser(HTMLParser.HTMLParser):
    '''Class to parse and fetch the title
    from an HTML page, if available
    '''

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.in_title_tag = False
        self.data = ''
        self.prev_tag = None
        self.parsed_title = None

    def feed(self, data):
        self.in_title_tag = False
        self.data = ''
        self.prev_tag = None
        self.parsed_title = None
        HTMLParser.HTMLParser.feed(self, data)

    def handle_starttag(self, tag, attrs):
        self.in_title_tag = False
        if tag == 'title':
            self.in_title_tag = True
            self.prev_tag = tag

    def handle_endtag(self, tag):
        if tag == 'title':
            self.in_title_tag = False
            if self.data != '':
                self.parsed_title = self.data
                self.reset()  # We have received title data, exit parsing

    def handle_data(self, data):
        if self.prev_tag == 'title' and self.in_title_tag:
            self.data = '%s%s' % (self.data, data)

    def error(self, message):
        pass
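
# Illustrative use (a sketch; `html_text` is assumed to hold fetched HTML):
#
#     parser = BMHTMLParser()
#     parser.feed(html_text)
#     title = parser.parsed_title  # None if no <title> data was found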


class BukuCrypt:
    '''Class to handle encryption and decryption of
    the database file. Functionally a separate entity.

    Involves late imports in the static functions, but
    that saves ~100ms on each run. Given that encrypt
    and decrypt are never triggered automatically and
    only one of them runs at a time, this is a
    reasonable trade-off.
    '''

    @staticmethod
    def get_filehash(filepath):
        '''Get the SHA256 hash of a file

        :param filepath: path to the file
        :return: hash digest of the file
        '''

        from hashlib import sha256

        with open(filepath, 'rb') as fp:
            hasher = sha256()
            buf = fp.read(BLOCKSIZE)
            while len(buf) > 0:
                hasher.update(buf)
                buf = fp.read(BLOCKSIZE)

            return hasher.digest()
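
    # get_filehash() reads in BLOCKSIZE (64 KB) chunks, so even a large DB
    # file never has to fit in memory; the digest is the raw 32-byte SHA256
    # value, not a hex string.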

    @staticmethod
    def encrypt_file(iterations, dbfile=None):
        '''Encrypt the bookmarks database file

        :param iterations: number of iterations for key generation
        :param dbfile: custom database file path (including filename)
        '''

        try:
            from getpass import getpass
            import struct
            from hashlib import sha256
            from cryptography.hazmat.backends import default_backend
            from cryptography.hazmat.primitives.ciphers import (Cipher, modes,
                                                                 algorithms)
        except ImportError:
            logerr('cryptography lib(s) missing')
            sys.exit(1)

        if iterations < 1:
            logerr('Iterations must be >= 1')
            sys.exit(1)

        if not dbfile:
            dbfile = os.path.join(BukuDb.get_default_dbdir(), 'bookmarks.db')
        encfile = '%s.enc' % dbfile

        db_exists = os.path.exists(dbfile)
        enc_exists = os.path.exists(encfile)

        if db_exists and not enc_exists:
            pass
        elif not db_exists:
            logerr('%s missing. Already encrypted?', dbfile)
            sys.exit(1)
        else:
            # db_exists and enc_exists
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)

        password = getpass()
        passconfirm = getpass()
        if password == '':
            logerr('Empty password')
            sys.exit(1)
        if password != passconfirm:
            logerr('Passwords do not match')
            sys.exit(1)

        try:
            # Get SHA256 hash of DB file
            dbhash = BukuCrypt.get_filehash(dbfile)
        except Exception as e:
            logerr(e)
            sys.exit(1)

        # Generate random 256-bit salt and key
        salt = os.urandom(SALT_SIZE)
        key = ('%s%s' % (password,
               salt.decode('utf-8', 'replace'))).encode('utf-8')
        for _ in range(iterations):
            key = sha256(key).digest()

        iv = os.urandom(16)
        encryptor = Cipher(
            algorithms.AES(key),
            modes.CBC(iv),
            backend=default_backend()
        ).encryptor()
        filesize = os.path.getsize(dbfile)

        try:
            with open(dbfile, 'rb') as infp, open(encfile, 'wb') as outfp:
                outfp.write(struct.pack('<Q', filesize))
                outfp.write(salt)
                outfp.write(iv)

                # Embed DB file hash in encrypted file
                outfp.write(dbhash)

                while True:
                    chunk = infp.read(CHUNKSIZE)
                    if len(chunk) == 0:
                        break
                    elif len(chunk) % 16 != 0:
                        # Pad the last chunk with space bytes (the file is
                        # opened in binary mode) to the AES block size
                        chunk = chunk + b' ' * (16 - len(chunk) % 16)

                    outfp.write(encryptor.update(chunk))

                # Finalize once, after all chunks are processed
                outfp.write(encryptor.finalize())

            os.remove(dbfile)
            print('File encrypted')
            sys.exit(0)
        except Exception as e:
            logerr(e)
            sys.exit(1)
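
    # Layout of the '.enc' file written above and consumed by decrypt_file():
    #
    #     8 bytes   original file size, little-endian ('<Q')
    #     32 bytes  random salt
    #     16 bytes  AES CBC initialization vector
    #     32 bytes  SHA256 hash of the original DB file
    #     rest      AES-256-CBC ciphertext (last block space-padded)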

    @staticmethod
    def decrypt_file(iterations, dbfile=None):
        '''Decrypt the bookmarks database file

        :param iterations: number of iterations for key generation
        :param dbfile: custom database file path (including filename);
                       the '.enc' suffix must be omitted
        '''

        try:
            from getpass import getpass
            import struct
            from hashlib import sha256
            from cryptography.hazmat.backends import default_backend
            from cryptography.hazmat.primitives.ciphers import (Cipher, modes,
                                                                 algorithms)
        except ImportError:
            logerr('cryptography lib(s) missing')
            sys.exit(1)

        if iterations < 1:
            logerr('Iterations must be >= 1')
            sys.exit(1)

        if not dbfile:
            dbfile = os.path.join(BukuDb.get_default_dbdir(), 'bookmarks.db')
        else:
            dbfile = os.path.abspath(dbfile)
            dbpath, filename = os.path.split(dbfile)

        encfile = '%s.enc' % dbfile

        enc_exists = os.path.exists(encfile)
        db_exists = os.path.exists(dbfile)

        if enc_exists and not db_exists:
            pass
        elif not enc_exists:
            logerr('%s missing', encfile)
            sys.exit(1)
        else:
            # db_exists and enc_exists
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)

        password = getpass()
        if password == '':
            logerr('Decryption failed')
            sys.exit(1)

        try:
            with open(encfile, 'rb') as infp:
                size = struct.unpack('<Q', infp.read(struct.calcsize('Q')))[0]

                # Read 256-bit salt and generate key
                salt = infp.read(32)
                key = ('%s%s' % (password,
                       salt.decode('utf-8', 'replace'))).encode('utf-8')
                for _ in range(iterations):
                    key = sha256(key).digest()

                iv = infp.read(16)
                decryptor = Cipher(
                    algorithms.AES(key),
                    modes.CBC(iv),
                    backend=default_backend(),
                ).decryptor()

                # Get original DB file's SHA256 hash from encrypted file
                enchash = infp.read(32)

                with open(dbfile, 'wb') as outfp:
                    while True:
                        chunk = infp.read(CHUNKSIZE)
                        if len(chunk) == 0:
                            break

                        outfp.write(decryptor.update(chunk))

                    # Finalize once, after all chunks are processed
                    outfp.write(decryptor.finalize())

                    # Truncate the space padding added during encryption
                    outfp.truncate(size)

            # Match hash of generated file with that of original DB file
            dbhash = BukuCrypt.get_filehash(dbfile)
            if dbhash != enchash:
                os.remove(dbfile)
                logerr('Decryption failed')
                sys.exit(1)
            else:
                os.remove(encfile)
                print('File decrypted')
        except struct.error:
            logerr('Tainted file')
            sys.exit(1)
        except Exception as e:
            logerr(e)
            sys.exit(1)
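
# Illustrative round trip (a sketch, assuming the default DB location):
#
#     BukuCrypt.encrypt_file(8)   # bookmarks.db     -> bookmarks.db.enc
#     BukuCrypt.decrypt_file(8)   # bookmarks.db.enc -> bookmarks.db
#
# The same iteration count must be used for both calls, because the key is
# derived by hashing password+salt that many times.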


class BukuDb:

    def __init__(self, json=False, field_filter=0, immutable=-1, chatty=False,
                 dbfile=None):
        '''Database initialization API

        :param json: print results in json format
        :param field_filter: bookmark print format specifier
        :param immutable: disable title fetch from web
        :param chatty: set the verbosity of the APIs
        :param dbfile: custom database file path (including filename)
        '''

        self.conn, self.cur = BukuDb.initdb(dbfile)
        self.json = json
        self.field_filter = field_filter
        self.immutable = immutable
        self.chatty = chatty

    @staticmethod
    def get_default_dbdir():
        '''Determine the directory path where dbfile will be stored:
        if $XDG_DATA_HOME is defined, use it
        else if $HOME exists, use it
        else use the current directory

        :return: path to database file directory
        '''

        data_home = os.environ.get('XDG_DATA_HOME')
        if data_home is None:
            if os.environ.get('HOME') is None:
                return os.path.abspath('.')
            else:
                data_home = os.path.join(os.environ.get('HOME'),
                                         '.local', 'share')

        return os.path.join(data_home, 'buku')
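
    # Resolution examples (paths illustrative):
    #   XDG_DATA_HOME=/home/u/.local/share  ->  /home/u/.local/share/buku
    #   only HOME=/home/u set               ->  /home/u/.local/share/buku
    #   neither set                         ->  the current directory itself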

    @staticmethod
    def initdb(dbfile=None):
        '''Initialize the database connection. Create DB
        file and/or bookmarks table if they don't exist.
        Alert on encryption options on first execution.

        :param dbfile: custom database file path (including filename)
        :return: (connection, cursor) tuple
        '''

        if not dbfile:
            dbpath = BukuDb.get_default_dbdir()
            filename = 'bookmarks.db'
            dbfile = os.path.join(dbpath, filename)
        else:
            dbfile = os.path.abspath(dbfile)
            dbpath, filename = os.path.split(dbfile)

        encfile = dbfile + '.enc'

        try:
            if not os.path.exists(dbpath):
                os.makedirs(dbpath)
        except Exception as e:
            logerr(e)
            sys.exit(1)

        db_exists = os.path.exists(dbfile)
        enc_exists = os.path.exists(encfile)

        if db_exists and not enc_exists:
            pass
        elif enc_exists and not db_exists:
            logerr('Unlock database first')
            sys.exit(1)
        elif db_exists and enc_exists:
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)
        else:
            # not db_exists and not enc_exists
            print('DB file is being created at \x1b[1m%s\x1b[0m.' % dbfile)
            print('You should \x1b[1mencrypt it\x1b[0m later.\n')

        try:
            # Create a connection
            conn = sqlite3.connect(dbfile)
            conn.create_function('REGEXP', 2, regexp)
            cur = conn.cursor()

            # Create table if it doesn't exist
            cur.execute('CREATE TABLE if not exists bookmarks \
                        (id integer PRIMARY KEY, URL text NOT NULL UNIQUE, \
                        metadata text default \'\', tags text default \',\', \
                        desc text default \'\')')
            conn.commit()
        except Exception as e:
            _, _, linenumber, func, _, _ = inspect.stack()[0]
            logerr('%s(), ln %d: %s', func, linenumber, e)
            sys.exit(1)

        # Add description column in existing DB (from version 2.1)
        try:
            query = 'ALTER TABLE bookmarks ADD COLUMN desc text default \'\''
            cur.execute(query)
            conn.commit()
        except Exception:
            pass

        '''Add flags column in existing DB
        Introduced in v2.7 to handle immutable title
        Designed to be extended in future using bitwise masks
        Masks:
            0b00000001: set title immutable'''
        try:
            query = 'ALTER TABLE bookmarks ADD COLUMN flags integer default 0'
            cur.execute(query)
            conn.commit()
        except Exception:
            pass

        return (conn, cur)
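
    # Resulting schema, including columns added by the migrations above:
    #   id       integer  PRIMARY KEY
    #   URL      text     NOT NULL UNIQUE
    #   metadata text     page title, default ''
    #   tags     text     DELIM-wrapped tag string, default ','
    #   desc     text     description, default ''
    #   flags    integer  bitmask, default 0 (bit 0: title immutable)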

    def get_bm_by_id(self, index):
        '''Get a bookmark from database by its ID.

        :param index: DB index of the bookmark
        :return: bookmark data as a tuple, or None, if index is not found
        '''

        self.cur.execute('SELECT * FROM bookmarks WHERE id = ?', (index,))
        results = self.cur.fetchall()
        if len(results) == 0:
            return None
        else:
            return results[0]

    def get_bm_id(self, url):
        '''Check if URL already exists in DB

        :param url: URL to search
        :return: DB index if URL found, else -1
        '''

        self.cur.execute('SELECT id FROM bookmarks WHERE URL = ?', (url,))
        resultset = self.cur.fetchall()
        if len(resultset) == 0:
            return -1

        return resultset[0][0]

    def add_bm(self, url, title_in=None, tags_in=None, desc=None,
               delay_commit=False):
        '''Add a new bookmark

        :param url: URL to bookmark
        :param title_in: string title to add manually
        :param tags_in: string of comma-separated tags to add manually
        :param desc: string description
        :param delay_commit: do not commit to DB, caller responsibility
        :return: True on success, False on failure
        '''

        # Return error for empty URL
        if not url:
            logerr('Invalid URL')
            return False

        # Ensure that the URL does not exist in DB already
        id = self.get_bm_id(url)
        if id != -1:
            logerr('URL [%s] already exists at index %d', url, id)
            return False

        # Process title
        if title_in is not None:
            meta = title_in
        else:
            meta, mime, bad = network_handler(url)
            if bad:
                print('\x1b[91mMalformed URL\x1b[0m\n')
            elif mime:
                logdbg('Mime HEAD requested\n')
            elif meta == '':
                print('\x1b[91mTitle: []\x1b[0m\n')
            else:
                logdbg('Title: [%s]', meta)

        # Process tags
        if tags_in is None:
            tags_in = DELIM
        else:
            if tags_in[0] != DELIM:
                tags_in = '%s%s' % (DELIM, tags_in)
            if tags_in[-1] != DELIM:
                tags_in = '%s%s' % (tags_in, DELIM)

        # Process description
        if desc is None:
            desc = ''

        try:
            flagset = 0
            if self.immutable == 1:
                flagset |= self.immutable

            query = 'INSERT INTO bookmarks(URL, metadata, tags, desc, flags) \
                    VALUES (?, ?, ?, ?, ?)'
            self.cur.execute(query, (url, meta, tags_in, desc, flagset))
            if not delay_commit:
                self.conn.commit()
            if self.chatty:
                self.print_bm(self.cur.lastrowid)
            return True
        except Exception as e:
            _, _, linenumber, func, _, _ = inspect.stack()[0]
            logerr('%s(), ln %d: %s', func, linenumber, e)
            return False
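
    # Illustrative call (a sketch; `bdb` is assumed to be a BukuDb instance):
    #
    #     bdb.add_bm('https://example.com', title_in='Example',
    #                tags_in='news,tech', desc='sample entry')
    #
    # tags_in is normalized above so the stored value is always wrapped in
    # DELIM, i.e. ',news,tech,'.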

    def append_tag_at_index(self, index, tags_in):
        '''Append tags for bookmark at index

        :param index: int position of record, 0 for all
        :param tags_in: string of comma-separated tags to add manually
        :return: True on success, False on failure
        '''

        if index == 0:
            resp = input('Append specified tags to ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            self.cur.execute('SELECT id, tags FROM bookmarks ORDER BY id ASC')
        else:
            self.cur.execute('SELECT id, tags FROM bookmarks WHERE id = ?',
                             (index,))

        resultset = self.cur.fetchall()
        query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
        for row in resultset:
            tags = '%s%s' % (row[1], tags_in[1:])
            tags = parse_tags([tags])
            self.cur.execute(query, (tags, row[0],))
            if self.chatty:
                self.print_bm(row[0])

        self.conn.commit()
        return True

    def delete_tag_at_index(self, index, tags_in):
        '''Delete tags for bookmark at index

        :param index: int position of record, 0 for all
        :param tags_in: string of comma-separated tags to delete manually
        :return: True on success, False on failure
        '''

        tags_to_delete = tags_in.strip(DELIM).split(DELIM)

        if index == 0:
            resp = input('Delete specified tags from ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            query1 = "SELECT id, tags FROM bookmarks WHERE tags \
                     LIKE '%' || ? || '%' ORDER BY id ASC"
            query2 = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
            for tag in tags_to_delete:
                self.cur.execute(query1, (DELIM + tag + DELIM,))
                resultset = self.cur.fetchall()

                for row in resultset:
                    tags = row[1]

                    tags = tags.replace('%s%s%s' % (DELIM, tag, DELIM,), DELIM)
                    self.cur.execute(query2, (parse_tags([tags]), row[0],))
                    if self.chatty:
                        self.print_bm(row[0])

                if len(resultset):
                    self.conn.commit()
        else:
            query = 'SELECT id, tags FROM bookmarks WHERE id = ?'
            self.cur.execute(query, (index,))
            resultset = self.cur.fetchall()

            query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
            for row in resultset:
                tags = row[1]

                for tag in tags_to_delete:
                    tags = tags.replace('%s%s%s' % (DELIM, tag, DELIM,), DELIM)

                self.cur.execute(query, (parse_tags([tags]), row[0],))
                if self.chatty:
                    self.print_bm(row[0])

            self.conn.commit()

        return True

    def update_bm(self, index, url='', title_in=None, tags_in=None, desc=None,
                  append_tag=False, delete_tag=False):
        '''Update an existing record at index.
        Update all records if index is 0 and url is not specified.
        URL is an exception because URLs are unique in DB.

        :param index: int position to update, 0 for all
        :param url: bookmark address
        :param title_in: string title to add manually
        :param tags_in: string of comma-separated tags to add manually
        :param desc: string description
        :param append_tag: add tag(s) to existing tag(s)
        :param delete_tag: delete tag(s) from existing tag(s)
        :return: True on success, False on failure
        '''

        arguments = []
        query = 'UPDATE bookmarks SET'
        to_update = False
        ret = False

        # Update URL if passed as argument
        if url != '':
            if index == 0:
                logerr('All URLs cannot be the same')
                return False
            query = '%s URL = ?,' % query
            arguments += (url,)
            to_update = True

        # Update tags if passed as argument
        if tags_in is not None:
            if append_tag:
                ret = self.append_tag_at_index(index, tags_in)
            elif delete_tag:
                ret = self.delete_tag_at_index(index, tags_in)
            else:
                query = '%s tags = ?,' % query
                arguments += (tags_in,)
                to_update = True

        # Update description if passed as an argument
        if desc is not None:
            query = '%s desc = ?,' % query
            arguments += (desc,)
            to_update = True

        # Update immutable flag if passed as argument
        if self.immutable != -1:
            flagset = 1
            if self.immutable:
                query = '%s flags = flags | ?,' % query
            else:
                query = '%s flags = flags & ?,' % query
                flagset = ~flagset

            arguments += (flagset,)
            to_update = True

        # Update title
        #
        # 1. if -t has no arguments, delete existing title
        # 2. if -t has arguments, update existing title
        # 3. if -t option is omitted at cmdline:
        #    if URL is passed, update the title from web using the URL
        # 4. if no other argument (url, tag, comment, immutable) passed,
        #    update title from web using DB URL (if title is mutable)
        title_to_insert = None
        if title_in is not None:
            title_to_insert = title_in
        elif url != '':
            title_to_insert, mime, bad = network_handler(url)
            if bad:
                print('\x1b[91mMalformed URL\x1b[0m\n')
            elif mime:
                print('\x1b[91mMime head requested\x1b[0m\n')
            elif title_to_insert == '':
                print('\x1b[91mTitle: []\x1b[0m')
            else:
                logdbg('Title: [%s]', title_to_insert)
        elif not to_update and not (append_tag or delete_tag):
            ret = self.refreshdb(index)
            if ret and index and self.chatty:
                self.print_bm(index)
            return ret

        if title_to_insert is not None:
            query = '%s metadata = ?,' % query
            arguments += (title_to_insert,)
            to_update = True

        if not to_update:  # Nothing to update
            return ret

        if index == 0:  # Update all records
            resp = input('Update ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            query = query[:-1]
        else:
            query = '%s WHERE id = ?' % query[:-1]
            arguments += (index,)

        logdbg('query: "%s", args: %s', query, arguments)

        try:
            self.cur.execute(query, arguments)
            self.conn.commit()
            if self.cur.rowcount and self.chatty:
                self.print_bm(index)

            if self.cur.rowcount == 0:
                logerr('No matching index %s', index)
                return False
        except sqlite3.IntegrityError:
            logerr('URL already exists')
            return False

        return True
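
    # Immutable-bit arithmetic used above: with flagset = 1,
    # 'flags | 1' sets bit 0 (title immutable) and 'flags & ~1' clears it,
    # leaving any future flag bits untouched.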

    def refreshdb(self, index):
        '''Refresh the record at index, or all records if index is 0.
        Fetch the title for each bookmark from the web and update the
        record. Doesn't update a record if the fetched title is empty
        or the record is marked immutable.
        This API doesn't change DB index, URL or tags of a bookmark.
        This API is verbose.

        :param index: index of record to update, or 0 for all records
        '''

        if index == 0:
            self.cur.execute('SELECT id, url FROM bookmarks WHERE \
                             flags & 1 != 1 ORDER BY id ASC')
        else:
            self.cur.execute('SELECT id, url FROM bookmarks WHERE id = ? AND \
                             flags & 1 != 1', (index,))

        resultset = self.cur.fetchall()
        if not len(resultset):
            logerr('No matching index or title immutable or empty DB')
            return False

        query = 'UPDATE bookmarks SET metadata = ? WHERE id = ?'
        for row in resultset:
            title, mime, bad = network_handler(row[1])
            if bad:
                print('\x1b[1mIndex %d: malformed URL\x1b[0m\n' % row[0])
                continue
            elif mime:
                print('\x1b[1mIndex %d: mime HEAD requested\x1b[0m\n' % row[0])
                continue
            elif title == '':
                print('\x1b[1mIndex %d: no title\x1b[0m\n' % row[0])
                continue

            self.cur.execute(query, (title, row[0],))

            if self.chatty:
                print('Title: [%s]\n\x1b[92mIndex %d: updated\x1b[0m\n'
                      % (title, row[0]))
            if interrupted:
                break

        self.conn.commit()
        return True

    def searchdb(self, keywords, all_keywords=False, deep=False, regex=False):
        '''Search the database for entries with tags, URL
        or title info matching keywords and list those.

        :param keywords: keywords to search
        :param all_keywords: search any or all keywords
        :param deep: search for matching substrings
        :param regex: match a regular expression
        :return: search results, or None, if no matches
        '''

        qry = 'SELECT id, url, metadata, tags, desc FROM bookmarks WHERE'
        # Deep query string
        q1 = "(tags LIKE ('%' || ? || '%') OR URL LIKE ('%' || ? || '%') OR \
             metadata LIKE ('%' || ? || '%') OR desc LIKE ('%' || ? || '%'))"
        # Non-deep query string
        q2 = '(tags REGEXP ? OR URL REGEXP ? OR metadata REGEXP ? OR desc \
             REGEXP ?)'
        qargs = []

        if regex:
            for token in keywords:
                qry = '%s %s OR' % (qry, q2)

                qargs += (token, token, token, token,)
            qry = qry[:-3]
        elif all_keywords:
            if len(keywords) == 1 and keywords[0] == 'blank':
                qry = "SELECT * FROM bookmarks WHERE metadata = '' OR tags = ?"
                qargs += (DELIM,)
            elif len(keywords) == 1 and keywords[0] == 'immutable':
                qry = "SELECT * FROM bookmarks WHERE flags & 1 == 1"
            else:
                for token in keywords:
                    if deep:
                        qry = '%s %s AND' % (qry, q1)
                    else:
                        token = '\\b' + token + '\\b'
                        qry = '%s %s AND' % (qry, q2)

                    qargs += (token, token, token, token,)
                qry = qry[:-4]
        elif not all_keywords:
            for token in keywords:
                if deep:
                    qry = '%s %s OR' % (qry, q1)
                else:
                    token = '\\b' + token + '\\b'
                    qry = '%s %s OR' % (qry, q2)

                qargs += (token, token, token, token,)
            qry = qry[:-3]
        else:
            logerr('Invalid search option')
            return None

        qry = '%s ORDER BY id ASC' % qry
        logdbg('query: "%s", args: %s', qry, qargs)

        try:
            self.cur.execute(qry, qargs)
        except sqlite3.OperationalError as e:
            logerr(e)
            return None

        results = self.cur.fetchall()
        if len(results) == 0:
            return None

        return results
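
    # Example: searchdb(['kernel', 'debug'], all_keywords=True) with
    # deep=False builds, with q2 repeated once per keyword:
    #
    #     SELECT id, url, metadata, tags, desc FROM bookmarks WHERE
    #     (... REGEXP ...) AND (... REGEXP ...) ORDER BY id ASC
    #
    # and qargs holds '\bkernel\b' and '\bdebug\b' four times each, so only
    # whole-word matches qualify.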

    def search_by_tag(self, tag):
        '''Search and list bookmarks with a tag

        :param tag: a tag to search as string
        :return: search results, or None, if no matches
        '''

        tag = '%s%s%s' % (DELIM, tag.strip(DELIM), DELIM)
        query = "SELECT id, url, metadata, tags, desc FROM bookmarks \
                WHERE tags LIKE '%' || ? || '%' ORDER BY id ASC"
        logdbg('query: "%s", args: %s', query, tag)

        self.cur.execute(query, (tag,))
        results = self.cur.fetchall()
        if len(results) == 0:
            return None

        return results

    def compactdb(self, index, delay_commit=False):
        '''When an entry at index is deleted, move the
        last entry in DB to that index, if the deleted
        index is lower than the highest index.

        :param index: DB index of deleted entry
        :param delay_commit: do not commit to DB, caller's responsibility
        '''

        self.cur.execute('SELECT MAX(id) from bookmarks')
        results = self.cur.fetchall()
        # Return if the last index left in DB was just deleted
        if len(results) == 1 and results[0][0] is None:
            return

        query1 = 'SELECT id, URL, metadata, tags, desc, flags \
                 FROM bookmarks WHERE id = ?'
        query2 = 'DELETE FROM bookmarks WHERE id = ?'
        query3 = 'INSERT INTO bookmarks(id, URL, metadata, tags, desc, \
                 flags) VALUES (?, ?, ?, ?, ?, ?)'

        for row in results:
            if row[0] > index:
                self.cur.execute(query1, (row[0],))
                results = self.cur.fetchall()
                for row in results:
                    self.cur.execute(query2, (row[0],))
                    # Carry the flags column over so the moved record keeps
                    # its immutable bit
                    self.cur.execute(query3, (index, row[1], row[2], row[3],
                                              row[4], row[5],))
                    if not delay_commit:
                        self.conn.commit()
                    print('Index %d moved to %d' % (row[0], index))
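
    # Worked example: with ids 1..5 in the DB, deleting id 2 and calling
    # compactdb(2) finds MAX(id) == 5; since 5 > 2, row 5 is re-inserted
    # at id 2, keeping the index range contiguous (1..4).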

    def delete_bm(self, index, low=0, high=0, is_range=False,
                  delay_commit=False):
        '''Delete a single record, a range, or remove the table if index is 0

        :param index: DB index of deleted entry
        :param low: lower index of range
        :param high: higher index of range
        :param is_range: a range is passed using low and high arguments
        :param delay_commit: do not commit to DB, caller's responsibility
        :return: True on success, False on failure
        '''

        if is_range:  # Delete a range of indices
            # If range starts from 0, delete all records
            if low == 0:
                return self.cleardb()

            try:
                query = 'DELETE from bookmarks where id BETWEEN ? AND ?'
                self.cur.execute(query, (low, high))
                if not delay_commit:
                    self.conn.commit()
                print('Bookmarks from index %s to %s deleted' % (low, high))

                # Compact DB by ascending order of index to ensure
                # the existing higher indices move only once
                # Delayed commit is forced
                for index in range(low, high + 1):
                    self.compactdb(index, delay_commit=True)

                if not delay_commit:
                    self.conn.commit()
            except IndexError:
                logerr('No matching index')
                return False
        elif index == 0:  # Remove the table
            return self.cleardb()
        else:  # Remove a single entry
            try:
                query = 'DELETE FROM bookmarks WHERE id = ?'
                self.cur.execute(query, (index,))
                if not delay_commit:
                    self.conn.commit()
                if self.cur.rowcount == 1:
                    print('Removed index %d' % index)
                    self.compactdb(index, delay_commit)
                else:
                    logerr('No matching index')
                    return False
            except IndexError:
                logerr('No matching index')
                return False

        return True

    def delete_resultset(self, results):
        '''Delete search results in descending order of DB index.
        Indices are expected to be unique and in ascending order.
        This API forces a delayed commit.

        :param results: set of results to delete
        :return: True on success, False on failure
        '''

        resp = input('Delete the search results? (y/n): ')
        if resp != 'y':
            return False

        # Delete records in reverse order
        pos = len(results) - 1
        while pos >= 0:
            idx = results[pos][0]
            self.delete_bm(idx, delay_commit=True)

            # Commit at every 200th removal
            if pos % 200 == 0:
                self.conn.commit()

            pos -= 1

        return True

    def cleardb(self):
        '''Drops the bookmark table if it exists

        :return: True on success, False on failure
        '''

        resp = input('Remove ALL bookmarks? (y/n): ')
        if resp != 'y':
            print('No bookmarks deleted')
            return False

        self.cur.execute('DROP TABLE if exists bookmarks')
        self.conn.commit()
        print('All bookmarks deleted')
        return True

    def print_bm(self, index):
        '''Print bookmark details at index or all bookmarks if index is 0
        Note: URL is printed on top because title may be blank

        :param index: index to print, 0 prints all
        '''

        if index != 0:  # Show record at index
            try:
                query = 'SELECT * FROM bookmarks WHERE id = ?'
                self.cur.execute(query, (index,))
                results = self.cur.fetchall()
                if len(results) == 0:
                    logerr('No matching index')
                    return
            except IndexError:
                logerr('No matching index')
                return

            if not self.json:
                for row in results:
                    if self.field_filter == 0:
                        print_record(row)
                    elif self.field_filter == 1:
                        print('%s\t%s' % (row[0], row[1]))
                    elif self.field_filter == 2:
                        print('%s\t%s\t%s' % (row[0], row[1], row[3][1:-1]))
                    elif self.field_filter == 3:
                        print('%s\t%s' % (row[0], row[2]))
            else:
                print(format_json(results, True, self.field_filter))
        else:  # Show all entries
            self.cur.execute('SELECT * FROM bookmarks')
            resultset = self.cur.fetchall()

            if not self.json:
                if self.field_filter == 0:
                    for row in resultset:
                        print_record(row)
                elif self.field_filter == 1:
                    for row in resultset:
                        print('%s\t%s' % (row[0], row[1]))
                elif self.field_filter == 2:
                    for row in resultset:
                        print('%s\t%s\t%s' % (row[0], row[1], row[3][1:-1]))
                elif self.field_filter == 3:
                    for row in resultset:
                        print('%s\t%s' % (row[0], row[2]))
            else:
                print(format_json(resultset, field_filter=self.field_filter))

    def get_all_tags(self):
        '''Get list of tags in DB

        :return: a tuple of (list of unique tags sorted alphabetically,
                 dictionary of {tag: usage count})
        '''

        tags = []
        unique_tags = []
        dic = {}
        qry = 'SELECT DISTINCT tags, COUNT(tags) FROM bookmarks GROUP BY tags'
        for row in self.cur.execute(qry):
            tagset = row[0].strip(DELIM).split(DELIM)
            for tag in tagset:
                if tag not in tags:
                    dic[tag] = row[1]
                    tags += (tag,)
                else:
                    dic[tag] += row[1]

        if len(tags) == 0:
            return tags, dic

        if tags[0] == '':
            unique_tags = sorted(tags[1:])
        else:
            unique_tags = sorted(tags)

        return unique_tags, dic
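
    # Example: for two bookmarks tagged ',news,' and ',news,tech,',
    # get_all_tags() returns (['news', 'tech'], {'news': 2, 'tech': 1}).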

    def replace_tag(self, orig, new=None):
        '''Replace orig tags with new tags in DB for all records.
        Remove orig tag if new tag is empty.

        :param orig: original tags
        :param new: replacement tags
        :return: True on success, False on failure
        '''

        update = False
        delete = False
        newtags = DELIM

        orig = '%s%s%s' % (DELIM, orig, DELIM)
        if new is None:
            delete = True
        else:
            newtags = parse_tags(new)
            if newtags == DELIM:
                delete = True

        if orig == newtags:
            print('Tags are the same.')
            return False

        query = 'SELECT id, tags FROM bookmarks WHERE tags LIKE ?'
        self.cur.execute(query, ('%' + orig + '%',))
        results = self.cur.fetchall()

        query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
        for row in results:
            if not delete:
                # Check if tag newtags is already added
                if row[1].find(newtags) >= 0:
                    newtags = DELIM

            tags = row[1].replace(orig, newtags)
            tags = parse_tags([tags])
            self.cur.execute(query, (tags, row[0],))
            print('Index %d updated' % row[0])
            update = True

        if update:
            self.conn.commit()

        return update
def browse_by_index(self, index):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Open URL at index in browser
|
2016-05-20 17:05:25 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param index: DB index
|
|
|
|
:return: True on success, False on failure
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-03-24 13:47:57 -05:00
|
|
|
|
2016-10-01 10:29:53 -05:00
|
|
|
if index == 0:
|
2016-10-11 13:49:05 -05:00
|
|
|
query = 'SELECT id from bookmarks ORDER BY RANDOM() LIMIT 1'
|
|
|
|
self.cur.execute(query)
|
2016-10-09 15:33:59 -05:00
|
|
|
result = self.cur.fetchone()
|
2016-10-01 10:29:53 -05:00
|
|
|
|
|
|
|
# Return if no entries in DB
|
2016-10-09 15:33:59 -05:00
|
|
|
if result is None:
|
2016-11-06 07:52:12 -06:00
|
|
|
print('No bookmarks added yet ...')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-10-01 10:29:53 -05:00
|
|
|
|
2016-10-09 15:33:59 -05:00
|
|
|
index = result[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('Opening random index %d', index)
|
2016-10-01 10:29:53 -05:00
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
query = 'SELECT URL FROM bookmarks WHERE id = ?'
|
2016-05-20 17:05:25 -05:00
|
|
|
try:
|
2016-09-09 13:52:32 -05:00
|
|
|
for row in self.cur.execute(query, (index,)):
|
2016-05-20 17:05:25 -05:00
|
|
|
url = unquote(row[0])
|
2016-09-09 07:46:40 -05:00
|
|
|
open_in_browser(url)
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('No matching index')
|
2016-05-20 17:05:25 -05:00
|
|
|
except IndexError:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('No matching index')
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
|
|
|
|
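# Usage sketch for browse_by_index(); 'bdb' is a hypothetical BukuDb instance:
#   bdb.browse_by_index(15)  # open the URL at DB index 15
#   bdb.browse_by_index(0)   # index 0 opens a random bookmark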
2016-10-29 05:36:29 -05:00
|
|
|
def exportdb(self, filepath, markdown=False, taglist=None):
|
|
|
|
'''Export bookmarks to a Firefox bookmarks-formatted HTML file.
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
:param filepath: path to file to export to
|
2016-10-29 02:54:10 -05:00
|
|
|
:param markdown: use markdown syntax
|
|
|
|
:param taglist: list of specific tags to export
|
|
|
|
:return: True on success, False on failure
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-06-02 10:39:16 -05:00
|
|
|
|
|
|
|
import time
|
|
|
|
|
2016-09-20 13:02:04 -05:00
|
|
|
count = 0
|
|
|
|
timestamp = int(time.time())
|
|
|
|
arguments = []
|
|
|
|
query = 'SELECT * FROM bookmarks'
|
2016-10-26 11:17:01 -05:00
|
|
|
is_tag_valid = False
|
2016-09-20 13:02:04 -05:00
|
|
|
|
|
|
|
if taglist is not None:
|
|
|
|
tagstr = parse_tags(taglist)
|
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
if len(tagstr) == 0 or tagstr == DELIM:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Invalid tag')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-09-20 13:02:04 -05:00
|
|
|
|
|
|
|
if len(tagstr) > 0:
|
2016-10-29 05:36:29 -05:00
|
|
|
tags = tagstr.split(DELIM)
|
2016-09-20 13:02:04 -05:00
|
|
|
query = '%s WHERE' % query
|
|
|
|
for tag in tags:
|
|
|
|
if tag != '':
|
2016-10-26 11:17:01 -05:00
|
|
|
is_tag_valid = True
|
2016-09-20 13:02:04 -05:00
|
|
|
query += " tags LIKE '%' || ? || '%' OR"
|
2016-10-29 05:36:29 -05:00
|
|
|
tag = '%s%s%s' % (DELIM, tag, DELIM)
|
2016-09-20 13:02:04 -05:00
|
|
|
arguments += (tag,)
|
|
|
|
|
2016-10-26 11:17:01 -05:00
|
|
|
if is_tag_valid:
|
2016-09-20 13:02:04 -05:00
|
|
|
query = query[:-3]
|
|
|
|
else:
|
|
|
|
query = query[:-6]
|
|
|
|
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('(%s), %s', query, arguments)
|
2016-09-20 13:02:04 -05:00
|
|
|
self.cur.execute(query, arguments)
|
|
|
|
resultset = self.cur.fetchall()
|
|
|
|
|
2016-09-20 13:43:32 -05:00
|
|
|
if len(resultset) == 0:
|
|
|
|
print('No bookmarks exported')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-09-20 13:43:32 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
if os.path.exists(filepath):
|
|
|
|
resp = input('%s exists. Overwrite? (y/n): ' % filepath)
|
2016-06-02 10:39:16 -05:00
|
|
|
if resp != 'y':
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-06-02 10:39:16 -05:00
|
|
|
|
|
|
|
try:
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp = open(filepath, mode='w', encoding='utf-8')
|
2016-06-02 10:39:16 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2016-10-22 01:25:41 -05:00
|
|
|
if not markdown:
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write('''<!DOCTYPE NETSCAPE-Bookmark-file-1>
|
2016-06-02 10:39:16 -05:00
|
|
|
|
|
|
|
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">
|
|
|
|
<TITLE>Bookmarks</TITLE>
|
|
|
|
<H1>Bookmarks</H1>
|
|
|
|
|
|
|
|
<DL><p>
|
2016-11-16 09:56:26 -06:00
|
|
|
<DT><H3 ADD_DATE="%s" LAST_MODIFIED="%s" PERSONAL_TOOLBAR_FOLDER="true">\
|
|
|
|
Buku bookmarks</H3>
|
2016-06-02 10:39:16 -05:00
|
|
|
<DL><p>
|
2016-09-09 13:52:32 -05:00
|
|
|
''' % (timestamp, timestamp))
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
for row in resultset:
|
2016-10-29 05:36:29 -05:00
|
|
|
out = '%s<DT><A HREF="%s" ADD_DATE="%s" LAST_MODIFIED="%s"' \
|
|
|
|
% (' ', row[1], timestamp, timestamp)
|
|
|
|
if row[3] != DELIM:
|
2016-10-20 07:48:14 -05:00
|
|
|
out = '%s TAGS="%s"' % (out, row[3][1:-1])
|
|
|
|
out = '%s>%s</A>\n' % (out, row[2])
|
|
|
|
if row[4] != '':
|
|
|
|
out = '%s <DD>%s\n' % (out, row[4])
|
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write(out)
|
2016-10-20 07:48:14 -05:00
|
|
|
count += 1
|
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write(' </DL><p>\n</DL><p>')
|
2016-10-20 07:48:14 -05:00
|
|
|
else:
|
2016-11-06 07:52:12 -06:00
|
|
|
outfp.write('List of buku bookmarks:\n\n')
|
2016-10-20 07:48:14 -05:00
|
|
|
for row in resultset:
|
2016-10-22 15:56:27 -05:00
|
|
|
if row[2] == '':
|
|
|
|
out = '- [Untitled](%s)\n' % (row[1])
|
|
|
|
else:
|
|
|
|
out = '- [%s](%s)\n' % (row[2], row[1])
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write(out)
|
2016-10-20 07:48:14 -05:00
|
|
|
count += 1
|
2016-10-22 15:56:27 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.close()
|
2016-10-22 15:56:27 -05:00
|
|
|
print('%s exported' % count)
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
2016-06-02 10:39:16 -05:00
|
|
|
|
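# Usage sketch for exportdb(); paths and tags are illustrative:
#   bdb.exportdb('bookmarks.html')                 # Firefox-format export
#   bdb.exportdb('bookmarks.md', markdown=True)    # one '- [title](url)' per line
#   bdb.exportdb('linux.html', taglist=['linux'])  # export only matching tags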
2016-10-29 05:36:29 -05:00
|
|
|
def importdb(self, filepath, markdown=False):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Import bookmarks from an HTML file.
|
2016-05-22 16:03:47 -05:00
|
|
|
Supports Firefox, Google Chrome and IE imports
|
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
:param filepath: path to file to import
|
2016-10-29 02:54:10 -05:00
|
|
|
:param markdown: use markdown syntax
|
|
|
|
:return: True on success, False on failure
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2016-10-22 01:25:41 -05:00
|
|
|
if not markdown:
|
2016-10-20 07:48:14 -05:00
|
|
|
try:
|
|
|
|
import bs4
|
2016-10-29 04:35:44 -05:00
|
|
|
with open(filepath, mode='r', encoding='utf-8') as infp:
|
|
|
|
soup = bs4.BeautifulSoup(infp, 'html.parser')
|
2016-10-20 07:48:14 -05:00
|
|
|
except ImportError:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Beautiful Soup not found')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-10-20 07:48:14 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-22 18:36:41 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
html_tags = soup.findAll('a')
|
|
|
|
for tag in html_tags:
|
|
|
|
# Extract comment from <dd> tag
|
|
|
|
desc = None
|
|
|
|
comment_tag = tag.findNextSibling('dd')
|
|
|
|
if comment_tag:
|
|
|
|
desc = comment_tag.text[0:comment_tag.text.find('\n')]
|
2016-06-01 07:02:26 -05:00
|
|
|
|
2016-10-29 06:31:14 -05:00
|
|
|
self.add_bm(tag['href'], tag.string, ('%s%s%s' %
|
|
|
|
(DELIM, tag['tags'], DELIM))
|
|
|
|
if tag.has_attr('tags') else None,
|
2016-11-06 09:30:45 -06:00
|
|
|
desc, True)
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
self.conn.commit()
|
2016-10-29 04:35:44 -05:00
|
|
|
infp.close()
|
2016-10-20 07:48:14 -05:00
|
|
|
else:
|
2016-10-29 04:35:44 -05:00
|
|
|
with open(filepath, mode='r', encoding='utf-8') as infp:
|
|
|
|
for line in infp:
|
2016-10-22 15:56:27 -05:00
|
|
|
# Supported markdown format: [title](url)
|
|
|
|
# Find position of title end, url start delimiter combo
|
|
|
|
index = line.find('](')
|
|
|
|
if index != -1:
|
2016-11-07 14:26:03 -06:00
|
|
|
# Find title start delimiter
|
|
|
|
title_start_delim = line[:index].find('[')
|
|
|
|
# Reverse find the url end delimiter
|
|
|
|
url_end_delim = line[index + 2:].rfind(')')
|
2016-10-22 15:56:27 -05:00
|
|
|
|
|
|
|
if title_start_delim != -1 and url_end_delim > 0:
|
|
|
|
# Parse title
|
|
|
|
title = line[title_start_delim + 1:index]
|
|
|
|
# Parse url
|
|
|
|
url = line[index + 2:index + 2 + url_end_delim]
|
|
|
|
|
2016-11-06 09:30:45 -06:00
|
|
|
self.add_bm(url, title, None, None, True)
|
2016-10-22 15:56:27 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
self.conn.commit()
|
2016-10-29 04:35:44 -05:00
|
|
|
infp.close()
|
2016-10-22 01:25:41 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
|
|
|
|
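# Usage sketch for importdb(); file names are illustrative:
#   bdb.importdb('bookmarks.html')               # Firefox/Chrome/IE export file
#   bdb.importdb('bookmarks.md', markdown=True)  # lines in '[title](url)' form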
2016-10-29 05:36:29 -05:00
|
|
|
def mergedb(self, path):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Merge bookmarks from another Buku database file
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
:param path: path to DB file to merge
|
2016-10-29 02:54:10 -05:00
|
|
|
:return: True on success, False on failure
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-29 01:09:51 -05:00
|
|
|
|
|
|
|
try:
|
2016-06-11 01:03:56 -05:00
|
|
|
# Connect to input DB
|
2016-09-09 13:52:32 -05:00
|
|
|
if sys.version_info >= (3, 4, 4):
|
2016-06-11 01:03:56 -05:00
|
|
|
# Python 3.4.4 and above
|
2016-10-29 05:36:29 -05:00
|
|
|
indb_conn = sqlite3.connect('file:%s?mode=ro' % path, uri=True)
|
2016-06-11 01:03:56 -05:00
|
|
|
else:
|
2016-10-29 05:36:29 -05:00
|
|
|
indb_conn = sqlite3.connect(path)
|
2016-06-11 01:03:56 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
indb_cur = indb_conn.cursor()
|
2016-10-29 05:00:13 -05:00
|
|
|
indb_cur.execute('SELECT * FROM bookmarks')
|
2016-05-29 01:09:51 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
resultset = indb_cur.fetchall()
|
2016-05-29 01:09:51 -05:00
|
|
|
for row in resultset:
|
2016-11-06 09:30:45 -06:00
|
|
|
self.add_bm(row[1], row[2], row[3], row[4], True)
|
2016-06-01 07:02:26 -05:00
|
|
|
|
2016-10-29 05:00:13 -05:00
|
|
|
if len(resultset):
|
|
|
|
self.conn.commit()
|
2016-05-29 01:09:51 -05:00
|
|
|
|
|
|
|
try:
|
2016-10-29 04:35:44 -05:00
|
|
|
indb_cur.close()
|
|
|
|
indb_conn.close()
|
2016-05-31 12:39:34 -05:00
|
|
|
except Exception:
|
2016-05-29 01:09:51 -05:00
|
|
|
pass
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
|
|
|
|
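# Usage sketch for mergedb(); the path is illustrative:
#   bdb.mergedb('/path/to/other/bookmarks.db')  # add all records from other DB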
2016-11-12 09:47:36 -06:00
|
|
|
def shorten_url(self, index=0, url=None):
|
|
|
|
'''Shorten a URL using the tny.im URL shortening service
|
|
|
|
|
|
|
|
:param index: shorten the URL at DB index (int)
|
|
|
|
:param url: pass a URL (string)
|
|
|
|
:return: shortened url string on success, None on failure
|
|
|
|
'''
|
|
|
|
|
|
|
|
if not index and not url:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Either a valid DB index or URL required')
|
2016-11-12 09:47:36 -06:00
|
|
|
return None
|
|
|
|
|
|
|
|
if index:
|
|
|
|
self.cur.execute('SELECT url FROM bookmarks WHERE id = ?',
|
|
|
|
(index,))
|
|
|
|
results = self.cur.fetchall()
|
|
|
|
if len(results):
|
|
|
|
url = results[0][0]
|
|
|
|
else:
|
|
|
|
return None
|
|
|
|
|
|
|
|
r = requests.post(
|
|
|
|
'http://tny.im/yourls-api.php?action=shorturl&format=simple&url=' +
|
|
|
|
url,
|
|
|
|
headers={
|
|
|
|
'content-type': 'application/json',
|
|
|
|
'User-Agent': USER_AGENT
|
|
|
|
}
|
|
|
|
)
|
|
|
|
if r.status_code != 200:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('[%s] %s', r.status_code, r.reason)
|
2016-11-12 09:47:36 -06:00
|
|
|
return None
|
|
|
|
|
|
|
|
return r.text
|
|
|
|
|
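# Usage sketch for shorten_url(); the index and URL are illustrative:
#   short = bdb.shorten_url(index=10)                  # shorten URL at DB index 10
#   short = bdb.shorten_url(url='http://example.com')  # shorten a raw URL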
2016-11-20 09:10:56 -06:00
|
|
|
def fixtags(self):
|
|
|
|
'''Undocumented API to fix tags set
|
|
|
|
in earlier versions. Functionalities:
|
|
|
|
|
|
|
|
1. Remove duplicate tags
|
|
|
|
2. Sort tags
|
|
|
|
3. Use lower case to store tags
|
|
|
|
'''
|
|
|
|
|
|
|
|
to_commit = False
|
|
|
|
self.cur.execute('SELECT id, tags FROM bookmarks ORDER BY id ASC')
|
|
|
|
resultset = self.cur.fetchall()
|
|
|
|
query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
|
|
|
|
for row in resultset:
|
|
|
|
oldtags = row[1]
|
|
|
|
if oldtags == DELIM:
|
|
|
|
continue
|
|
|
|
|
|
|
|
tags = parse_tags([oldtags])
|
|
|
|
if tags == oldtags:
|
|
|
|
continue
|
|
|
|
|
|
|
|
self.cur.execute(query, (tags, row[0],))
|
|
|
|
to_commit = True
|
|
|
|
|
|
|
|
if to_commit:
|
|
|
|
self.conn.commit()
|
|
|
|
|
2016-05-31 13:18:06 -05:00
|
|
|
def close_quit(self, exitval=0):
|
2016-10-29 02:54:10 -05:00
|
|
|
'''Close a DB connection and exit
|
|
|
|
|
|
|
|
:param exitval: program exit value
|
|
|
|
'''
|
2016-05-31 13:18:06 -05:00
|
|
|
|
|
|
|
if self.conn is not None:
|
|
|
|
try:
|
|
|
|
self.cur.close()
|
|
|
|
self.conn.close()
|
2016-09-09 13:52:32 -05:00
|
|
|
except Exception:
|
|
|
|
# ignore errors here, we're closing down
|
2016-05-31 13:18:06 -05:00
|
|
|
pass
|
|
|
|
sys.exit(exitval)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-22 16:03:47 -05:00
|
|
|
# Generic functions
|
|
|
|
|
2016-11-08 11:32:45 -06:00
|
|
|
def is_bad_url(url):
|
|
|
|
'''Check if URL is malformed
|
|
|
|
This API is not bulletproof but works in most cases.
|
|
|
|
|
|
|
|
:param url: URL to scan
|
|
|
|
:return: True or False
|
|
|
|
'''
|
|
|
|
|
|
|
|
# Get the netloc token
|
|
|
|
netloc = urlparse(url).netloc
|
|
|
|
if not netloc:
|
|
|
|
# Try to prepend '//' and get netloc
|
|
|
|
netloc = urlparse('//' + url).netloc
|
|
|
|
if not netloc:
|
|
|
|
return True
|
|
|
|
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('netloc: %s', netloc)
|
2016-11-08 12:13:32 -06:00
|
|
|
|
|
|
|
# netloc cannot start or end with a '.'
|
|
|
|
if netloc.startswith('.') or netloc.endswith('.'):
|
2016-11-08 11:32:45 -06:00
|
|
|
return True
|
|
|
|
|
|
|
|
# netloc should have at least one '.'
|
|
|
|
index = netloc.rfind('.')
|
|
|
|
if index < 0:
|
|
|
|
return True
|
|
|
|
|
2016-11-08 12:13:32 -06:00
|
|
|
return False
|
2016-11-08 11:32:45 -06:00
|
|
|
|
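# Illustrative checks (assumed inputs):
#   is_bad_url('http://example.com')  # False: netloc 'example.com' looks sane
#   is_bad_url('.example.com')        # True: netloc starts with a '.'
#   is_bad_url('localhost')           # True: netloc has no '.'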
|
|
|
|
|
|
|
def is_ignored_mime(url):
|
|
|
|
'''Check if URL links to ignored mime
|
|
|
|
Only a 'HEAD' request is made for these URLs
|
|
|
|
|
|
|
|
:param url: URL to scan
|
|
|
|
:return: True or False
|
|
|
|
'''
|
|
|
|
|
|
|
|
for mime in SKIP_MIMES:
|
|
|
|
if url.lower().endswith(mime):
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
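# Illustrative checks against SKIP_MIMES ({'.pdf', '.txt'}):
#   is_ignored_mime('http://example.com/report.pdf')  # True: HEAD request only
#   is_ignored_mime('http://example.com/page')        # False: full GET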
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
def get_page_title(resp):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Invoke HTML parser and extract title from HTTP response
|
2016-04-10 07:41:00 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param resp: HTTP(S) GET response
|
2016-11-08 12:52:34 -06:00
|
|
|
:return: title fetched from parsed page
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-04-05 06:25:40 -05:00
|
|
|
|
2016-11-08 13:43:53 -06:00
|
|
|
global htmlparser
|
|
|
|
|
|
|
|
if not htmlparser:
|
|
|
|
htmlparser = BMHTMLParser()
|
|
|
|
|
2016-04-05 06:25:40 -05:00
|
|
|
try:
|
2016-11-08 13:43:53 -06:00
|
|
|
htmlparser.feed(resp.data.decode(errors='replace'))
|
2016-04-05 06:25:40 -05:00
|
|
|
except Exception as e:
|
2016-06-30 10:45:45 -05:00
|
|
|
# Suppress Exception due to intentional self.reset() in HTMLParser
|
2016-09-09 13:52:32 -05:00
|
|
|
if logger.isEnabledFor(logging.DEBUG) \
|
|
|
|
and str(e) != 'we should not get here!':
|
2016-06-30 14:31:51 -05:00
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2016-11-08 12:52:34 -06:00
|
|
|
finally:
|
2016-11-08 13:43:53 -06:00
|
|
|
return htmlparser.parsed_title
|
2016-04-05 06:25:40 -05:00
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-11-18 08:34:37 -06:00
|
|
|
def get_PoolManager():
|
|
|
|
'''Create a pool manager with proxy support, if applicable
|
2016-11-16 18:11:08 -06:00
|
|
|
|
2016-11-18 08:34:37 -06:00
|
|
|
:return: ProxyManager if https_proxy is defined, else PoolManager.
|
|
|
|
'''
|
2016-11-16 18:11:08 -06:00
|
|
|
|
2016-11-18 10:36:09 -06:00
|
|
|
proxy = os.environ.get('https_proxy')
|
2016-11-16 18:11:08 -06:00
|
|
|
|
2016-11-18 10:36:09 -06:00
|
|
|
if proxy:
|
|
|
|
headers = None
|
|
|
|
url = urlparse(proxy)
|
|
|
|
# Strip username and password and create header, if present
|
|
|
|
if url.username:
|
|
|
|
proxy = proxy.replace(url.username + ':' + url.password + '@', '')
|
|
|
|
headers = urllib3.util.make_headers(
|
|
|
|
basic_auth=url.username + ':' + url.password
|
|
|
|
)
|
|
|
|
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('proxy: [%s]', proxy)
|
2016-11-18 10:36:09 -06:00
|
|
|
return urllib3.ProxyManager(proxy, headers=headers)
|
2016-11-16 18:11:08 -06:00
|
|
|
|
2016-11-18 08:34:37 -06:00
|
|
|
return urllib3.PoolManager()
|
2016-11-16 18:11:08 -06:00
|
|
|
|
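# Proxy sketch (hypothetical credentials): with the environment variable
#   https_proxy='http://user:pass@proxy.example.com:3128'
# get_PoolManager() strips 'user:pass', builds a basic-auth header via
# urllib3.util.make_headers() and returns a ProxyManager for the proxy.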
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
def network_handler(url):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Handle server connection and redirections
|
2016-04-10 07:41:00 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param url: URL to fetch
|
2016-11-22 12:09:03 -06:00
|
|
|
:return: (title, recognized mime, bad url) tuple
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-04-05 06:25:40 -05:00
|
|
|
|
2016-11-08 12:52:34 -06:00
|
|
|
global http_handler
|
2016-10-22 08:21:46 -05:00
|
|
|
|
2016-11-08 12:52:34 -06:00
|
|
|
page_title = None
|
2016-11-07 09:13:08 -06:00
|
|
|
resp = None
|
2016-11-08 11:32:45 -06:00
|
|
|
method = 'GET'
|
2016-11-07 09:13:08 -06:00
|
|
|
|
2016-11-08 11:32:45 -06:00
|
|
|
if is_bad_url(url):
|
|
|
|
return ('', 0, 1)
|
2016-11-07 09:13:08 -06:00
|
|
|
|
2016-11-08 11:32:45 -06:00
|
|
|
if is_ignored_mime(url):
|
|
|
|
method = 'HEAD'
|
2016-11-07 09:13:08 -06:00
|
|
|
|
|
|
|
if not http_handler:
|
2016-11-15 18:29:00 -06:00
|
|
|
urllib3.disable_warnings()
|
2016-11-18 08:34:37 -06:00
|
|
|
http_handler = get_PoolManager()
|
2016-03-22 15:23:46 -05:00
|
|
|
|
|
|
|
try:
|
2016-05-31 12:39:34 -05:00
|
|
|
while True:
|
2016-11-07 09:13:08 -06:00
|
|
|
resp = http_handler.request(
|
2016-11-08 11:32:45 -06:00
|
|
|
method, url, timeout=40,
|
2016-11-07 09:13:08 -06:00
|
|
|
headers={'Accept-Encoding': 'gzip,deflate',
|
2016-11-09 11:14:31 -06:00
|
|
|
'User-Agent': USER_AGENT,
|
|
|
|
'Accept': '*/*',
|
2016-11-07 09:13:08 -06:00
|
|
|
'DNT': '1'}
|
|
|
|
)
|
2016-11-08 11:32:45 -06:00
|
|
|
|
2016-11-06 13:26:35 -06:00
|
|
|
if resp.status == 200:
|
2016-11-08 12:52:34 -06:00
|
|
|
page_title = get_page_title(resp)
|
2016-11-09 11:14:31 -06:00
|
|
|
elif resp.status == 403 and url.endswith('/'):
|
2016-11-07 23:45:24 -06:00
|
|
|
# HTTP response Forbidden
|
|
|
|
# Handle URLs in the form of https://www.domain.com/
|
|
|
|
# which fail when trying to fetch resource '/'
|
|
|
|
# retry without trailing '/'
|
2016-09-09 13:52:32 -05:00
|
|
|
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('Received status 403: retrying...')
|
2016-05-20 12:01:03 -05:00
|
|
|
# Remove trailing /
|
2016-11-09 11:14:31 -06:00
|
|
|
url = url[:-1]
|
|
|
|
resp.release_conn()
|
|
|
|
continue
|
2016-03-24 15:38:38 -05:00
|
|
|
else:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('[%s] %s', resp.status, resp.reason)
|
2016-11-07 09:13:08 -06:00
|
|
|
|
2016-11-09 11:14:31 -06:00
|
|
|
break
|
2016-03-22 15:23:46 -05:00
|
|
|
except Exception as e:
|
2016-06-30 14:31:51 -05:00
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2016-03-22 15:23:46 -05:00
|
|
|
finally:
|
2016-11-07 09:13:08 -06:00
|
|
|
if resp:
|
|
|
|
resp.release_conn()
|
2016-11-08 11:32:45 -06:00
|
|
|
if method == 'HEAD':
|
|
|
|
return ('', 1, 0)
|
2016-11-08 12:52:34 -06:00
|
|
|
if page_title is None:
|
2016-11-08 11:32:45 -06:00
|
|
|
return ('', 0, 0)
|
2016-11-08 12:52:34 -06:00
|
|
|
return (page_title.strip().replace('\n', ''), 0, 0)
|
2016-03-22 15:23:46 -05:00
|
|
|
|
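# Return value sketch (illustrative URLs):
#   network_handler('not-a-url')                 # ('', 0, 1): malformed URL
#   network_handler('http://example.com/a.pdf')  # ('', 1, 0): ignored mime
#   network_handler('http://example.com')        # ('<page title>', 0, 0) on success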
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-31 12:39:34 -05:00
|
|
|
def parse_tags(keywords=None):
|
2016-10-29 02:54:10 -05:00
|
|
|
'''Format and get tag string from tokens
|
|
|
|
|
|
|
|
:param keywords: list of tags
|
|
|
|
:return: comma-delimited string of tags
|
|
|
|
:return: just delimiter, if no keywords
|
|
|
|
:return: None, if keyword is None
|
|
|
|
'''
|
2016-05-18 22:24:46 -05:00
|
|
|
|
2016-05-31 12:39:34 -05:00
|
|
|
if keywords is None:
|
2016-05-31 16:40:51 -05:00
|
|
|
return None
|
2016-05-31 12:39:34 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
tags = DELIM
|
2016-10-22 08:21:46 -05:00
|
|
|
orig_tags = []
|
|
|
|
unique_tags = []
|
2016-05-18 22:24:46 -05:00
|
|
|
|
|
|
|
# Cleanse and get the tags
|
2016-05-29 12:13:37 -05:00
|
|
|
tagstr = ' '.join(keywords)
|
2016-10-29 05:36:29 -05:00
|
|
|
marker = tagstr.find(DELIM)
|
2016-05-29 12:13:37 -05:00
|
|
|
|
|
|
|
while marker >= 0:
|
|
|
|
token = tagstr[0:marker]
|
2016-05-31 12:39:34 -05:00
|
|
|
tagstr = tagstr[marker + 1:]
|
2016-10-29 05:36:29 -05:00
|
|
|
marker = tagstr.find(DELIM)
|
2016-05-29 12:13:37 -05:00
|
|
|
token = token.strip()
|
|
|
|
if token == '':
|
2016-05-23 10:57:06 -05:00
|
|
|
continue
|
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
tags = '%s%s%s' % (tags, token, DELIM)
|
2016-05-19 09:24:18 -05:00
|
|
|
|
2016-05-29 12:13:37 -05:00
|
|
|
tagstr = tagstr.strip()
|
|
|
|
if tagstr != '':
|
2016-10-29 05:36:29 -05:00
|
|
|
tags = '%s%s%s' % (tags, tagstr, DELIM)
|
2016-05-18 22:24:46 -05:00
|
|
|
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('keywords: %s', keywords)
|
|
|
|
logdbg('parsed tags: [%s]', tags)
|
2016-05-18 22:24:46 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
if tags == DELIM:
|
2016-05-23 10:57:06 -05:00
|
|
|
return tags
|
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
orig_tags += tags.strip(DELIM).split(DELIM)
|
2016-10-22 08:21:46 -05:00
|
|
|
for tag in orig_tags:
|
2016-11-20 00:22:28 -06:00
|
|
|
if tag.lower() not in unique_tags:
|
2016-11-20 09:10:56 -06:00
|
|
|
# Add unique tags in lower case
|
2016-11-20 00:22:28 -06:00
|
|
|
unique_tags += (tag.lower(), )
|
2016-05-19 09:24:18 -05:00
|
|
|
|
|
|
|
# Sort the tags
|
2016-11-20 00:22:28 -06:00
|
|
|
sorted_tags = sorted(unique_tags)
|
2016-05-19 09:24:18 -05:00
|
|
|
|
2016-05-19 22:44:39 -05:00
|
|
|
# Wrap with delimiter
|
2016-10-29 05:36:29 -05:00
|
|
|
return '%s%s%s' % (DELIM, DELIM.join(sorted_tags), DELIM)
|
2016-05-18 22:24:46 -05:00
|
|
|
|
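# A behavior sketch with illustrative inputs:
#   parse_tags(['linux,', 'News', ',linux'])  # ',linux,news,'
#   parse_tags([])                            # ',' (just the delimiter)
#   parse_tags()                              # None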
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-11-27 09:14:23 -06:00
|
|
|
def taglist_subprompt(obj, noninteractive=False):
|
2016-11-14 14:31:22 -06:00
|
|
|
'''Additional prompt to show unique tag list
|
|
|
|
|
|
|
|
:param obj: a valid instance of BukuDb class
|
2016-11-27 09:14:23 -06:00
|
|
|
:param noninteractive: do not seek user input
|
2016-11-14 14:31:22 -06:00
|
|
|
:return: new command string
|
|
|
|
'''
|
|
|
|
|
2016-11-20 07:31:02 -06:00
|
|
|
unique_tags, dic = obj.get_all_tags()
|
2016-11-14 14:31:22 -06:00
|
|
|
msg = '\x1b[7mbuku (? for help)\x1b[0m '
|
|
|
|
new_results = True
|
|
|
|
|
|
|
|
while True:
|
|
|
|
if new_results:
|
2016-11-15 09:44:26 -06:00
|
|
|
if len(unique_tags) == 0:
|
|
|
|
count = 0
|
|
|
|
print('0 tags')
|
|
|
|
else:
|
|
|
|
count = 1
|
|
|
|
for tag in unique_tags:
|
2016-11-20 07:31:02 -06:00
|
|
|
print('%6d. %s (%d)' % (count, tag, dic[tag]))
|
2016-11-15 09:44:26 -06:00
|
|
|
count += 1
|
2016-11-20 12:20:23 -06:00
|
|
|
print()
|
2016-11-14 14:31:22 -06:00
|
|
|
|
2016-11-27 09:14:23 -06:00
|
|
|
if noninteractive:
|
|
|
|
return
|
|
|
|
|
2016-11-14 14:31:22 -06:00
|
|
|
try:
|
|
|
|
nav = input(msg)
|
|
|
|
if not nav:
|
|
|
|
nav = input(msg)
|
|
|
|
if not nav:
|
|
|
|
# Quit on double enter
|
|
|
|
return 'q'
|
|
|
|
nav = nav.strip()
|
|
|
|
except EOFError:
|
|
|
|
return 'q'
|
|
|
|
|
|
|
|
if is_int(nav) and int(nav) > 0 and int(nav) < count:
|
|
|
|
return 't ' + unique_tags[int(nav) - 1]
|
|
|
|
elif is_int(nav):
|
2016-11-15 08:02:13 -06:00
|
|
|
print('No matching index')
|
2016-11-14 14:31:22 -06:00
|
|
|
new_results = False
|
|
|
|
elif is_int(nav[0]):
|
|
|
|
print('Invalid input')
|
|
|
|
new_results = False
|
|
|
|
elif nav == 't':
|
|
|
|
new_results = True
|
|
|
|
continue
|
|
|
|
elif (nav == 'q' or nav == 'd' or nav == '?' or
|
|
|
|
nav.startswith('s ') or nav.startswith('S ') or
|
|
|
|
nav.startswith('r ') or nav.startswith('t ')):
|
|
|
|
return nav
|
|
|
|
else:
|
|
|
|
print('Invalid input')
|
|
|
|
new_results = False
|
|
|
|
|
2016-11-16 09:56:26 -06:00
|
|
|
|
2016-11-20 12:20:23 -06:00
|
|
|
def prompt(obj, results, noninteractive=False, deep=False, subprompt=False):
|
2016-10-29 02:54:10 -05:00
|
|
|
'''Show each matching result from a search and prompt
|
|
|
|
|
2016-11-13 11:52:00 -06:00
|
|
|
:param obj: a valid instance of BukuDb class
|
|
|
|
:param results: result set from a DB query
|
2016-10-29 02:54:10 -05:00
|
|
|
:param noninteractive: do not seek user input
|
2016-11-14 12:37:22 -06:00
|
|
|
:param deep: use deep search
|
2016-11-20 12:20:23 -06:00
|
|
|
:param subprompt: jump directly to sub prompt
|
2016-10-29 02:54:10 -05:00
|
|
|
'''
|
2016-05-18 10:46:08 -05:00
|
|
|
|
2016-11-13 11:52:00 -06:00
|
|
|
if type(obj) is not BukuDb:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Not a BukuDb instance')
|
2016-10-28 09:27:46 -05:00
|
|
|
return
|
2016-05-18 10:46:08 -05:00
|
|
|
|
2016-11-13 12:40:47 -06:00
|
|
|
new_results = True
|
|
|
|
msg = '\x1b[7mbuku (? for help)\x1b[0m '
|
|
|
|
|
2016-10-28 09:27:46 -05:00
|
|
|
while True:
|
2016-11-20 12:20:23 -06:00
|
|
|
if not subprompt:
|
|
|
|
if new_results:
|
|
|
|
if results:
|
|
|
|
count = 0
|
|
|
|
|
|
|
|
for row in results:
|
|
|
|
count += 1
|
|
|
|
print_record(row, count)
|
|
|
|
else:
|
|
|
|
print('0 results')
|
2016-11-13 11:52:00 -06:00
|
|
|
|
2016-11-20 12:20:23 -06:00
|
|
|
if noninteractive:
|
|
|
|
return
|
2016-11-13 11:52:00 -06:00
|
|
|
|
2016-11-20 12:20:23 -06:00
|
|
|
try:
|
2016-11-13 12:40:47 -06:00
|
|
|
nav = input(msg)
|
2016-07-09 10:19:16 -05:00
|
|
|
if not nav:
|
2016-11-20 12:20:23 -06:00
|
|
|
nav = input(msg)
|
|
|
|
if not nav:
|
|
|
|
# Quit on double enter
|
|
|
|
break
|
|
|
|
nav = nav.strip()
|
|
|
|
except EOFError:
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
nav = 't'
|
|
|
|
subprompt = False
|
2016-07-09 10:19:16 -05:00
|
|
|
|
2016-11-14 14:31:22 -06:00
|
|
|
# list tags with 't'
|
|
|
|
if nav == 't':
|
2016-11-27 09:14:23 -06:00
|
|
|
nav = taglist_subprompt(obj, noninteractive)
|
|
|
|
if noninteractive:
|
|
|
|
return
|
2016-11-14 14:31:22 -06:00
|
|
|
|
2016-11-13 11:52:00 -06:00
|
|
|
# search ANY match with new keywords
|
2016-11-14 14:31:22 -06:00
|
|
|
if nav.startswith('s '):
|
2016-11-14 12:37:22 -06:00
|
|
|
results = obj.searchdb(nav[2:].split(), False, deep)
|
2016-11-13 11:52:00 -06:00
|
|
|
new_results = True
|
|
|
|
continue
|
|
|
|
|
|
|
|
# search ALL match with new keywords
|
2016-11-14 14:31:22 -06:00
|
|
|
if nav.startswith('S '):
|
2016-11-14 12:37:22 -06:00
|
|
|
results = obj.searchdb(nav[2:].split(), True, deep)
|
2016-11-13 11:52:00 -06:00
|
|
|
new_results = True
|
|
|
|
continue
|
|
|
|
|
|
|
|
# regular expressions search with new keywords
|
2016-11-14 14:31:22 -06:00
|
|
|
if nav.startswith('r '):
|
2016-11-13 11:52:00 -06:00
|
|
|
results = obj.searchdb(nav[2:].split(), True, regex=True)
|
|
|
|
new_results = True
|
|
|
|
continue
|
|
|
|
|
|
|
|
# tag search with new keywords
|
2016-11-14 14:31:22 -06:00
|
|
|
if nav.startswith('t '):
|
2016-11-13 11:52:00 -06:00
|
|
|
results = obj.search_by_tag(nav[2:])
|
|
|
|
new_results = True
|
|
|
|
continue
|
|
|
|
|
|
|
|
# quit with 'q'
|
|
|
|
if nav == 'q':
|
|
|
|
return
|
|
|
|
|
|
|
|
# toggle deep search with 'd'
|
|
|
|
if nav == 'd':
|
2016-11-14 12:37:22 -06:00
|
|
|
deep = not deep
|
|
|
|
if deep:
|
2016-11-13 11:52:00 -06:00
|
|
|
print('deep search on')
|
|
|
|
else:
|
|
|
|
print('deep search off')
|
|
|
|
|
|
|
|
new_results = False
|
|
|
|
continue
|
|
|
|
|
2016-11-14 10:14:23 -06:00
|
|
|
# Show help with '?'
|
|
|
|
if nav == '?':
|
|
|
|
ExtendedArgumentParser.print_prompt_help(sys.stdout)
|
|
|
|
new_results = False
|
|
|
|
continue
|
|
|
|
|
2016-11-13 11:52:00 -06:00
|
|
|
new_results = False
|
|
|
|
|
|
|
|
# Nothing to browse if there are no results
|
|
|
|
if not results:
|
|
|
|
print('Not in a search context')
|
|
|
|
continue
|
|
|
|
|
|
|
|
# open all results and re-prompt with 'a'
|
2016-10-28 09:27:46 -05:00
|
|
|
if nav == 'a':
|
|
|
|
for index in range(0, count):
|
|
|
|
try:
|
|
|
|
open_in_browser(unquote(results[index][1]))
|
|
|
|
except Exception as e:
|
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2016-09-07 12:59:49 -05:00
|
|
|
|
2016-10-28 09:27:46 -05:00
|
|
|
continue
|
2016-09-07 12:59:49 -05:00
|
|
|
|
2016-10-28 09:27:46 -05:00
|
|
|
# iterate over white-space separated indices
|
|
|
|
for nav in (' '.join(nav.split())).split():
|
|
|
|
if is_int(nav):
|
|
|
|
index = int(nav) - 1
|
|
|
|
if index < 0 or index >= count:
|
2016-11-15 09:44:26 -06:00
|
|
|
print('No matching index')
|
2016-10-28 09:27:46 -05:00
|
|
|
continue
|
|
|
|
try:
|
|
|
|
open_in_browser(unquote(results[index][1]))
|
|
|
|
except Exception as e:
|
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2016-10-28 09:27:46 -05:00
|
|
|
elif '-' in nav and is_int(nav.split('-')[0]) \
|
|
|
|
and is_int(nav.split('-')[1]):
|
|
|
|
lower = int(nav.split('-')[0])
|
|
|
|
upper = int(nav.split('-')[1])
|
|
|
|
if lower > upper:
|
|
|
|
lower, upper = upper, lower
|
|
|
|
for index in range(lower-1, upper):
|
2016-08-31 01:14:26 -05:00
|
|
|
try:
|
2016-11-15 09:44:26 -06:00
|
|
|
if 0 <= index < count:
|
|
|
|
open_in_browser(unquote(results[index][1]))
|
|
|
|
else:
|
|
|
|
print('No matching index')
|
2016-08-31 01:14:26 -05:00
|
|
|
except Exception as e:
|
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2016-10-28 09:27:46 -05:00
|
|
|
else:
|
2016-11-13 11:52:00 -06:00
|
|
|
print('Invalid input')
|
2016-10-28 09:27:46 -05:00
|
|
|
break
|
2016-05-18 10:46:08 -05:00
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-10-11 13:49:05 -05:00
|
|
|
def print_record(row, idx=0):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Print a single DB record
|
2016-10-29 02:54:10 -05:00
|
|
|
Handles both search result and individual record
|
|
|
|
|
|
|
|
:param idx: search result index. If 0, print with DB index
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-17 15:11:31 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
# Start with index and URL
|
2016-10-11 13:49:05 -05:00
|
|
|
if idx != 0:
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '\x1b[1m\x1b[93m%d. \x1b[0m\x1b[92m%s\x1b[0m \
|
|
|
|
\x1b[1m[%s]\x1b[0m\n' % (idx, row[1], row[0])
|
2016-05-17 15:11:31 -05:00
|
|
|
else:
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '\x1b[1m\x1b[93m%d. \x1b[0m\x1b[92m%s\x1b[0m' % (row[0], row[1])
|
2016-11-06 07:52:12 -06:00
|
|
|
# Indicate if record is immutable
|
|
|
|
if row[5] & 1:
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '%s \x1b[1m(L)\x1b[0m\n' % (pr)
|
2016-11-06 07:52:12 -06:00
|
|
|
else:
|
|
|
|
pr = '%s\n' % (pr)
|
2016-06-02 12:26:37 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
# Append title
|
2016-06-02 12:26:37 -05:00
|
|
|
if row[2] != '':
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '%s \x1b[91m>\x1b[0m %s\n' % (pr, row[2])
|
2016-06-02 12:26:37 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
# Append description
|
2016-06-02 12:26:37 -05:00
|
|
|
if row[4] != '':
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '%s \x1b[91m+\x1b[0m %s\n' % (pr, row[4])
|
2016-06-02 12:26:37 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
# Append tags IF not default (delimiter)
|
|
|
|
if row[3] != DELIM:
|
2016-11-08 11:40:16 -06:00
|
|
|
pr = '%s \x1b[91m#\x1b[0m %s\n' % (pr, row[3][1:-1])
|
2016-06-02 12:26:37 -05:00
|
|
|
|
2016-10-11 13:49:05 -05:00
|
|
|
print(pr)
|
2016-05-17 15:11:31 -05:00
|
|
|
|
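# Call sketch; the row tuple mirrors the bookmarks schema
# (id, URL, title, tags, description, flags) with illustrative values:
#   print_record((12, 'http://example.com', 'Example', ',web,', '', 0), idx=1)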
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-10-29 04:02:50 -05:00
|
|
|
def format_json(resultset, single_record=False, field_filter=0):
|
2016-10-29 02:54:10 -05:00
|
|
|
'''Return results in Json format
|
2016-05-16 09:39:01 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param single_record: indicates only one record
|
2016-10-29 04:02:50 -05:00
|
|
|
:param field_filter: determines fields to show
|
2016-10-29 02:54:10 -05:00
|
|
|
:return: record(s) in Json format
|
|
|
|
'''
|
2016-03-22 18:29:45 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
if single_record:
|
2016-03-22 18:29:45 -05:00
|
|
|
marks = {}
|
|
|
|
for row in resultset:
|
2016-10-29 04:02:50 -05:00
|
|
|
if field_filter == 1:
|
2016-05-22 22:39:58 -05:00
|
|
|
marks['uri'] = row[1]
|
2016-10-29 04:02:50 -05:00
|
|
|
elif field_filter == 2:
|
2016-05-22 22:39:58 -05:00
|
|
|
marks['uri'] = row[1]
|
2016-05-31 12:39:34 -05:00
|
|
|
marks['tags'] = row[3][1:-1]
|
2016-10-29 04:02:50 -05:00
|
|
|
elif field_filter == 3:
|
2016-10-09 14:56:45 -05:00
|
|
|
marks['title'] = row[2]
|
2016-03-22 18:29:45 -05:00
|
|
|
else:
|
2016-11-26 20:16:54 -06:00
|
|
|
marks['index'] = row[0]
|
2016-05-31 12:39:34 -05:00
|
|
|
marks['uri'] = row[1]
|
2016-03-22 18:29:45 -05:00
|
|
|
marks['title'] = row[2]
|
2016-05-22 22:39:58 -05:00
|
|
|
marks['description'] = row[4]
|
2016-05-31 12:39:34 -05:00
|
|
|
marks['tags'] = row[3][1:-1]
|
2016-10-29 02:54:10 -05:00
|
|
|
else:
|
|
|
|
marks = []
|
|
|
|
for row in resultset:
|
2016-10-29 04:02:50 -05:00
|
|
|
if field_filter == 1:
|
2016-10-29 02:54:10 -05:00
|
|
|
record = {'uri': row[1]}
|
2016-10-29 04:02:50 -05:00
|
|
|
elif field_filter == 2:
|
2016-10-29 02:54:10 -05:00
|
|
|
record = {'uri': row[1], 'tags': row[3][1:-1]}
|
2016-10-29 04:02:50 -05:00
|
|
|
elif field_filter == 3:
|
2016-10-29 02:54:10 -05:00
|
|
|
record = {'title': row[2]}
|
|
|
|
else:
|
2016-11-26 20:16:54 -06:00
|
|
|
record = {'index': row[0], 'uri': row[1], 'title': row[2],
|
2016-10-29 02:54:10 -05:00
|
|
|
'description': row[4], 'tags': row[3][1:-1]}
|
|
|
|
|
|
|
|
marks.append(record)
|
2015-11-05 09:18:51 -06:00
|
|
|
|
2016-03-22 18:29:45 -05:00
|
|
|
return json.dumps(marks, sort_keys=True, indent=4)
|
2015-11-08 12:56:52 -06:00
|
|
|
|
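# Output sketch with an illustrative row (id, URL, title, tags, description):
#   format_json([(1, 'http://example.com', 'Example', ',web,', '')],
#               field_filter=2)
#   # -> pretty-printed Json: [{"tags": "web", "uri": "http://example.com"}]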
2016-09-09 10:07:01 -05:00
|
|
|
|
2015-11-06 13:59:57 -06:00
|
|
|
def is_int(string):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Check if a string represents an integer
|
2016-04-10 07:41:00 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param string: input string
|
|
|
|
:return: True on success, False on exception
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-04-05 06:25:40 -05:00
|
|
|
|
2015-11-06 13:59:57 -06:00
|
|
|
try:
|
|
|
|
int(string)
|
|
|
|
return True
|
2016-05-31 12:39:34 -05:00
|
|
|
except Exception:
|
2015-11-06 13:59:57 -06:00
|
|
|
return False
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-09-09 07:46:40 -05:00
|
|
|
def open_in_browser(url):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Duplicate stdout, stderr (to suppress errors shown
|
2016-04-05 07:55:29 -05:00
|
|
|
on the terminal) and open URL in default browser
|
2015-11-06 16:32:08 -06:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
:param url: URL to open
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2015-11-08 12:56:52 -06:00
|
|
|
|
2016-09-04 13:04:31 -05:00
|
|
|
url = url.replace('%22', '\"')
|
2016-11-16 09:56:26 -06:00
|
|
|
if not urlparse(url).scheme:
|
2016-11-18 10:36:09 -06:00
|
|
|
# Prefix with 'http://' if no scheme
|
|
|
|
# Otherwise, opening in browser fails anyway
|
|
|
|
# We expect http to https redirection
|
|
|
|
# will happen for https-only websites
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('scheme missing in URI, trying http')
|
2016-11-16 09:56:26 -06:00
|
|
|
url = '%s%s' % ('http://', url)
|
2015-11-10 05:20:30 -06:00
|
|
|
|
2015-11-10 03:11:05 -06:00
|
|
|
_stderr = os.dup(2)
|
|
|
|
os.close(2)
|
|
|
|
_stdout = os.dup(1)
|
|
|
|
os.close(1)
|
|
|
|
fd = os.open(os.devnull, os.O_RDWR)
|
|
|
|
os.dup2(fd, 2)
|
|
|
|
os.dup2(fd, 1)
|
|
|
|
try:
|
|
|
|
webbrowser.open(url)
|
|
|
|
except Exception as e:
|
2016-06-30 14:31:51 -05:00
|
|
|
_, _, linenumber, func, _, _ = inspect.stack()[0]
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('%s(), ln %d: %s', func, linenumber, e)
|
2015-11-10 03:11:05 -06:00
|
|
|
finally:
|
|
|
|
os.close(fd)
|
|
|
|
os.dup2(_stderr, 2)
|
|
|
|
os.dup2(_stdout, 1)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-10-23 13:11:31 -05:00
|
|
|
def check_upstream_release():
|
|
|
|
'''Check and report the latest upstream release version'''
|
|
|
|
|
2016-11-07 23:28:24 -06:00
|
|
|
r = requests.get('https://api.github.com/repos/jarun/buku/tags?per_page=1')
|
|
|
|
if r.status_code != 200:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('[%s] %s', r.status_code, r.reason)
|
2016-10-23 13:11:31 -05:00
|
|
|
else:
|
2016-11-07 23:28:24 -06:00
|
|
|
latest = r.json()[0]['name']
|
2016-11-06 12:44:44 -06:00
|
|
|
if latest == 'v' + __version__:
|
2016-10-23 13:11:31 -05:00
|
|
|
print('This is the latest release')
|
|
|
|
else:
|
|
|
|
print('Latest upstream release is %s' % latest)
|
|
|
|
|
|
|
|
|
2016-03-16 10:10:55 -05:00
|
|
|
def sigint_handler(signum, frame):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Custom SIGINT handler'''
|
2016-04-05 07:55:29 -05:00
|
|
|
|
2016-06-01 01:10:12 -05:00
|
|
|
global interrupted
|
|
|
|
|
|
|
|
interrupted = True
|
2016-03-16 10:10:55 -05:00
|
|
|
print('\nInterrupted.', file=sys.stderr)
|
|
|
|
sys.exit(1)
|
|
|
|
|
|
|
|
signal.signal(signal.SIGINT, sigint_handler)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-09-05 03:18:21 -05:00
|
|
|
def regexp(expr, item):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Perform a regular expression search'''
|
2016-09-05 03:18:21 -05:00
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
return re.search(expr, item, re.IGNORECASE) is not None
|
2016-09-09 10:07:01 -05:00
|
|
|
|
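# regexp() backs the custom REGEXP operator in SQL queries; a registration
# sketch (the actual wiring is assumed to live in BukuDb initialization):
#   conn.create_function('regexp', 2, regexp)
#   cur.execute("SELECT * FROM bookmarks WHERE URL REGEXP ?", ('example\\.org',))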
2016-05-24 11:00:54 -05:00
|
|
|
# Custom Action classes for argparse
|
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomUpdateAction(argparse.Action):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Class to capture if optional param 'update'
|
2016-04-25 12:57:01 -05:00
|
|
|
is actually used, even if sans arguments
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-04-25 12:57:01 -05:00
|
|
|
|
2016-04-25 11:23:03 -05:00
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global update
|
|
|
|
|
|
|
|
update = True
|
|
|
|
# NOTE: the following converts a None argument to an empty array []
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTagAction(argparse.Action):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Class to capture if optional param 'tag'
|
2016-05-18 12:23:08 -05:00
|
|
|
is actually used, even if sans arguments
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-18 12:23:08 -05:00
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
2016-10-29 04:35:44 -05:00
|
|
|
global tags_in
|
2016-05-18 12:23:08 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
tags_in = [DELIM, ]
|
2016-05-18 12:23:08 -05:00
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTitleAction(argparse.Action):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Class to capture if optional param 'title'
|
2016-04-27 12:56:57 -05:00
|
|
|
is actually used, even if sans arguments
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-04-27 12:56:57 -05:00
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
2016-10-29 04:35:44 -05:00
|
|
|
global title_in
|
2016-04-27 12:56:57 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
title_in = ''
|
2016-04-27 12:56:57 -05:00
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomDescAction(argparse.Action):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Class to capture if optional param 'comment'
|
2016-05-17 15:11:31 -05:00
|
|
|
is actually used, even if sans arguments
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-17 15:11:31 -05:00
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
2016-11-08 12:52:34 -06:00
|
|
|
global desc_in
|
2016-05-17 15:11:31 -05:00
|
|
|
|
2016-11-08 12:52:34 -06:00
|
|
|
desc_in = ''
|
2016-05-17 15:11:31 -05:00
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
class CustomTagSearchAction(argparse.Action):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Class to capture if optional param 'stag'
|
2016-05-18 22:24:46 -05:00
|
|
|
is actually used, even if sans arguments
|
2016-09-09 13:52:32 -05:00
|
|
|
'''
|
2016-05-18 22:24:46 -05:00
|
|
|
|
|
|
|
def __call__(self, parser, args, values, option_string=None):
|
|
|
|
global tagsearch
|
|
|
|
|
|
|
|
tagsearch = True
|
|
|
|
setattr(args, self.dest, values)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-04-24 14:19:32 -05:00
|
|
|
class ExtendedArgumentParser(argparse.ArgumentParser):
|
2016-09-09 13:52:32 -05:00
|
|
|
'''Extend classic argument parser'''
|
2016-04-24 14:19:32 -05:00
|
|
|
|
2016-11-14 10:14:23 -06:00
|
|
|
# Print program info
|
2016-04-25 15:45:10 -05:00
|
|
|
@staticmethod
|
2016-11-14 10:14:23 -06:00
|
|
|
def print_program_info(file=None):
|
2016-05-14 10:33:21 -05:00
|
|
|
file.write('''
|
2016-07-03 05:06:50 -05:00
|
|
|
symbols:
|
|
|
|
> title
|
|
|
|
+ comment
|
|
|
|
# tags
|
|
|
|
|
2016-11-14 10:14:23 -06:00
|
|
|
|
2016-05-22 12:50:19 -05:00
|
|
|
Version %s
|
2016-11-14 10:14:23 -06:00
|
|
|
© 2015-2016 Arun Prakash Jana <engineerarun@gmail.com>
|
2016-05-14 10:33:21 -05:00
|
|
|
License: GPLv3
|
2016-08-13 14:31:35 -05:00
|
|
|
Webpage: https://github.com/jarun/Buku
|
2016-11-06 12:44:44 -06:00
|
|
|
''' % __version__)
|
2016-04-25 15:45:10 -05:00
|
|
|
|
2016-11-14 10:14:23 -06:00
|
|
|
# Print prompt help
|
|
|
|
@staticmethod
|
|
|
|
def print_prompt_help(file=None):
|
|
|
|
file.write('''
|
|
|
|
keys:
|
|
|
|
1-N browse search result indices and/or ranges
|
|
|
|
a open all results in browser
|
|
|
|
s keyword [...] search for records with ANY keyword
|
|
|
|
S keyword [...] search for records with ALL keywords
|
|
|
|
d match substrings ('pen' matches 'opened')
|
|
|
|
r expression run a regex search
|
2016-11-14 14:31:22 -06:00
|
|
|
t [...] search bookmarks by a tag or show tag list
|
|
|
|
(tag list index fetches bookmarks by tag)
|
2016-11-14 10:14:23 -06:00
|
|
|
? show this help
|
|
|
|
q, ^D, double Enter exit buku
|
|
|
|
|
|
|
|
''')
|
|
|
|
|
2016-04-25 15:45:10 -05:00
|
|
|
# Help
|
2016-04-24 14:19:32 -05:00
|
|
|
def print_help(self, file=None):
|
|
|
|
super(ExtendedArgumentParser, self).print_help(file)
|
2016-11-14 10:14:23 -06:00
|
|
|
self.print_program_info(file)
|
2016-04-05 23:55:25 -05:00
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-04-26 12:23:48 -05:00
|
|
|
# Handle piped input
|
2016-11-06 12:44:44 -06:00
|
|
|
def piped_input(argv, pipeargs=None):
|
2016-03-26 10:59:07 -05:00
|
|
|
if not sys.stdin.isatty():
|
2016-05-31 12:39:34 -05:00
|
|
|
pipeargs.extend(argv)
|
2016-03-26 10:59:07 -05:00
|
|
|
for s in sys.stdin.readlines():
|
|
|
|
pipeargs.extend(s.split())
|
|
|
|
|
2016-11-06 12:44:44 -06:00
|
|
|
|
2016-11-11 20:38:28 -06:00
|
|
|
'''main starts here'''
|
|
|
|
|
|
|
|
|
2016-11-06 12:44:44 -06:00
|
|
|
def main():
|
2016-11-08 12:52:34 -06:00
|
|
|
global tags_in, title_in, desc_in
|
2016-11-06 12:44:44 -06:00
|
|
|
|
2016-06-30 09:19:57 -05:00
|
|
|
pipeargs = []
|
2016-06-30 10:45:45 -05:00
|
|
|
atexit.register(logging.shutdown)
|
2016-06-30 09:19:57 -05:00
|
|
|
|
2016-03-26 10:59:07 -05:00
|
|
|
try:
|
2016-11-06 12:44:44 -06:00
|
|
|
piped_input(sys.argv, pipeargs)
|
2016-03-26 10:59:07 -05:00
|
|
|
except KeyboardInterrupt:
|
|
|
|
pass
|
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# If piped input, set argument vector
|
|
|
|
if len(pipeargs) > 0:
|
|
|
|
sys.argv = pipeargs
|
|
|
|
|
|
|
|
# Setup custom argument parser
|
|
|
|
argparser = ExtendedArgumentParser(
|
2016-11-26 21:30:41 -06:00
|
|
|
description='Powerful command-line bookmark manager. Your mini web!',
|
2016-05-25 06:00:14 -05:00
|
|
|
formatter_class=argparse.RawTextHelpFormatter,
|
2016-06-08 11:57:50 -05:00
|
|
|
usage='''buku [OPTIONS] [KEYWORD [KEYWORD ...]]''',
|
2016-05-25 06:00:14 -05:00
|
|
|
add_help=False
|
|
|
|
)
|
2016-11-07 11:52:54 -06:00
|
|
|
HIDE = argparse.SUPPRESS
|
|
|
|
|
|
|
|
# ---------------------
|
|
|
|
# GENERAL OPTIONS GROUP
|
|
|
|
# ---------------------
|
2016-05-25 06:00:14 -05:00
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
general_grp = argparser.add_argument_group(
|
2016-05-31 12:39:34 -05:00
|
|
|
title='general options',
|
2016-11-14 12:37:22 -06:00
|
|
|
description='''-a, --add URL [tag, ...]
|
2016-05-25 12:15:52 -05:00
|
|
|
bookmark URL with comma-separated tags
|
2016-09-09 07:17:51 -05:00
|
|
|
-u, --update [...] update fields of bookmark at DB indices
|
2016-10-11 02:39:58 -05:00
|
|
|
accepts indices and ranges
|
2016-05-25 12:15:52 -05:00
|
|
|
refresh all titles, if no arguments
|
2016-09-09 07:17:51 -05:00
|
|
|
refresh titles of bookmarks at indices,
|
|
|
|
if no edit options are specified
|
2016-06-16 16:08:38 -05:00
|
|
|
-d, --delete [...] delete bookmarks. Valid inputs: either
|
|
|
|
a hyphenated single range (100-200),
|
|
|
|
OR space-separated indices (100 15 200)
|
2016-11-26 08:28:22 -06:00
|
|
|
delete results with search options
|
2016-05-25 12:15:52 -05:00
|
|
|
delete all bookmarks, if no arguments
|
2016-05-31 00:53:01 -05:00
|
|
|
-h, --help show this information and exit''')
|
2016-09-09 13:52:32 -05:00
|
|
|
addarg = general_grp.add_argument
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('-a', '--add', nargs='+', help=HIDE)
|
|
|
|
addarg('-u', '--update', nargs='*', action=CustomUpdateAction, help=HIDE)
|
|
|
|
addarg('-d', '--delete', nargs='*', help=HIDE)
|
|
|
|
addarg('-h', '--help', action='store_true', help=HIDE)
|
|
|
|
|
|
|
|
# ------------------
|
|
|
|
# EDIT OPTIONS GROUP
|
|
|
|
# ------------------
|
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
edit_grp = argparser.add_argument_group(
|
2016-05-31 12:39:34 -05:00
|
|
|
title='edit options',
|
2016-11-26 08:28:22 -06:00
|
|
|
description='''--url keyword specify url, works only with -u option
|
|
|
|
--tag [+|-] [...] set comma-separated tags with -a and -u
|
2016-07-11 12:18:40 -05:00
|
|
|
clear tags, if no arguments
|
2016-11-12 11:06:31 -06:00
|
|
|
works with -a, -u
|
2016-11-26 08:28:22 -06:00
|
|
|
append to tags, if preceded by '+'
|
|
|
|
remove from tags, if preceded by '-'
|
2016-05-25 12:15:52 -05:00
|
|
|
-t, --title [...] manually set title, works with -a, -u
|
|
|
|
if no arguments:
|
|
|
|
-a: do not set title, -u: clear title
|
|
|
|
-c, --comment [...] description of the bookmark, works with
|
2016-11-05 17:32:03 -05:00
|
|
|
-a, -u; clears comment, if no arguments
|
2016-11-12 11:06:31 -06:00
|
|
|
--immutable N disable title fetch from web on update
|
2016-11-05 17:32:03 -05:00
|
|
|
works with -a, -u
|
|
|
|
N=0: mutable (default), N=1: immutable''')
|
2016-09-09 13:52:32 -05:00
|
|
|
addarg = edit_grp.add_argument
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('--url', nargs=1, help=HIDE)
|
|
|
|
addarg('--tag', nargs='*', action=CustomTagAction, help=HIDE)
|
|
|
|
addarg('-t', '--title', nargs='*', action=CustomTitleAction, help=HIDE)
|
|
|
|
addarg('-c', '--comment', nargs='*', action=CustomDescAction, help=HIDE)
|
|
|
|
addarg('--immutable', type=int, default=-1, choices={0, 1}, help=HIDE)
|
|
|
|
|
|
|
|
# --------------------
|
|
|
|
# SEARCH OPTIONS GROUP
|
|
|
|
# --------------------
|
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
search_grp = argparser.add_argument_group(
|
2016-05-31 12:39:34 -05:00
|
|
|
title='search options',
|
2016-05-25 06:00:14 -05:00
|
|
|
description='''-s, --sany keyword [...]
|
2016-11-25 12:10:25 -06:00
|
|
|
find records with ANY search keyword
|
2016-05-25 12:15:52 -05:00
|
|
|
-S, --sall keyword [...]
|
2016-11-25 12:10:25 -06:00
|
|
|
find records with ALL search keywords
|
2016-11-06 08:41:45 -06:00
|
|
|
special keywords -
|
2016-11-12 11:06:31 -06:00
|
|
|
"blank": entries with empty title/tag
|
|
|
|
"immutable": entries with locked title
|
2016-11-25 12:10:25 -06:00
|
|
|
--deep match substrings ('pen' matches 'opens')
|
2016-11-14 10:14:23 -06:00
|
|
|
--sreg expression run a regex search
|
2016-11-13 11:52:00 -06:00
|
|
|
--stag [...] search bookmarks by a tag
|
2016-11-25 12:10:25 -06:00
|
|
|
list all tags, if no arguments''')
|
2016-09-09 13:52:32 -05:00
|
|
|
addarg = search_grp.add_argument
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('-s', '--sany', nargs='+', help=HIDE)
|
|
|
|
addarg('-S', '--sall', nargs='+', help=HIDE)
|
|
|
|
addarg('--sreg', nargs=1, help=HIDE)
|
|
|
|
addarg('--deep', action='store_true', help=HIDE)
|
|
|
|
addarg('--stag', nargs='*', action=CustomTagSearchAction, help=HIDE)
|
|
|
|
|
|
|
|
# ------------------------
|
|
|
|
# ENCRYPTION OPTIONS GROUP
|
|
|
|
# ------------------------
|
|
|
|
|
2016-09-09 13:52:32 -05:00
|
|
|
crypto_grp = argparser.add_argument_group(
|
2016-05-31 12:39:34 -05:00
|
|
|
title='encryption options',
|
2016-05-25 06:00:14 -05:00
|
|
|
description='''-l, --lock [N] encrypt DB file with N (> 0, default 8)
|
2016-05-25 12:15:52 -05:00
|
|
|
hash iterations to generate key
|
|
|
|
-k, --unlock [N] decrypt DB file with N (> 0, default 8)
|
|
|
|
hash iterations to generate key''')
|
2016-09-09 13:52:32 -05:00
|
|
|
addarg = crypto_grp.add_argument
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('-k', '--unlock', nargs='?', type=int, const=8, help=HIDE)
|
|
|
|
addarg('-l', '--lock', nargs='?', type=int, const=8, help=HIDE)
|
|
|
|
|
|
|
|
# ----------------
|
|
|
|
# POWER TOYS GROUP
|
|
|
|
# ----------------
|
2016-09-09 13:52:32 -05:00
|
|
|
|
|
|
|
power_grp = argparser.add_argument_group(
|
2016-05-31 12:39:34 -05:00
|
|
|
title='power toys',
|
2016-09-09 13:52:32 -05:00
|
|
|
description='''-e, --export file export bookmarks to Firefox format html
|
2016-09-20 13:02:04 -05:00
|
|
|
use --tag to export only specific tags
|
2016-11-25 12:10:25 -06:00
|
|
|
-i, --import file import bookmarks from html file
|
|
|
|
FF and Google Chrome formats supported
|
2016-10-22 15:56:27 -05:00
|
|
|
--markdown use markdown with -e and -i
|
2016-11-25 12:10:25 -06:00
|
|
|
format: [title](url), 1 per line
|
|
|
|
-m, --merge file merge bookmarks from another buku DB file
|
2016-10-11 11:45:07 -05:00
|
|
|
-p, --print [...] show details of bookmark by DB index
|
|
|
|
accepts indices and ranges
|
2016-05-25 12:15:52 -05:00
|
|
|
show all bookmarks, if no arguments
|
2016-11-25 12:10:25 -06:00
|
|
|
-f, --format N limit fields in -p or Json search output
|
2016-11-12 11:06:31 -06:00
|
|
|
1: URL, 2: URL and tag, 3: title
|
2016-05-25 12:15:52 -05:00
|
|
|
-r, --replace oldtag [newtag ...]
|
|
|
|
replace oldtag with newtag everywhere
|
|
|
|
delete oldtag, if no newtag
|
2016-11-07 11:35:02 -06:00
|
|
|
-j, --json Json formatted output for -p and search
|
2016-05-28 08:15:03 -05:00
|
|
|
--noprompt do not show the prompt, run and exit
|
2016-11-25 12:10:25 -06:00
|
|
|
-o, --open [N] open bookmark at DB index N in browser
|
2016-10-01 10:29:53 -05:00
|
|
|
open a random index if N is omitted
|
2016-11-25 12:10:25 -06:00
|
|
|
--shorten N/URL fetch shortened url from tny.im service
|
2016-11-12 09:47:36 -06:00
|
|
|
accepts either a DB index or a URL
|
2016-11-06 09:30:45 -06:00
|
|
|
--tacit reduce verbosity
|
2016-10-23 13:11:31 -05:00
|
|
|
--upstream check latest upstream version available
|
2016-11-25 12:10:25 -06:00
|
|
|
-z, --debug show debug information and verbose logs''')
|
2016-09-09 13:52:32 -05:00
|
|
|
addarg = power_grp.add_argument
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('-e', '--export', nargs=1, help=HIDE)
|
|
|
|
addarg('-i', '--import', nargs=1, dest='importfile', help=HIDE)
|
|
|
|
addarg('--markdown', action='store_true', help=HIDE)
|
|
|
|
addarg('-m', '--merge', nargs=1, help=HIDE)
|
|
|
|
addarg('-p', '--print', nargs='*', help=HIDE)
|
|
|
|
addarg('-f', '--format', type=int, default=0, choices={1, 2, 3}, help=HIDE)
|
|
|
|
addarg('-r', '--replace', nargs='+', help=HIDE)
|
|
|
|
addarg('-j', '--json', action='store_true', help=HIDE)
|
|
|
|
addarg('--noprompt', action='store_true', help=HIDE)
|
|
|
|
addarg('-o', '--open', nargs='?', type=int, const=0, help=HIDE)
|
2016-11-12 09:47:36 -06:00
|
|
|
addarg('--shorten', nargs=1, help=HIDE)
|
2016-11-07 11:52:54 -06:00
|
|
|
addarg('--tacit', action='store_true', help=HIDE)
|
|
|
|
addarg('--upstream', action='store_true', help=HIDE)
|
|
|
|
addarg('-z', '--debug', action='store_true', help=HIDE)
|
2016-11-20 09:10:56 -06:00
|
|
|
# Undocumented API
|
|
|
|
addarg('--fixtags', action='store_true', help=HIDE)
|
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|
|
# Show help and exit if no arguments
|
|
|
|
if len(sys.argv) < 2:
|
2016-11-04 10:36:56 -05:00
|
|
|
argparser.print_help(sys.stdout)
|
2016-05-25 06:00:14 -05:00
|
|
|
sys.exit(1)
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Parse the arguments
|
|
|
|
args = argparser.parse_args()
|
|
|
|
|
|
|
|
# Show help and exit if help requested
|
2016-05-31 12:39:34 -05:00
|
|
|
if args.help:
|
2016-11-04 10:36:56 -05:00
|
|
|
argparser.print_help(sys.stdout)
|
2016-05-25 06:00:14 -05:00
|
|
|
sys.exit(0)
|
|
|
|
|
|
|
|
# Assign the values to globals
|
2016-10-29 04:35:44 -05:00
|
|
|
if tags_in is not None and len(args.tag) > 0:
|
|
|
|
tags_in = args.tag
|
|
|
|
if title_in is not None and len(args.title) > 0:
|
|
|
|
title_in = ' '.join(args.title)
|
2016-11-08 12:52:34 -06:00
|
|
|
if desc_in is not None and len(args.comment) > 0:
|
|
|
|
desc_in = ' '.join(args.comment)
|
2016-06-30 10:45:45 -05:00
|
|
|
if args.debug:
|
|
|
|
logger.setLevel(logging.DEBUG)
|
2016-11-22 12:09:03 -06:00
|
|
|
logdbg('Version %s', __version__)
|
2016-11-07 09:13:08 -06:00
|
|
|
else:
|
|
|
|
logging.disable(logging.WARNING)
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Handle encrypt/decrypt options at top priority
|
2016-11-07 11:35:02 -06:00
|
|
|
if args.lock is not None:
|
|
|
|
BukuCrypt.encrypt_file(args.lock)
|
2015-12-22 12:10:24 -06:00
|
|
|
|
2016-11-07 11:35:02 -06:00
|
|
|
if args.unlock is not None:
|
|
|
|
BukuCrypt.decrypt_file(args.unlock)
|
2015-12-22 12:10:24 -06:00
|
|
|
|
2016-11-06 09:30:45 -06:00
|
|
|
# Initialize the database and get handles, set verbose by default
|
2016-11-11 20:38:28 -06:00
|
|
|
bdb = BukuDb(args.json, args.format, args.immutable, not args.tacit)
|
2016-05-22 15:24:24 -05:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Add a record
|
2016-11-07 11:35:02 -06:00
|
|
|
if args.add is not None:
|
2016-05-22 15:24:24 -05:00
|
|
|
# Parse tags into a comma-separated string
|
2016-10-29 05:36:29 -05:00
|
|
|
tags = DELIM
|
2016-11-07 11:35:02 -06:00
|
|
|
keywords = args.add
|
2016-10-29 04:35:44 -05:00
|
|
|
if tags_in is not None:
|
|
|
|
if tags_in[0] == '+' and len(tags_in) == 1:
|
2016-06-12 05:30:54 -05:00
|
|
|
pass
|
2016-10-29 04:35:44 -05:00
|
|
|
elif tags_in[0] == '+':
|
2016-11-15 08:02:13 -06:00
|
|
|
# The case: buku -a url tag1, tag2 --tag + tag3, tag4
|
2016-10-29 04:35:44 -05:00
|
|
|
tags_in = tags_in[1:]
|
2016-11-07 11:35:02 -06:00
|
|
|
# In case of add, args.add may have URL followed by tags
|
2016-10-29 05:36:29 -05:00
|
|
|
# Add delimiter as url+tags may not end with one
|
2016-11-07 11:35:02 -06:00
|
|
|
keywords = args.add + [DELIM] + tags_in
|
2016-06-12 05:30:54 -05:00
|
|
|
else:
|
2016-11-07 11:35:02 -06:00
|
|
|
keywords = args.add + [DELIM] + tags_in
|
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|
|
if len(keywords) > 1:
|
|
|
|
tags = parse_tags(keywords[1:])
|
|
|
|
|
2016-11-08 12:52:34 -06:00
|
|
|
bdb.add_bm(args.add[0], title_in, tags, desc_in)
|
2016-05-25 06:00:14 -05:00
|
|
|
|
|
|

    # Update record
    if update:
        if args.url is not None:
            url_in = args.url[0]
        else:
            url_in = ''

        append = False
        delete = False
        if tags_in is not None:
            if (tags_in[0] == '+' or tags_in[0] == '-') \
                    and len(tags_in) == 1:
                logerr('Please specify a tag')
                bdb.close_quit(1)
            elif tags_in[0] == '+':
                tags_in = tags_in[1:]
                append = True
            elif tags_in[0] == '-':
                tags_in = tags_in[1:]
                delete = True

        # Parse tags into a comma-separated string
        tags = parse_tags(tags_in)

        if len(args.update) == 0:
            bdb.update_bm(0, url_in, title_in, tags, desc_in, append, delete)
        else:
            for idx in args.update:
                if is_int(idx):
                    bdb.update_bm(int(idx), url_in, title_in, tags, desc_in,
                                  append, delete)
                elif '-' in idx and is_int(idx.split('-')[0]) \
                        and is_int(idx.split('-')[1]):
                    lower = int(idx.split('-')[0])
                    upper = int(idx.split('-')[1])
                    if lower > upper:
                        lower, upper = upper, lower

                    # Update only once if range starts from 0 (all)
                    if lower == 0:
                        bdb.update_bm(0, url_in, title_in, tags, desc_in,
                                      append, delete)
                    else:
                        for _id in range(lower, upper + 1):
                            bdb.update_bm(_id, url_in, title_in, tags, desc_in,
                                          append, delete)
                            if interrupted:
                                break

                if interrupted:
                    break
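
    # A minimal illustration of the update flow above (comment only, not
    # executed); indices are hypothetical:
    #   buku -u 100-102 --tag + privacy    # append tag to records 100-102
    #   buku -u --tag - privacy            # index 0 = all: delete the tag
    # SIGINT sets the interrupted global, so both loops bail out early.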

    # Search operations
    search_results = None
    search_opted = True

    if args.sany is not None:
        # Search URLs, titles, tags for any keyword
        search_results = bdb.searchdb(args.sany, False, args.deep)
    elif args.sall is not None:
        # Search URLs, titles, tags with all keywords
        search_results = bdb.searchdb(args.sall, True, args.deep)
    elif args.sreg is not None:
        # Run a regular expression search
        search_results = bdb.searchdb(args.sreg, regex=True)
    elif tagsearch:
        # Search bookmarks by tag
        if len(args.stag) > 0:
            search_results = bdb.search_by_tag(' '.join(args.stag))
        else:
            # Use sub prompt to list all tags
            prompt(bdb, None, subprompt=True)
            search_opted = False
    else:
        search_opted = False
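
    # Minimal search examples (comment only, not executed), assuming the
    # usual buku short options map to sany/sall:
    #   buku -s kernel debugging    # match any keyword
    #   buku -S kernel debugging    # match all keywords
    #   buku --sreg 'https?://'     # regular expression search
    #   buku --stag                 # no tag given: sub prompt lists all tags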

    if search_results:
        oneshot = args.noprompt
        # In case of search and delete, prompt should be non-interactive
        if args.delete is not None and len(args.delete) == 0:
            oneshot = True

        if not args.json:
            prompt(bdb, search_results, oneshot, args.deep)
        else:
            # Printing in JSON format is non-interactive
            print(format_json(search_results, field_filter=args.format))

        # Delete search results if opted
        if args.delete is not None and len(args.delete) == 0:
            bdb.delete_resultset(search_results)

    # Delete record(s)
    if args.delete is not None:
        if len(args.delete) == 0:
            # Attempt delete-all only if search was not opted
            if not search_opted:
                bdb.cleardb()
        elif len(args.delete) == 1 and '-' in args.delete[0]:
            vals = str(args.delete[0]).split('-')
            if len(vals) == 2 and is_int(vals[0]) and is_int(vals[1]):
                if int(vals[0]) == int(vals[1]):
                    bdb.delete_bm(int(vals[0]))
                elif int(vals[0]) < int(vals[1]):
                    bdb.delete_bm(0, int(vals[0]), int(vals[1]), True)
                else:
                    bdb.delete_bm(0, int(vals[1]), int(vals[0]), True)
            else:
                logerr('Incorrect index or range')
                bdb.close_quit(1)
        else:
            ids = []
            # Select the unique indices
            for idx in args.delete:
                if idx not in ids:
                    ids += (idx,)

            try:
                # Index delete order - highest to lowest
                ids.sort(key=lambda x: int(x), reverse=True)
                for idx in ids:
                    bdb.delete_bm(int(idx))
            except ValueError:
                logerr('Incorrect index or range')
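
    # Minimal delete examples (comment only, not executed); indices are
    # hypothetical:
    #   buku -d              # delete all (only if no search ran above)
    #   buku -d 100-200      # delete a range of indices
    #   buku -d 100 15 200   # delete specific indices
    # Sorting highest-to-lowest means earlier deletions do not disturb the
    # indices still pending in the list.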

    # Print records
    if args.print is not None:
        if len(args.print) == 0:
            bdb.print_bm(0)
        else:
            for idx in args.print:
                if is_int(idx):
                    bdb.print_bm(int(idx))
                elif '-' in idx and is_int(idx.split('-')[0]) \
                        and is_int(idx.split('-')[1]):
                    lower = int(idx.split('-')[0])
                    upper = int(idx.split('-')[1])
                    if lower > upper:
                        lower, upper = upper, lower
                    for _id in range(lower, upper + 1):
                        bdb.print_bm(_id)
                else:
                    logerr('Invalid index or range')
                    bdb.close_quit(1)
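
    # Minimal print examples (comment only, not executed); indices are
    # hypothetical:
    #   buku -p        # index 0: print all records
    #   buku -p 20-30  # print records 20 through 30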

    # Replace a tag in DB
    if args.replace is not None:
        if len(args.replace) == 1:
            bdb.replace_tag(args.replace[0])
        else:
            bdb.replace_tag(args.replace[0], args.replace[1:])
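
    # Minimal tag replace examples (comment only, not executed); tags are
    # hypothetical:
    #   buku --replace oldtag            # one arg: drop the tag everywhere
    #   buku --replace oldtag new tag    # replace 'oldtag' with 'new tag'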

    # Export bookmarks
    if args.export is not None:
        if args.tag is None:
            bdb.exportdb(args.export[0], args.markdown)
        elif len(args.tag) == 0:
            logerr('Missing tag')
        else:
            bdb.exportdb(args.export[0], args.markdown, args.tag)
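
    # Minimal export examples (comment only, not executed); file names and
    # tags are hypothetical:
    #   buku -e bookmarks.html                  # export all records
    #   buku -e bookmarks.md --markdown         # export in markdown format
    #   buku -e linux.html --tag linux, kernel  # export records with tags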

    # Import bookmarks
    if args.importfile is not None:
        bdb.importdb(args.importfile[0], args.markdown)

    # Merge a database file and exit
    if args.merge is not None:
        bdb.mergedb(args.merge[0])
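
    # Minimal import/merge examples (comment only, not executed); file names
    # are hypothetical:
    #   buku -i bookmarks.html   # import from an exported HTML file
    #   buku --merge other.db    # merge records from another buku database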

    # Open URL in browser
    if args.open is not None:
        if args.open < 0:
            logerr('Index must be >= 0')
            bdb.close_quit(1)
        bdb.browse_by_index(args.open)
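
    # Minimal open example (comment only, not executed); the index is
    # hypothetical:
    #   buku -o 20   # open the URL of record 20 in the default browser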

    # Shorten URL
    if args.shorten and len(args.shorten):
        if is_int(args.shorten[0]):
            shorturl = bdb.shorten_url(index=int(args.shorten[0]))
        else:
            shorturl = bdb.shorten_url(url=args.shorten[0])

        if shorturl:
            print(shorturl)
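
    # Minimal shorten examples (comment only, not executed); the index and
    # URL are hypothetical:
    #   buku --shorten 20                   # shorten the URL at index 20
    #   buku --shorten http://example.com   # shorten a raw URL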

    # Report upstream version
    if args.upstream:
        check_upstream_release()

    # Fix tags
    if args.fixtags:
        bdb.fixtags()

    # Close DB connection and quit
    bdb.close_quit(0)


if __name__ == '__main__':
    main()