#!/usr/bin/env python3
#
# Bookmark management utility
#
# Copyright © 2015-2017 Arun Prakash Jana <engineerarun@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Buku. If not, see <http://www.gnu.org/licenses/>.

import argparse
import collections
import html.parser as HTMLParser
import json
import logging
import os
import re
try:
    import readline
    readline  # reference the module to placate unused-import linters
except ImportError:
    pass
import requests
import signal
import sqlite3
import sys
import threading
import time
import urllib3
from urllib3.util import parse_url, make_headers
import webbrowser

__version__ = '3.4'
__author__ = 'Arun Prakash Jana <engineerarun@gmail.com>'
__license__ = 'GPLv3'

# Global variables
interrupted = False  # Received SIGINT
DELIM = ','  # Delimiter used to store tags in DB
SKIP_MIMES = {'.pdf', '.txt'}
promptmsg = 'buku (? for help): '  # Prompt message string

# Default format specifiers to print records
ID_str = '%d. %s [%s]\n'
ID_DB_str = '%d. %s'
MUTE_str = '%s (L)\n'
URL_str = '   > %s\n'
DESC_str = '   + %s\n'
TAG_str = '   # %s\n'

# colormap for color output from "googler" project
COLORMAP = {k: '\x1b[%sm' % v for k, v in {
    'a': '30', 'b': '31', 'c': '32', 'd': '33',
    'e': '34', 'f': '35', 'g': '36', 'h': '37',
    'i': '90', 'j': '91', 'k': '92', 'l': '93',
    'm': '94', 'n': '95', 'o': '96', 'p': '97',
    'A': '30;1', 'B': '31;1', 'C': '32;1', 'D': '33;1',
    'E': '34;1', 'F': '35;1', 'G': '36;1', 'H': '37;1',
    'I': '90;1', 'J': '91;1', 'K': '92;1', 'L': '93;1',
    'M': '94;1', 'N': '95;1', 'O': '96;1', 'P': '97;1',
    'x': '0', 'X': '1', 'y': '7', 'Y': '7;1', 'z': '2',
}.items()}
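
# Illustrative note (editor's sketch, not part of the original module): each
# COLORMAP key expands to an ANSI SGR escape sequence, e.g.
#
#   COLORMAP['j'] == '\x1b[91m'   # bright red foreground
#   COLORMAP['x'] == '\x1b[0m'    # reset all attributes
#
# so a colorscheme string can be translated one letter at a time.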

USER_AGENT = 'Buku/{} (textmode; Linux 4.4.0-96-generic x86_64; 1024x768)'.format(__version__)
myheaders = None  # Default dictionary of headers
myproxy = None  # Default proxy

# Set up logging
logger = logging.getLogger()
logdbg = logger.debug
logerr = logger.error


class BukuHTMLParser(HTMLParser.HTMLParser):
    """Class to parse and fetch the title from an HTML page, if available.

    .. note:: The methods in this class are custom implementations of the
              HTMLParser object.

              See docs https://docs.python.org/3/library/html.parser.html.

    Attributes
    ----------
    in_title_tag : bool
        True if the current HTML tag is a <title> tag. Initial value is False.
    data : str
        Accumulated title data. Initial value is empty string.
    prev_tag : None or str
        Previous tag seen. Initial value is None.
    parsed_title : None or str
        The parsed title from a title tag. Initial value is None.
    """

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.in_title_tag = False
        self.data = ''
        self.prev_tag = None
        self.parsed_title = None

    def handle_starttag(self, tag, attrs):
        self.in_title_tag = False
        if tag == 'title':
            self.in_title_tag = True
            self.prev_tag = tag

    def handle_endtag(self, tag):
        if tag == 'title':
            self.in_title_tag = False
            if self.data != '':
                self.parsed_title = self.data
                self.reset()  # We have received title data, exit parsing

    def handle_data(self, data):
        if self.prev_tag == 'title' and self.in_title_tag:
            self.data += data

    def error(self, message):
        pass
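
# A minimal usage sketch (editor's illustration; buku itself feeds the parser
# chunks of a fetched page in the same way):
#
#   parser = BukuHTMLParser()
#   parser.feed('<html><head><title>Example Domain</title></head></html>')
#   parser.parsed_title  # -> 'Example Domain'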


class BukuCrypt:
    """Class to handle encryption and decryption of the database file.
    Functionally a separate entity.

    Involves late imports in the static functions, but that saves ~100ms on
    every other invocation. Given that encrypt/decrypt are not done
    automatically and only one of them is called at a time, this doesn't
    seem to be an outrageous approach.
    """

    # Crypto constants
    BLOCKSIZE = 0x10000  # 64 KB blocks
    SALT_SIZE = 0x20
    CHUNKSIZE = 0x80000  # Read/write 512 KB chunks

    @staticmethod
    def get_filehash(filepath):
        """Get the SHA256 hash of a file.

        Parameters
        ----------
        filepath : str
            Path to the file.

        Returns
        -------
        hash : bytes
            Hash digest of the file.
        """

        from hashlib import sha256

        with open(filepath, 'rb') as fp:
            hasher = sha256()
            buf = fp.read(BukuCrypt.BLOCKSIZE)
            while len(buf) > 0:
                hasher.update(buf)
                buf = fp.read(BukuCrypt.BLOCKSIZE)

            return hasher.digest()

    @staticmethod
    def encrypt_file(iterations, dbfile=None):
        """Encrypt the bookmarks database file.

        Parameters
        ----------
        iterations : int
            Number of iterations for key generation.
        dbfile : str, optional
            Custom database file path (including filename).
        """

        try:
            from cryptography.hazmat.backends import default_backend
            from cryptography.hazmat.primitives.ciphers import (Cipher, modes, algorithms)
            from getpass import getpass
            from hashlib import sha256
            import struct
        except ImportError:
            logerr('cryptography lib(s) missing')
            sys.exit(1)

        if iterations < 1:
            logerr('Iterations must be >= 1')
            sys.exit(1)

        if not dbfile:
            dbfile = os.path.join(BukuDb.get_default_dbdir(), 'bookmarks.db')
        encfile = dbfile + '.enc'

        db_exists = os.path.exists(dbfile)
        enc_exists = os.path.exists(encfile)

        if db_exists and not enc_exists:
            pass
        elif not db_exists:
            logerr('%s missing. Already encrypted?', dbfile)
            sys.exit(1)
        else:
            # db_exists and enc_exists
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)

        password = getpass()
        passconfirm = getpass()
        if not password or not passconfirm:
            logerr('Empty password')
            sys.exit(1)
        if password != passconfirm:
            logerr('Passwords do not match')
            sys.exit(1)

        try:
            # Get SHA256 hash of DB file
            dbhash = BukuCrypt.get_filehash(dbfile)
        except Exception as e:
            logerr(e)
            sys.exit(1)

        # Generate random 256-bit salt and key
        salt = os.urandom(BukuCrypt.SALT_SIZE)
        key = ('%s%s' % (password, salt.decode('utf-8', 'replace'))).encode('utf-8')
        for _ in range(iterations):
            key = sha256(key).digest()

        iv = os.urandom(16)
        encryptor = Cipher(
            algorithms.AES(key),
            modes.CBC(iv),
            backend=default_backend()
        ).encryptor()
        filesize = os.path.getsize(dbfile)

        try:
            with open(dbfile, 'rb') as infp, open(encfile, 'wb') as outfp:
                outfp.write(struct.pack('<Q', filesize))
                outfp.write(salt)
                outfp.write(iv)

                # Embed DB file hash in encrypted file
                outfp.write(dbhash)

                while True:
                    chunk = infp.read(BukuCrypt.CHUNKSIZE)
                    if len(chunk) == 0:
                        break
                    elif len(chunk) % 16 != 0:
                        # Pad the final chunk to the AES block size
                        # (must be bytes; str formatting would corrupt it)
                        chunk = chunk + b' ' * (16 - len(chunk) % 16)

                    outfp.write(encryptor.update(chunk))

                # finalize() may be called only once, after the last chunk
                outfp.write(encryptor.finalize())

            os.remove(dbfile)
            print('File encrypted')
            sys.exit(0)
        except Exception as e:
            logerr(e)
            sys.exit(1)
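
    # Key-derivation sketch (editor's illustration of the scheme above: it is
    # iterated SHA256 over password + salt text, not a standard KDF such as
    # PBKDF2):
    #
    #   key = (password + salt.decode('utf-8', 'replace')).encode('utf-8')
    #   for _ in range(iterations):
    #       key = sha256(key).digest()  # 32-byte digest -> AES-256 key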

    @staticmethod
    def decrypt_file(iterations, dbfile=None):
        """Decrypt the bookmarks database file.

        Parameters
        ----------
        iterations : int
            Number of iterations for key generation.
        dbfile : str, optional
            Custom database file path (including filename).
            The '.enc' suffix must be omitted.
        """

        try:
            from cryptography.hazmat.backends import default_backend
            from cryptography.hazmat.primitives.ciphers import (Cipher, modes, algorithms)
            from getpass import getpass
            from hashlib import sha256
            import struct
        except ImportError:
            logerr('cryptography lib(s) missing')
            sys.exit(1)

        if iterations < 1:
            logerr('Decryption failed')
            sys.exit(1)

        if not dbfile:
            dbfile = os.path.join(BukuDb.get_default_dbdir(), 'bookmarks.db')
        else:
            dbfile = os.path.abspath(dbfile)
            dbpath, filename = os.path.split(dbfile)

        encfile = dbfile + '.enc'

        enc_exists = os.path.exists(encfile)
        db_exists = os.path.exists(dbfile)

        if enc_exists and not db_exists:
            pass
        elif not enc_exists:
            logerr('%s missing', encfile)
            sys.exit(1)
        else:
            # db_exists and enc_exists
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)

        password = getpass()
        if not password:
            logerr('Decryption failed')
            sys.exit(1)

        try:
            with open(encfile, 'rb') as infp:
                size = struct.unpack('<Q', infp.read(struct.calcsize('Q')))[0]

                # Read 256-bit salt and generate key
                salt = infp.read(32)
                key = ('%s%s' % (password, salt.decode('utf-8', 'replace'))).encode('utf-8')
                for _ in range(iterations):
                    key = sha256(key).digest()

                iv = infp.read(16)
                decryptor = Cipher(
                    algorithms.AES(key),
                    modes.CBC(iv),
                    backend=default_backend(),
                ).decryptor()

                # Get original DB file's SHA256 hash from encrypted file
                enchash = infp.read(32)

                with open(dbfile, 'wb') as outfp:
                    while True:
                        chunk = infp.read(BukuCrypt.CHUNKSIZE)
                        if len(chunk) == 0:
                            break

                        outfp.write(decryptor.update(chunk))

                    # finalize() may be called only once, after the last chunk
                    outfp.write(decryptor.finalize())
                    outfp.truncate(size)

            # Match hash of generated file with that of original DB file
            dbhash = BukuCrypt.get_filehash(dbfile)
            if dbhash != enchash:
                os.remove(dbfile)
                logerr('Decryption failed')
                sys.exit(1)
            else:
                os.remove(encfile)
                print('File decrypted')
        except struct.error:
            logerr('Tainted file')
            sys.exit(1)
        except Exception as e:
            logerr(e)
            sys.exit(1)


def import_md(filepath, newtag):
    """Parse bookmark markdown file.

    Parameters
    ----------
    filepath : str
        Path to markdown file.
    newtag : str
        New tag for bookmarks in markdown file.

    Yields
    ------
    tuple
        Parsed result.
    """

    with open(filepath, mode='r', encoding='utf-8') as infp:
        for line in infp:
            # Supported markdown format: [title](url)
            # Find position of title end, url start delimiter combo
            index = line.find('](')
            if index != -1:
                # Find title start delimiter
                title_start_delim = line[:index].find('[')
                # Reverse find the url end delimiter
                url_end_delim = line[index + 2:].rfind(')')

                if title_start_delim != -1 and url_end_delim > 0:
                    # Parse title
                    title = line[title_start_delim + 1:index]
                    # Parse url
                    url = line[index + 2:index + 2 + url_end_delim]
                    if is_nongeneric_url(url):
                        continue

                    yield (
                        url, title, delim_wrap(newtag)
                        if newtag else None, None, 0, True
                    )
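

# Illustrative sketch (editor's note): given a markdown file containing
#
#   [Example Domain](https://example.com)
#
# import_md(filepath, newtag='imported') yields tuples shaped like
#
#   ('https://example.com', 'Example Domain', ',imported,', None, 0, True)
#
# assuming delim_wrap() (defined later in this module) wraps a tag in commas.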


def import_html(html_soup, add_parent_folder_as_tag, newtag):
    """Parse bookmark html.

    Parameters
    ----------
    html_soup : BeautifulSoup object
        BeautifulSoup representation of bookmark html.
    add_parent_folder_as_tag : bool
        True if bookmark parent folders should be added as tags else False.
    newtag : str
        A new unique tag to add to imported bookmarks.

    Yields
    ------
    tuple
        Parsed result.
    """

    # compatibility
    soup = html_soup

    for tag in soup.findAll('a'):
        # Extract comment from <dd> tag
        try:
            if is_nongeneric_url(tag['href']):
                continue
        except KeyError:
            continue

        desc = None
        comment_tag = tag.findNextSibling('dd')

        if comment_tag:
            desc = comment_tag.find(text=True, recursive=False)

        # add parent folder as tag
        if add_parent_folder_as_tag:
            # could be its folder or not
            possible_folder = tag.find_previous('h3')
            # get list of tags within that folder
            tag_list = tag.parent.parent.find_parent('dl')

            if possible_folder and possible_folder.parent in list(tag_list.parents):
                # then it's the folder of this bookmark
                if tag.has_attr('tags'):
                    tag['tags'] += (DELIM + possible_folder.text)
                else:
                    tag['tags'] = possible_folder.text

        # add unique tag if opted
        if newtag:
            if tag.has_attr('tags'):
                tag['tags'] += (DELIM + newtag)
            else:
                tag['tags'] = newtag

        yield (
            tag['href'], tag.string, parse_tags([tag['tags']])
            if tag.has_attr('tags') else None, desc, 0, True
        )
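

# A minimal usage sketch (editor's illustration; assumes beautifulsoup4 is
# installed, as this import path requires):
#
#   from bs4 import BeautifulSoup
#   with open('bookmarks.html', encoding='utf-8') as fp:
#       soup = BeautifulSoup(fp, 'html.parser')
#   for url, title, tags, desc, flags, _ in import_html(soup, False, 'imported'):
#       print(url, title, tags)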


class BukuDb:
    """Abstracts all database operations.

    Attributes
    ----------
    conn : sqlite3.Connection
        Connection to the database.
    cur : sqlite3.Cursor
        Cursor of the database connection.
    json : bool
        True if results should be printed in json format else False.
    field_filter : int
        Indicates format for displaying bookmarks. Default is 0.
    chatty : bool
        Sets the verbosity of the APIs. Default is False.
    colorize : bool
        Indicates whether color should be used in output. Default is True.
    """

    def __init__(self, json=False, field_filter=0, chatty=False, dbfile=None, colorize=True):
        """Database initialization API.

        Parameters
        ----------
        json : bool, optional
            True if results should be printed in json format else False.
        field_filter : int, optional
            Indicates format for displaying bookmarks. Default is 0.
        chatty : bool, optional
            Sets the verbosity of the APIs. Default is False.
        colorize : bool, optional
            Indicates whether color should be used in output. Default is True.
        """

        self.json = json
        self.field_filter = field_filter
        self.chatty = chatty
        self.colorize = colorize
        self.conn, self.cur = BukuDb.initdb(dbfile, self.chatty)
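
    # A minimal usage sketch (editor's illustration; the URL and tags are
    # hypothetical):
    #
    #   bdb = BukuDb(chatty=True)
    #   index = bdb.add_rec('https://example.com', 'Example', ',test,')
    #   bdb.print_rec(index)
    #   bdb.searchdb(['example'], all_keywords=True)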

    @staticmethod
    def get_default_dbdir():
        """Determine the directory path where dbfile will be stored.

        If the platform is Windows, use %APPDATA%
        else if $XDG_DATA_HOME is defined, use it
        else if $HOME exists, use it
        else use the current directory.

        Returns
        -------
        str
            Path to database file directory.
        """

        data_home = os.environ.get('XDG_DATA_HOME')
        if data_home is None:
            if os.environ.get('HOME') is None:
                if sys.platform == 'win32':
                    data_home = os.environ.get('APPDATA')
                    if data_home is None:
                        return os.path.abspath('.')
                else:
                    return os.path.abspath('.')
            else:
                data_home = os.path.join(os.environ.get('HOME'), '.local', 'share')

        return os.path.join(data_home, 'buku')

    @staticmethod
    def initdb(dbfile=None, chatty=False):
        """Initialize the database connection.

        Create DB file and/or bookmarks table if they don't exist.
        Alert on encryption options on first execution.

        Parameters
        ----------
        dbfile : str, optional
            Custom database file path (including filename).
        chatty : bool
            If True, shows informative message on DB creation.

        Returns
        -------
        tuple
            (connection, cursor).
        """

        if not dbfile:
            dbpath = BukuDb.get_default_dbdir()
            filename = 'bookmarks.db'
            dbfile = os.path.join(dbpath, filename)
        else:
            dbfile = os.path.abspath(dbfile)
            dbpath, filename = os.path.split(dbfile)

        try:
            if not os.path.exists(dbpath):
                os.makedirs(dbpath)
        except Exception as e:
            logerr(e)
            os._exit(1)

        db_exists = os.path.exists(dbfile)
        enc_exists = os.path.exists(dbfile + '.enc')

        if db_exists and not enc_exists:
            pass
        elif enc_exists and not db_exists:
            logerr('Unlock database first')
            sys.exit(1)
        elif db_exists and enc_exists:
            logerr('Both encrypted and flat DB files exist!')
            sys.exit(1)
        elif chatty:
            # not db_exists and not enc_exists
            print('DB file is being created at %s.\nYou should encrypt it.' % dbfile)

        try:
            # Create a connection
            conn = sqlite3.connect(dbfile, check_same_thread=False)
            conn.create_function('REGEXP', 2, regexp)
            cur = conn.cursor()

            # Create table if it doesn't exist
            # flags: designed to be extended in future using bitwise masks
            # Masks:
            # 0b00000001: set title immutable
            cur.execute('CREATE TABLE if not exists bookmarks ('
                        'id integer PRIMARY KEY, '
                        'URL text NOT NULL UNIQUE, '
                        'metadata text default \'\', '
                        'tags text default \',\', '
                        'desc text default \'\', '
                        'flags integer default 0)')
            conn.commit()
        except Exception as e:
            logerr('initdb(): %s', e)
            sys.exit(1)

        return (conn, cur)
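
    # Editor's note on the flags column (sketch of the bitmask convention
    # described above; only bit 0 is assigned in this version):
    #
    #   flags |= 1        # mark the title immutable
    #   flags & 1 == 1    # test immutability
    #   flags &= ~1       # clear the bit, making the title mutable again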

    def get_rec_all(self):
        """Get all the bookmarks in the database.

        Returns
        -------
        list
            A list of tuples representing bookmark records.
        """

        self.cur.execute('SELECT * FROM bookmarks')
        return self.cur.fetchall()

    def get_rec_by_id(self, index):
        """Get a bookmark from database by its ID.

        Parameters
        ----------
        index : int
            DB index of bookmark record.

        Returns
        -------
        tuple or None
            Bookmark data, or None if index is not found.
        """

        self.cur.execute('SELECT * FROM bookmarks WHERE id = ? LIMIT 1', (index,))
        resultset = self.cur.fetchall()
        return resultset[0] if resultset else None

    def get_rec_id(self, url):
        """Check if URL already exists in DB.

        Parameters
        ----------
        url : str
            A URL to search for in the DB.

        Returns
        -------
        int
            DB index, or -1 if URL not found in DB.
        """

        self.cur.execute('SELECT id FROM bookmarks WHERE URL = ? LIMIT 1', (url,))
        resultset = self.cur.fetchall()
        return resultset[0][0] if resultset else -1

    def get_max_id(self):
        """Fetch the ID of the last record.

        Returns
        -------
        int
            ID of the record if any record exists, else -1.
        """

        self.cur.execute('SELECT MAX(id) from bookmarks')
        resultset = self.cur.fetchall()
        return -1 if resultset[0][0] is None else resultset[0][0]

    def add_rec(self, url, title_in=None, tags_in=None, desc=None, immutable=0, delay_commit=False):
        """Add a new bookmark.

        Parameters
        ----------
        url : str
            URL to bookmark.
        title_in : str, optional
            Title to add manually. Default is None.
        tags_in : str, optional
            Comma-separated tags to add manually.
            Must start and end with comma. Default is None.
        desc : str, optional
            Description of the bookmark. Default is None.
        immutable : int, optional
            Indicates whether to disable title fetch from web.
            Default is 0.
        delay_commit : bool, optional
            True if record should not be committed to the DB,
            leaving commit responsibility to caller. Default is False.

        Returns
        -------
        int
            DB index of new bookmark on success, -1 on failure.
        """

        # Return error for empty URL
        if not url:
            logerr('Invalid URL')
            return -1

        # Ensure that the URL does not exist in DB already
        id = self.get_rec_id(url)
        if id != -1:
            logerr('URL [%s] already exists at index %d', url, id)
            return -1

        # Process title
        if title_in is not None:
            meta = title_in
        else:
            meta, mime, bad = network_handler(url)
            if bad:
                print('Malformed URL\n')
            elif mime:
                logdbg('HTTP HEAD requested')
            elif meta == '':
                print('No title\n')
            else:
                logdbg('Title: [%s]', meta)

        # Fix up tags, if broken
        # (both ends may need fixing, so these are not elif branches)
        if tags_in is None or tags_in == '':
            tags_in = DELIM
        else:
            if tags_in[0] != DELIM:
                tags_in = DELIM + tags_in
            if tags_in[-1] != DELIM:
                tags_in = tags_in + DELIM

        # Process description
        if desc is None:
            desc = ''

        try:
            flagset = 0
            if immutable == 1:
                flagset |= immutable

            qry = 'INSERT INTO bookmarks(URL, metadata, tags, desc, flags) VALUES (?, ?, ?, ?, ?)'
            self.cur.execute(qry, (url, meta, tags_in, desc, flagset))
            if not delay_commit:
                self.conn.commit()
            if self.chatty:
                self.print_rec(self.cur.lastrowid)
            return self.cur.lastrowid
        except Exception as e:
            logerr('add_rec(): %s', e)
            return -1
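
    # Illustrative call (editor's sketch; the URL and tags are hypothetical):
    #
    #   index = bdb.add_rec('https://example.com', title_in='Example',
    #                       tags_in=',news,reference,')
    #   # -> returns the new DB index, or -1 if the URL already exists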

    def append_tag_at_index(self, index, tags_in, delay_commit=False):
        """Append tags to bookmark tagset at index.

        Parameters
        ----------
        index : int
            DB index of the record. 0 indicates all records.
        tags_in : str
            Comma-separated tags to add manually.
        delay_commit : bool, optional
            True if record should not be committed to the DB,
            leaving commit responsibility to caller. Default is False.

        Returns
        -------
        bool
            True on success, False on failure.
        """

        if index == 0:
            resp = read_in('Append the tags to ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            self.cur.execute('SELECT id, tags FROM bookmarks ORDER BY id ASC')
        else:
            self.cur.execute('SELECT id, tags FROM bookmarks WHERE id = ? LIMIT 1', (index,))

        resultset = self.cur.fetchall()
        if resultset:
            query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
            for row in resultset:
                tags = row[1] + tags_in[1:]
                tags = parse_tags([tags])
                self.cur.execute(query, (tags, row[0],))
                if self.chatty and not delay_commit:
                    self.print_rec(row[0])
        else:
            return False

        if not delay_commit:
            self.conn.commit()

        return True

    def delete_tag_at_index(self, index, tags_in, delay_commit=False):
        """Delete tags from bookmark tagset at index.

        Parameters
        ----------
        index : int
            DB index of bookmark record. 0 indicates all records.
        tags_in : str
            Comma-separated tags to delete manually.
        delay_commit : bool, optional
            True if record should not be committed to the DB,
            leaving commit responsibility to caller. Default is False.

        Returns
        -------
        bool
            True on success, False on failure.
        """

        tags_to_delete = tags_in.strip(DELIM).split(DELIM)

        if index == 0:
            resp = read_in('Delete the tag(s) from ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            count = 0
            match = "'%' || ? || '%'"
            for tag in tags_to_delete:
                tag = delim_wrap(tag)
                q = ("UPDATE bookmarks SET tags = replace(tags, '%s', '%s') "
                     "WHERE tags LIKE %s" % (tag, DELIM, match))
                self.cur.execute(q, (tag,))
                count += self.cur.rowcount

            if count and not delay_commit:
                self.conn.commit()
                if self.chatty:
                    print('%d record(s) updated' % count)

            return True

        # Process a single index
        # Use SELECT and UPDATE to handle multiple tags at once
        query = 'SELECT id, tags FROM bookmarks WHERE id = ? LIMIT 1'
        self.cur.execute(query, (index,))
        resultset = self.cur.fetchall()
        if resultset:
            query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
            for row in resultset:
                tags = row[1]

                for tag in tags_to_delete:
                    tags = tags.replace(delim_wrap(tag), DELIM)

                self.cur.execute(query, (parse_tags([tags]), row[0],))
                if self.chatty and not delay_commit:
                    self.print_rec(row[0])

                if not delay_commit:
                    self.conn.commit()
        else:
            return False

        return True

    def update_rec(self, index, url=None, title_in=None, tags_in=None, desc=None, immutable=-1, threads=4):
        """Update an existing record at index.

        Update all records if index is 0 and url is not specified.
        URL is an exception because URLs are unique in DB.

        Parameters
        ----------
        index : int
            DB index of record. 0 indicates all records.
        url : str, optional
            Bookmark address.
        title_in : str, optional
            Title to add manually.
        tags_in : str, optional
            Comma-separated tags to add manually. Must start and end with comma.
            Prefix with '+,' to append to current tags.
            Prefix with '-,' to delete from current tags.
        desc : str, optional
            Description of bookmark.
        immutable : int, optional
            Disable title fetch from web if 1. Default is -1.
        threads : int, optional
            Number of threads to use to refresh full DB. Default is 4.

        Returns
        -------
        bool
            True on success, False on failure.
        """

        arguments = []
        query = 'UPDATE bookmarks SET'
        to_update = False
        tag_modified = False
        ret = False

        # Update URL if passed as argument
        if url is not None and url != '':
            if index == 0:
                logerr('All URLs cannot be same')
                return False
            query += ' URL = ?,'
            arguments += (url,)
            to_update = True

        # Update tags if passed as argument
        if tags_in is not None:
            if tags_in in ('+,', '-,'):
                logerr('Please specify a tag')
                return False

            if tags_in.startswith('+,'):
                chatty = self.chatty
                self.chatty = False
                ret = self.append_tag_at_index(index, tags_in[1:])
                self.chatty = chatty
                tag_modified = True
            elif tags_in.startswith('-,'):
                chatty = self.chatty
                self.chatty = False
                ret = self.delete_tag_at_index(index, tags_in[1:])
                self.chatty = chatty
                tag_modified = True
            else:
                # Fix up tags, if broken
                # (both ends may need fixing, so these are not elif branches)
                if tags_in == '':
                    tags_in = DELIM
                else:
                    if tags_in[0] != DELIM:
                        tags_in = DELIM + tags_in
                    if tags_in[-1] != DELIM:
                        tags_in = tags_in + DELIM

                query += ' tags = ?,'
                arguments += (tags_in,)
                to_update = True

        # Update description if passed as an argument
        if desc is not None:
            query += ' desc = ?,'
            arguments += (desc,)
            to_update = True

        # Update immutable flag if passed as argument
        if immutable != -1:
            flagset = 1
            if immutable == 1:
                query += ' flags = flags | ?,'
            elif immutable == 0:
                query += ' flags = flags & ?,'
                flagset = ~flagset

            arguments += (flagset,)
            to_update = True

        # Update title
        #
        # 1. if --title has no arguments, delete existing title
        # 2. if --title has arguments, update existing title
        # 3. if --title option is omitted at cmdline:
        #    if URL is passed, update the title from web using the URL
        # 4. if no other argument (url, tag, comment, immutable) passed,
        #    update title from web using DB URL (if title is mutable)
        title_to_insert = None
        if title_in is not None:
            title_to_insert = title_in
        elif url is not None and url != '':
            title_to_insert, mime, bad = network_handler(url)
            if bad:
                print('Malformed URL\n')
            elif mime:
                logdbg('HTTP HEAD requested')
            elif title_to_insert == '':
                print('No title\n')
            else:
                logdbg('Title: [%s]', title_to_insert)
        elif not to_update and not tag_modified:
            ret = self.refreshdb(index, threads)
            if ret and index and self.chatty:
                self.print_rec(index)
            return ret

        if title_to_insert is not None:
            query += ' metadata = ?,'
            arguments += (title_to_insert,)
            to_update = True

        if not to_update:  # Nothing to update
            # Show bookmark if tags were appended or deleted
            if tag_modified and self.chatty:
                self.print_rec(index)
            return ret

        if index == 0:  # Update all records
            resp = read_in('Update ALL bookmarks? (y/n): ')
            if resp != 'y':
                return False

            query = query[:-1]
        else:
            query = query[:-1] + ' WHERE id = ?'
            arguments += (index,)

        logdbg('query: "%s", args: %s', query, arguments)

        try:
            self.cur.execute(query, arguments)
            self.conn.commit()
            if self.cur.rowcount and self.chatty:
                self.print_rec(index)

            if self.cur.rowcount == 0:
                logerr('No matching index %d', index)
                return False
        except sqlite3.IntegrityError:
            logerr('URL already exists')
            return False

        return True
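
    # Illustrative calls (editor's sketch of the tag prefixes documented
    # above; index 15 is hypothetical):
    #
    #   bdb.update_rec(15, tags_in=',linux,news,')  # replace the tagset
    #   bdb.update_rec(15, tags_in='+,cli,')        # append 'cli'
    #   bdb.update_rec(15, tags_in='-,news,')       # delete 'news'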

    def refreshdb(self, index, threads):
        """Refresh ALL records in the database.

        Fetch title for each bookmark from the web and update the records.
        Doesn't update the record if title is empty.

        Notes
        -----
        This API doesn't change DB index, URL or tags of a bookmark.
        This API is verbose.

        Parameters
        ----------
        index : int
            DB index of record to update. 0 indicates all records.
        threads : int
            Number of threads to use to refresh full DB.
        """

        if index == 0:
            self.cur.execute('SELECT id, url, flags FROM bookmarks ORDER BY id ASC')
        else:
            self.cur.execute('SELECT id, url, flags FROM bookmarks WHERE id = ? LIMIT 1', (index,))

        resultset = self.cur.fetchall()
        recs = len(resultset)
        if not recs:
            logerr('No matching index or title immutable or empty DB')
            return False

        # Set up strings to be printed
        if self.colorize:
            bad_url_str = '\x1b[1mIndex %d: Malformed URL\x1b[0m\n'
            mime_str = '\x1b[1mIndex %d: HTTP HEAD requested\x1b[0m\n'
            blank_URL_str = '\x1b[1mIndex %d: No title\x1b[0m\n'
            success_str = 'Title: [%s]\n\x1b[92mIndex %d: updated\x1b[0m\n'
        else:
            bad_url_str = 'Index %d: Malformed URL\n'
            mime_str = 'Index %d: HTTP HEAD requested\n'
            blank_URL_str = 'Index %d: No title\n'
            success_str = 'Title: [%s]\nIndex %d: updated\n'

        query = 'UPDATE bookmarks SET metadata = ? WHERE id = ?'
        done = {'value': 0}  # count threads completed
        processed = {'value': 0}  # count number of records processed

        # An additional call to generate default headers
        # gen_headers() is called within network_handler()
        # However, this initial call to set up headers
        # ensures there is no race condition among the
        # initial threads to set up headers
        if not myheaders:
            gen_headers()

        cond = threading.Condition()
        cond.acquire()

        def refresh(count, cond):
            """Inner function to fetch titles and update records.

            Parameters
            ----------
            count : int
                Dummy input to adhere to convention.
            cond : threading.Condition
                Condition variable guarding shared state.
            """

            count = 0

            while True:
                cond.acquire()
                if resultset:
                    row = resultset.pop()
                else:
                    cond.release()
                    break
                cond.release()

                title, mime, bad = network_handler(row[1], row[2] & 1)
                count += 1

                cond.acquire()
                if bad:
                    print(bad_url_str % row[0])
                    cond.release()
                    continue
                elif mime:
                    if self.chatty:
                        print(mime_str % row[0])
                    cond.release()
                    continue
                elif title == '':
                    print(blank_URL_str % row[0])
                    cond.release()
                    continue

                self.cur.execute(query, (title, row[0],))
                # Save after fetching 32 titles per thread
                if count & 0b11111 == 0:
                    self.conn.commit()

                if self.chatty:
                    print(success_str % (title, row[0]))
                cond.release()

                if interrupted:
                    break

            logdbg('Thread %d: processed %d', threading.get_ident(), count)
            with cond:
                done['value'] += 1
                processed['value'] += count
                cond.notify()

        if recs < threads:
            threads = recs

        for i in range(threads):
            thread = threading.Thread(target=refresh, args=(i, cond))
            thread.start()

        while done['value'] < threads:
            cond.wait()
            logdbg('%d threads completed', done['value'])

        # Guard: records found == total records processed
        if recs != processed['value']:
            logerr('Records: %d, processed: %d !!!', recs, processed['value'])

        cond.release()
        self.conn.commit()
        return True
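
    # A stripped-down sketch of the worker pattern used above (editor's
    # illustration, not part of the module): resultset doubles as a shared
    # work queue, popped under the condition lock, with network I/O done
    # outside the lock.
    #
    #   cond = threading.Condition()
    #   def worker():
    #       while True:
    #           with cond:
    #               if not work:
    #                   break
    #               item = work.pop()
    #           process(item)  # slow I/O happens with the lock released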

    def edit_update_rec(self, index, immutable=-1):
        """Edit in editor and update a record.

        Parameters
        ----------
        index : int
            DB index of the record.
        immutable : int, optional
            Disable title fetch from web if 1. Default is -1.

        Returns
        -------
        bool
            True if updated, else False.
        """

        editor = get_system_editor()
        if editor == 'none':
            logerr('EDITOR must be set to use index with -w')
            return False

        rec = self.get_rec_by_id(index)
        if not rec:
            logerr('No matching index %d', index)
            return False

        result = edit_rec(editor, rec[1], rec[2], rec[3], rec[4])
        if result is not None:
            url, title, tags, desc = result
            return self.update_rec(index, url, title, tags, desc, immutable)

        if immutable != -1:
            # pass by keyword so immutable is not mistaken for the url param
            return self.update_rec(index, immutable=immutable)

        return False

    def searchdb(self, keywords, all_keywords=False, deep=False, regex=False):
        """Search DB for entries where tags, URL, or title fields match keywords.

        Parameters
        ----------
        keywords : list of str
            Keywords to search.
        all_keywords : bool, optional
            True to return records matching ALL keywords.
            False (default value) to return records matching ANY keyword.
        deep : bool, optional
            True to search for matching substrings. Default is False.
        regex : bool, optional
            Match a regular expression if True. Default is False.

        Returns
        -------
        list or None
            List of search results, or None if no matches.
        """

        if not keywords:
            return None

        q0 = 'SELECT id, url, metadata, tags, desc FROM bookmarks WHERE '
        # Deep query string
        q1 = ("(tags LIKE ('%' || ? || '%') OR "
              "URL LIKE ('%' || ? || '%') OR "
              "metadata LIKE ('%' || ? || '%') OR "
              "desc LIKE ('%' || ? || '%')) ")
        # Non-deep query string
        q2 = ('(tags REGEXP ? OR '
              'URL REGEXP ? OR '
              'metadata REGEXP ? OR '
              'desc REGEXP ?) ')
        qargs = []

        if regex:
            for token in keywords:
                q0 += q2 + 'OR '
                qargs += (token, token, token, token,)
            q0 = q0[:-3]
        elif all_keywords:
            if len(keywords) == 1 and keywords[0] == 'blank':
                q0 = "SELECT * FROM bookmarks WHERE metadata = '' OR tags = ? "
                qargs += (DELIM,)
            elif len(keywords) == 1 and keywords[0] == 'immutable':
                q0 = 'SELECT * FROM bookmarks WHERE flags & 1 == 1 '
            else:
                for token in keywords:
                    if deep:
                        q0 += q1 + 'AND '
                    else:
                        token = '\\b' + token.rstrip('/') + '\\b'
                        q0 += q2 + 'AND '

                    qargs += (token, token, token, token,)
                q0 = q0[:-4]
        elif not all_keywords:
            for token in keywords:
                if deep:
                    q0 += q1 + 'OR '
                else:
                    token = '\\b' + token.rstrip('/') + '\\b'
                    q0 += q2 + 'OR '

                qargs += (token, token, token, token,)
            q0 = q0[:-3]
        else:
            logerr('Invalid search option')
            return None

        q0 += 'ORDER BY id ASC'
        logdbg('query: "%s", args: %s', q0, qargs)

        try:
            self.cur.execute(q0, qargs)
        except sqlite3.OperationalError as e:
            logerr(e)
            return None

        return self.cur.fetchall()
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-08-14 16:07:01 -05:00
|
|
|
def search_by_tag(self, tags):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Search bookmarks for entries with given tags.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
tags : str
|
|
|
|
String of tags to search for.
|
|
|
|
Retrieves entries matching ANY tag if tags are
|
|
|
|
delimited with ','.
|
|
|
|
Retrieves entries matching ALL tags if tags are
|
|
|
|
delimited with '+'.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
list or None
|
2017-09-16 12:38:11 -05:00
|
|
|
List of search results, or None if no matches.
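
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); illustrative only:

>>> results = bdb.search_by_tag('linux, programming')  # ANY of the tags
>>> results = bdb.search_by_tag('linux + programming')  # ALL of the tags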
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-08-14 16:07:01 -05:00
|
|
|
# do not allow combination of search logics
|
|
|
|
if ' + ' in tags and ',' in tags:
|
|
|
|
logerr("Cannot use both '+' and ',' in same search")
|
|
|
|
return
|
|
|
|
|
|
|
|
tags, search_operator, excluded_tags = prep_tag_search(tags)
|
|
|
|
|
|
|
|
query = "SELECT id, url, metadata, tags, desc FROM bookmarks WHERE tags LIKE '%' || ? || '%' "
|
|
|
|
for tag in tags[1:]:
|
|
|
|
query += "{} tags LIKE '%' || ? || '%' ".format(search_operator)
|
|
|
|
if excluded_tags:
|
|
|
|
tags.append(excluded_tags)
|
|
|
|
query = query.replace('WHERE tags', 'WHERE (tags')
|
|
|
|
query += ') AND tags NOT REGEXP ? '
|
|
|
|
query += 'ORDER BY id ASC'
|
2016-07-09 10:19:16 -05:00
|
|
|
|
2017-08-14 16:07:01 -05:00
|
|
|
logdbg('query: "%s", args: %s', query, tags)
|
|
|
|
|
|
|
|
self.cur.execute(query, tuple(tags, ))
|
2017-01-29 07:49:47 -06:00
|
|
|
return self.cur.fetchall()
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-10-28 13:31:52 -05:00
|
|
|
def compactdb(self, index, delay_commit=False):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""When an entry at index is deleted, move the
|
2016-10-29 02:54:10 -05:00
|
|
|
last entry in the DB to that index, if the index is lower than the maximum index.
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
index : int
|
2017-09-16 12:38:11 -05:00
|
|
|
DB index of deleted entry.
|
2017-09-05 15:24:04 -05:00
|
|
|
delay_commit : bool, optional
|
|
|
|
True if record should not be committed to the DB,
|
|
|
|
leaving commit responsibility to caller. Default is False.
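
Examples
--------
A minimal sketch of the compaction behavior (assuming `bdb` is an
initialized BukuDb instance holding indices 1 to 10); illustrative only:

>>> _ = bdb.cur.execute('DELETE FROM bookmarks WHERE id = ?', (4,))
>>> bdb.compactdb(4)  # the record at index 10 moves to index 4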
|
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-10-01 10:29:53 -05:00
|
|
|
# Return if the last index left in DB was just deleted
|
2017-03-05 14:56:40 -06:00
|
|
|
max_id = self.get_max_id()
|
|
|
|
if max_id == -1:
|
2016-05-20 17:05:25 -05:00
|
|
|
return
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-08-12 01:52:37 -05:00
|
|
|
query1 = 'SELECT id, URL, metadata, tags, desc FROM bookmarks WHERE id = ? LIMIT 1'
|
2016-09-09 13:52:32 -05:00
|
|
|
query2 = 'DELETE FROM bookmarks WHERE id = ?'
|
2017-08-12 01:52:37 -05:00
|
|
|
query3 = 'INSERT INTO bookmarks(id, URL, metadata, tags, desc) VALUES (?, ?, ?, ?, ?)'
|
2016-09-09 13:52:32 -05:00
|
|
|
|
2017-03-05 14:56:40 -06:00
|
|
|
if max_id > index:
|
|
|
|
self.cur.execute(query1, (max_id,))
|
|
|
|
results = self.cur.fetchall()
|
|
|
|
for row in results:
|
|
|
|
self.cur.execute(query2, (row[0],))
|
2017-08-12 01:52:37 -05:00
|
|
|
self.cur.execute(query3, (index, row[1], row[2], row[3], row[4],))
|
2017-03-05 14:56:40 -06:00
|
|
|
if not delay_commit:
|
|
|
|
self.conn.commit()
|
|
|
|
if self.chatty:
|
|
|
|
print('Index %d moved to %d' % (row[0], index))
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-08-12 01:52:37 -05:00
|
|
|
def delete_rec(self, index, low=0, high=0, is_range=False, delay_commit=False):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Delete a single record or remove the table if index is None.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
index : int
|
|
|
|
DB index of deleted entry.
|
|
|
|
low : int, optional
|
|
|
|
Actual lower index of range.
|
|
|
|
high : int, optional
|
|
|
|
Actual higher index of range.
|
|
|
|
is_range : bool, optional
|
|
|
|
A range is passed using low and high arguments.
|
|
|
|
An index is ignored if is_range is True (use dummy index).
|
|
|
|
Default is False.
|
|
|
|
delay_commit : bool, optional
|
|
|
|
True if record should not be committed to the DB,
|
|
|
|
leaving commit responsibility to caller. Default is False.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
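
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); illustrative only:

>>> ok = bdb.delete_rec(7)  # delete record 7, then compact the DB
>>> ok = bdb.delete_rec(0, 15, 20, is_range=True)  # delete records 15-20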
|
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-10-28 13:52:49 -05:00
|
|
|
if is_range: # Delete a range of indices
|
2017-03-19 07:29:06 -05:00
|
|
|
if low < 0 or high < 0:
|
|
|
|
logerr('Negative range boundary')
|
|
|
|
return False
|
|
|
|
|
2017-03-19 02:09:44 -05:00
|
|
|
if low > high:
|
|
|
|
low, high = high, low
|
|
|
|
|
2016-10-28 14:19:24 -05:00
|
|
|
# If range starts from 0, delete all records
|
|
|
|
if low == 0:
|
2016-10-29 06:31:14 -05:00
|
|
|
return self.cleardb()
|
2016-10-28 14:19:24 -05:00
|
|
|
|
2016-06-16 16:08:38 -05:00
|
|
|
try:
|
2016-09-09 13:52:32 -05:00
|
|
|
query = 'DELETE from bookmarks where id BETWEEN ? AND ?'
|
|
|
|
self.cur.execute(query, (low, high))
|
2017-08-12 01:52:37 -05:00
|
|
|
print('Index %d-%d: %d deleted' % (low, high, self.cur.rowcount))
|
2016-12-26 21:40:08 -06:00
|
|
|
if not self.cur.rowcount:
|
|
|
|
return False
|
2016-10-28 13:31:52 -05:00
|
|
|
|
|
|
|
# Compact DB by ascending order of index to ensure
|
|
|
|
# the existing higher indices move only once
|
|
|
|
# Delayed commit is forced
|
|
|
|
for index in range(low, high + 1):
|
|
|
|
self.compactdb(index, delay_commit=True)
|
|
|
|
|
|
|
|
if not delay_commit:
|
|
|
|
self.conn.commit()
|
2016-06-16 16:08:38 -05:00
|
|
|
except IndexError:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('No matching index')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-06-16 16:08:38 -05:00
|
|
|
elif index == 0: # Remove the table
|
2016-10-29 06:31:14 -05:00
|
|
|
return self.cleardb()
|
2016-05-31 12:39:34 -05:00
|
|
|
else: # Remove a single entry
|
2016-05-20 17:05:25 -05:00
|
|
|
try:
|
2016-09-09 13:52:32 -05:00
|
|
|
query = 'DELETE FROM bookmarks WHERE id = ?'
|
|
|
|
self.cur.execute(query, (index,))
|
2016-05-20 17:05:25 -05:00
|
|
|
if self.cur.rowcount == 1:
|
2016-12-26 21:40:08 -06:00
|
|
|
print('Index %d deleted' % index)
|
2016-11-28 10:40:01 -06:00
|
|
|
self.compactdb(index, delay_commit=True)
|
|
|
|
if not delay_commit:
|
|
|
|
self.conn.commit()
|
2016-05-20 17:05:25 -05:00
|
|
|
else:
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-20 17:05:25 -05:00
|
|
|
except IndexError:
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-08-26 11:54:57 -05:00
|
|
|
return True
|
|
|
|
|
2016-10-28 09:27:46 -05:00
|
|
|
def delete_resultset(self, results):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Delete search results in descending order of DB index.
|
|
|
|
|
2016-10-28 09:27:46 -05:00
|
|
|
Indices are expected to be unique and in ascending order.
|
2016-10-29 02:54:10 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Notes
|
|
|
|
-----
|
|
|
|
This API forces a delayed commit.
|
2016-10-28 09:27:46 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
results : list of tuples
|
|
|
|
List of results to delete from DB.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
2017-09-16 12:38:11 -05:00
|
|
|
True on success, False on failure.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2017-02-07 04:07:01 -06:00
|
|
|
resp = read_in('Delete the search results? (y/n): ')
|
2016-10-28 14:56:40 -05:00
|
|
|
if resp != 'y':
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-10-28 09:27:46 -05:00
|
|
|
|
|
|
|
# delete records in reverse order
|
|
|
|
pos = len(results) - 1
|
|
|
|
while pos >= 0:
|
|
|
|
idx = results[pos][0]
|
2016-12-27 08:10:29 -06:00
|
|
|
self.delete_rec(idx, delay_commit=True)
|
2016-10-28 09:27:46 -05:00
|
|
|
|
2016-10-28 13:52:49 -05:00
|
|
|
# Commit at every 200th removal
|
|
|
|
if pos % 200 == 0:
|
|
|
|
self.conn.commit()
|
|
|
|
|
|
|
|
pos -= 1
|
2016-10-28 13:31:52 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
|
|
|
|
2017-02-18 20:48:38 -06:00
|
|
|
def delete_rec_all(self, delay_commit=False):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Removes all records in the Bookmarks table.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
delay_commit : bool, optional
|
|
|
|
True if record should not be committed to the DB,
|
|
|
|
leaving commit responsibility to caller. Default is False.
|
2017-02-18 20:43:58 -06:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
2017-09-16 12:38:11 -05:00
|
|
|
True on success, False on failure.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2017-02-18 20:48:38 -06:00
|
|
|
|
2017-02-18 20:43:58 -06:00
|
|
|
try:
|
|
|
|
self.cur.execute('DELETE FROM bookmarks')
|
|
|
|
if not delay_commit:
|
|
|
|
self.conn.commit()
|
|
|
|
return True
|
|
|
|
except Exception as e:
|
|
|
|
logerr('delete_rec_all(): %s', e)
|
|
|
|
return False
|
|
|
|
|
2016-10-29 06:31:14 -05:00
|
|
|
def cleardb(self):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Drops the bookmark table if it exists.
|
2016-10-29 02:54:10 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
2017-09-16 12:38:11 -05:00
|
|
|
True on success, False on failure.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2016-10-28 14:03:10 -05:00
|
|
|
|
2017-02-07 04:07:01 -06:00
|
|
|
resp = read_in('Remove ALL bookmarks? (y/n): ')
|
2016-10-28 14:03:10 -05:00
|
|
|
if resp != 'y':
|
|
|
|
print('No bookmarks deleted')
|
|
|
|
return False
|
2016-08-21 18:09:07 -05:00
|
|
|
|
|
|
|
self.cur.execute('DROP TABLE if exists bookmarks')
|
|
|
|
self.conn.commit()
|
2016-10-28 14:03:10 -05:00
|
|
|
print('All bookmarks deleted')
|
|
|
|
return True
|
2016-08-21 18:09:07 -05:00
|
|
|
|
2017-03-19 02:09:44 -05:00
|
|
|
def print_rec(self, index=0, low=0, high=0, is_range=False):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Print bookmark details at index or all bookmarks if index is 0.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
2017-09-16 12:38:11 -05:00
|
|
|
A negative index behaves like tail; if title is blank, show "Untitled".
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
index : int, optional
|
|
|
|
DB index of record to print. 0 prints all records.
|
|
|
|
low : int, optional
|
|
|
|
Actual lower index of range.
|
|
|
|
high : int, optional
|
|
|
|
Actual higher index of range.
|
|
|
|
is_range : bool, optional
|
|
|
|
A range is passed using low and high arguments.
|
|
|
|
An index is ignored if is_range is True (use dummy index).
|
|
|
|
Default is False.
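
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); illustrative only:

>>> bdb.print_rec(5)  # print record 5
>>> bdb.print_rec(-3)  # print the last 3 records, like tail
>>> bdb.print_rec(0, 2, 10, is_range=True)  # print records 2-10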
|
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-04-26 08:48:34 -05:00
|
|
|
if index < 0:
|
|
|
|
# Show the last n records
|
|
|
|
_id = self.get_max_id()
|
|
|
|
if _id == -1:
|
|
|
|
logerr('Empty database')
|
|
|
|
return False
|
|
|
|
|
|
|
|
low = (1 if _id <= -index else _id + index + 1)
|
|
|
|
high = _id
|
|
|
|
is_range = True
|
|
|
|
|
2017-03-18 14:15:26 -05:00
|
|
|
if is_range:
|
2017-03-19 07:29:06 -05:00
|
|
|
if low < 0 or high < 0:
|
|
|
|
logerr('Negative range boundary')
|
|
|
|
return False
|
|
|
|
|
2017-03-19 02:09:44 -05:00
|
|
|
if low > high:
|
|
|
|
low, high = high, low
|
|
|
|
|
2017-03-18 14:15:26 -05:00
|
|
|
try:
|
2017-03-19 02:09:44 -05:00
|
|
|
# If range starts from 0 print all records
|
|
|
|
if low == 0:
|
|
|
|
query = 'SELECT * from bookmarks'
|
|
|
|
resultset = self.cur.execute(query)
|
|
|
|
else:
|
|
|
|
query = 'SELECT * from bookmarks where id BETWEEN ? AND ?'
|
|
|
|
resultset = self.cur.execute(query, (low, high))
|
2017-03-18 14:15:26 -05:00
|
|
|
except IndexError:
|
|
|
|
logerr('Index out of range')
|
|
|
|
return
|
|
|
|
elif index != 0: # Show record at index
|
2016-05-20 17:05:25 -05:00
|
|
|
try:
|
2017-01-29 12:31:30 -06:00
|
|
|
query = 'SELECT * FROM bookmarks WHERE id = ? LIMIT 1'
|
2016-09-09 13:52:32 -05:00
|
|
|
self.cur.execute(query, (index,))
|
2016-05-23 03:51:54 -05:00
|
|
|
results = self.cur.fetchall()
|
2017-01-29 11:21:55 -06:00
|
|
|
if not results:
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-05-23 03:51:54 -05:00
|
|
|
return
|
2016-05-20 17:05:25 -05:00
|
|
|
except IndexError:
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-05-20 17:05:25 -05:00
|
|
|
return
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-06-29 13:06:33 -05:00
|
|
|
if not self.json:
|
2017-09-23 12:01:37 -05:00
|
|
|
print_rec_with_filter(results, self.field_filter)
|
2016-05-20 17:05:25 -05:00
|
|
|
else:
|
2016-10-29 04:02:50 -05:00
|
|
|
print(format_json(results, True, self.field_filter))
|
2017-03-18 14:15:26 -05:00
|
|
|
|
|
|
|
return
|
2016-11-20 11:10:54 -06:00
|
|
|
else: # Show all entries
|
|
|
|
self.cur.execute('SELECT * FROM bookmarks')
|
|
|
|
resultset = self.cur.fetchall()
|
|
|
|
|
2017-03-18 14:15:26 -05:00
|
|
|
if not resultset:
|
|
|
|
logerr('0 records')
|
|
|
|
return
|
|
|
|
|
|
|
|
if not self.json:
|
2017-09-23 12:01:37 -05:00
|
|
|
print_rec_with_filter(resultset, self.field_filter)
|
2017-03-18 14:15:26 -05:00
|
|
|
else:
|
|
|
|
print(format_json(resultset, field_filter=self.field_filter))
|
2016-05-20 17:05:25 -05:00
|
|
|
|
2017-02-18 20:48:38 -06:00
|
|
|
def get_tag_all(self):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Get list of tags in DB.
|
2016-11-14 14:31:22 -06:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
tuple
|
|
|
|
(list of unique tags sorted alphabetically,
|
2017-09-16 12:38:11 -05:00
|
|
|
dictionary of {tag: usage_count}).
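
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); illustrative only:

>>> unique_tags, usage = bdb.get_tag_all()
>>> count = usage.get('linux', 0)  # number of bookmarks tagged 'linux'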
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-10-22 08:21:46 -05:00
|
|
|
tags = []
|
|
|
|
unique_tags = []
|
2016-11-20 07:31:02 -06:00
|
|
|
dic = {}
|
|
|
|
qry = 'SELECT DISTINCT tags, COUNT(tags) FROM bookmarks GROUP BY tags'
|
|
|
|
for row in self.cur.execute(qry):
|
2016-10-29 05:36:29 -05:00
|
|
|
tagset = row[0].strip(DELIM).split(DELIM)
|
2016-05-31 16:00:34 -05:00
|
|
|
for tag in tagset:
|
2016-10-22 08:21:46 -05:00
|
|
|
if tag not in tags:
|
2016-11-20 07:31:02 -06:00
|
|
|
dic[tag] = row[1]
|
2016-10-22 08:21:46 -05:00
|
|
|
tags += (tag,)
|
2016-11-20 07:31:02 -06:00
|
|
|
else:
|
|
|
|
dic[tag] += row[1]
|
2016-05-31 16:00:34 -05:00
|
|
|
|
2017-01-29 11:21:55 -06:00
|
|
|
if not tags:
|
|
|
|
return tags, dic
|
2016-11-19 23:41:09 -06:00
|
|
|
|
2016-10-22 08:21:46 -05:00
|
|
|
if tags[0] == '':
|
2016-11-20 07:31:02 -06:00
|
|
|
unique_tags = sorted(tags[1:])
|
2016-05-31 16:00:34 -05:00
|
|
|
else:
|
2016-11-20 07:31:02 -06:00
|
|
|
unique_tags = sorted(tags)
|
2016-11-14 14:31:22 -06:00
|
|
|
|
2016-11-20 07:31:02 -06:00
|
|
|
return unique_tags, dic
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-08-01 10:37:03 -05:00
|
|
|
def suggest_similar_tag(self, tagstr):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Show list of tags those go together in DB.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
tagstr : str
|
|
|
|
Original tag string.
|
2017-07-30 15:50:51 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
str
|
2017-09-16 12:38:11 -05:00
|
|
|
DELIM separated string of tags.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2017-07-30 15:50:51 -05:00
|
|
|
|
|
|
|
tags = tagstr.split(',')
|
|
|
|
if not len(tags):
|
|
|
|
return tagstr
|
|
|
|
|
|
|
|
qry = 'SELECT DISTINCT tags FROM bookmarks WHERE tags LIKE ?'
|
|
|
|
tagset = []
|
|
|
|
unique_tags = []
|
|
|
|
for tag in tags:
|
|
|
|
if tag == '':
|
|
|
|
continue
|
|
|
|
|
|
|
|
self.cur.execute(qry, ('%' + delim_wrap(tag) + '%',))
|
|
|
|
results = self.cur.fetchall()
|
|
|
|
if results:
|
|
|
|
for row in results:
|
|
|
|
tagset += row[0].strip(DELIM).split(DELIM)
|
|
|
|
|
|
|
|
if len(tagset):
|
|
|
|
for tag in tagset:
|
|
|
|
if tag not in tags and tag not in unique_tags:
|
|
|
|
unique_tags += (tag, )
|
|
|
|
|
|
|
|
if not len(unique_tags):
|
|
|
|
return tagstr
|
|
|
|
|
|
|
|
unique_tags = sorted(unique_tags)
|
|
|
|
print('similar tags:\n')
|
|
|
|
count = 0
|
|
|
|
for tag in unique_tags:
|
|
|
|
print('%d. %s' % (count + 1, tag))
|
|
|
|
count += 1
|
|
|
|
|
|
|
|
resp = input('\nselect: ')
|
2017-08-01 11:22:18 -05:00
|
|
|
print()
|
2017-07-30 15:50:51 -05:00
|
|
|
if not resp:
|
|
|
|
return tagstr
|
|
|
|
|
|
|
|
tagset = resp.split()
|
|
|
|
tags = [tagstr]
|
|
|
|
for index in tagset:
|
|
|
|
try:
|
|
|
|
tags.append(delim_wrap(unique_tags[int(index) - 1]))
|
|
|
|
except (ValueError, IndexError):
|
|
|
|
continue
|
|
|
|
|
|
|
|
return parse_tags(tags)
|
|
|
|
|
2016-05-22 09:53:19 -05:00
|
|
|
def replace_tag(self, orig, new=None):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Replace original tag by new tags in all records.
|
|
|
|
|
2016-11-27 12:40:14 -06:00
|
|
|
Remove original tag if new tag is empty.
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
orig : str
|
|
|
|
Original tag.
|
|
|
|
new : list
|
|
|
|
Replacement tags.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
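
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); note that `new` is a list of replacement tags:

>>> ok = bdb.replace_tag('gnu-linux', ['linux'])  # rename a tag
>>> ok = bdb.replace_tag('obsolete')  # remove the tag from all records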
|
|
|
|
"""
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
newtags = DELIM
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-03-05 01:56:39 -06:00
|
|
|
orig = delim_wrap(orig)
|
2016-11-27 12:40:14 -06:00
|
|
|
if new is not None:
|
2016-05-23 10:57:06 -05:00
|
|
|
newtags = parse_tags(new)
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-05-20 17:05:25 -05:00
|
|
|
if orig == newtags:
|
2016-05-24 12:51:38 -05:00
|
|
|
print('Tags are same.')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2017-03-05 01:56:39 -06:00
|
|
|
# Remove original tag from DB if new tagset reduces to delimiter
|
2016-11-27 12:40:14 -06:00
|
|
|
if newtags == DELIM:
|
|
|
|
return self.delete_tag_at_index(0, orig)
|
|
|
|
|
2017-03-05 01:56:39 -06:00
|
|
|
# Update bookmarks with original tag
|
2016-09-09 13:52:32 -05:00
|
|
|
query = 'SELECT id, tags FROM bookmarks WHERE tags LIKE ?'
|
|
|
|
self.cur.execute(query, ('%' + orig + '%',))
|
2016-05-20 17:05:25 -05:00
|
|
|
results = self.cur.fetchall()
|
2017-01-29 11:21:55 -06:00
|
|
|
if results:
|
|
|
|
query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
|
|
|
|
for row in results:
|
|
|
|
tags = row[1].replace(orig, newtags)
|
|
|
|
tags = parse_tags([tags])
|
|
|
|
self.cur.execute(query, (tags, row[0],))
|
|
|
|
print('Index %d updated' % row[0])
|
2016-05-21 08:15:56 -05:00
|
|
|
|
2016-05-20 17:05:25 -05:00
|
|
|
self.conn.commit()
|
|
|
|
|
2016-11-27 12:40:14 -06:00
|
|
|
return True
|
2016-10-29 02:54:10 -05:00
|
|
|
|
2017-04-06 14:19:08 -05:00
|
|
|
def set_tag(self, cmdstr, taglist):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Append, overwrite, remove tags using the symbols >>, > and << respectively.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
cmdstr : str
|
|
|
|
Command pattern.
|
|
|
|
taglist : list
|
|
|
|
List of tags.
|
2017-04-06 14:19:08 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
int
|
|
|
|
Number of indices updated on success, -1 on failure.
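
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb instance
and `taglist` came from get_tag_all()); indices left of the symbol refer
to taglist entries, indices on the right to DB records:

>>> taglist, _ = bdb.get_tag_all()
>>> n = bdb.set_tag('1 3 >> 2-4', taglist)  # append tags 1, 3 to records 2-4
>>> n = bdb.set_tag('1 << 7', taglist)  # remove tag 1 from record 7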
|
|
|
|
"""
|
2017-04-06 14:19:08 -05:00
|
|
|
|
2017-04-08 12:45:15 -05:00
|
|
|
if not cmdstr or not taglist:
|
2017-04-06 14:19:08 -05:00
|
|
|
return -1
|
|
|
|
|
|
|
|
flag = 0 # 0: invalid, 1: append, 2: overwrite, 3: remove
|
|
|
|
index = cmdstr.find('>>')
|
|
|
|
if index == -1:
|
|
|
|
index = cmdstr.find('>')
|
|
|
|
if index != -1:
|
|
|
|
flag = 2
|
|
|
|
else:
|
|
|
|
index = cmdstr.find('<<')
|
|
|
|
if index != -1:
|
|
|
|
flag = 3
|
|
|
|
else:
|
|
|
|
flag = 1
|
|
|
|
|
|
|
|
if not flag:
|
|
|
|
return -1
|
|
|
|
|
|
|
|
tags = DELIM
|
|
|
|
id_list = cmdstr[:index].split()
|
|
|
|
try:
|
|
|
|
for id in id_list:
|
|
|
|
if is_int(id) and int(id) > 0:
|
|
|
|
tags += taglist[int(id) - 1] + DELIM
|
|
|
|
elif '-' in id:
|
|
|
|
vals = [int(x) for x in id.split('-')]
|
|
|
|
if vals[0] > vals[-1]:
|
|
|
|
vals[0], vals[-1] = vals[-1], vals[0]
|
|
|
|
|
|
|
|
for _id in range(vals[0], vals[-1] + 1):
|
|
|
|
tags += taglist[_id - 1] + DELIM
|
|
|
|
else:
|
|
|
|
return -1
|
|
|
|
except ValueError:
|
|
|
|
return -1
|
|
|
|
|
|
|
|
if flag != 2:
|
|
|
|
index += 1
|
|
|
|
|
|
|
|
update_count = 0
|
|
|
|
query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
|
|
|
|
try:
|
|
|
|
db_id_list = cmdstr[index + 1:].split()
|
|
|
|
for id in db_id_list:
|
|
|
|
if is_int(id) and int(id) > 0:
|
|
|
|
if flag == 1:
|
|
|
|
if self.append_tag_at_index(id, tags, True):
|
|
|
|
update_count += 1
|
|
|
|
elif flag == 2:
|
|
|
|
tags = parse_tags([tags])
|
|
|
|
self.cur.execute(query, (tags, id,))
|
|
|
|
update_count += self.cur.rowcount
|
|
|
|
else:
|
|
|
|
self.delete_tag_at_index(id, tags, True)
|
|
|
|
update_count += 1
|
|
|
|
elif '-' in id:
|
|
|
|
vals = [int(x) for x in id.split('-')]
|
|
|
|
if vals[0] > vals[-1]:
|
|
|
|
vals[0], vals[-1] = vals[-1], vals[0]
|
|
|
|
|
|
|
|
for _id in range(vals[0], vals[-1] + 1):
|
|
|
|
if flag == 1:
|
|
|
|
if self.append_tag_at_index(_id, tags, True):
|
|
|
|
update_count += 1
|
|
|
|
elif flag == 2:
|
|
|
|
tags = parse_tags([tags])
|
|
|
|
self.cur.execute(query, (tags, _id,))
|
|
|
|
update_count += self.cur.rowcount
|
|
|
|
else:
|
|
|
|
if self.delete_tag_at_index(_id, tags, True):
|
|
|
|
update_count += 1
|
|
|
|
else:
|
|
|
|
return -1
|
|
|
|
except ValueError:
|
|
|
|
return -1
|
|
|
|
except sqlite3.IntegrityError:
|
|
|
|
return -1
|
|
|
|
|
|
|
|
try:
|
|
|
|
self.conn.commit()
|
|
|
|
except Exception:
|
|
|
|
return -1
|
|
|
|
|
|
|
|
return update_count
|
|
|
|
|
2017-05-01 10:24:01 -05:00
|
|
|
def browse_by_index(self, index=0, low=0, high=0, is_range=False):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Open URL at index or range of indies in browser.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
index : int
|
|
|
|
Index to browse. 0 opens a random bookmark.
|
|
|
|
low : int
|
|
|
|
Actual lower index of range.
|
|
|
|
high : int
|
|
|
|
Higher index of range.
|
|
|
|
is_range : bool
|
|
|
|
A range is passed using low and high arguments.
|
|
|
|
If True, index is ignored. Default is False.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
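
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); illustrative only:

>>> ok = bdb.browse_by_index(3)  # open record 3 in the browser
>>> ok = bdb.browse_by_index(0)  # open a random bookmark
>>> ok = bdb.browse_by_index(0, 2, 5, is_range=True)  # open records 2-5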
|
|
|
|
"""
|
2016-03-24 13:47:57 -05:00
|
|
|
|
2017-05-01 10:24:01 -05:00
|
|
|
if is_range:
|
|
|
|
if low < 0 or high < 0:
|
|
|
|
logerr('Negative range boundary')
|
|
|
|
return False
|
|
|
|
|
|
|
|
if low > high:
|
|
|
|
low, high = high, low
|
|
|
|
|
|
|
|
try:
|
|
|
|
# If range starts from 0 throw an error
|
|
|
|
if low <= 0:
|
|
|
|
raise IndexError
|
|
|
|
else:
|
|
|
|
qry = 'SELECT URL from bookmarks where id BETWEEN ? AND ?'
|
|
|
|
for row in self.cur.execute(qry, (low, high)):
|
|
|
|
browse(row[0])
|
|
|
|
return True
|
|
|
|
except IndexError:
|
|
|
|
logerr('Index out of range')
|
|
|
|
return False
|
|
|
|
|
|
|
|
if index < 0:
|
|
|
|
logerr('Invalid index %d', index)
|
|
|
|
return False
|
|
|
|
|
2016-10-01 10:29:53 -05:00
|
|
|
if index == 0:
|
2017-05-01 10:24:01 -05:00
|
|
|
qry = 'SELECT id from bookmarks ORDER BY RANDOM() LIMIT 1'
|
|
|
|
self.cur.execute(qry)
|
2016-10-09 15:33:59 -05:00
|
|
|
result = self.cur.fetchone()
|
2016-10-01 10:29:53 -05:00
|
|
|
|
|
|
|
# Return if no entries in DB
|
2016-10-09 15:33:59 -05:00
|
|
|
if result is None:
|
2016-11-06 07:52:12 -06:00
|
|
|
print('No bookmarks added yet ...')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-10-01 10:29:53 -05:00
|
|
|
|
2016-10-09 15:33:59 -05:00
|
|
|
index = result[0]
|
2016-11-30 13:44:23 -06:00
|
|
|
logdbg('Opening random index %d', index)
|
2016-10-01 10:29:53 -05:00
|
|
|
|
2017-05-01 10:24:01 -05:00
|
|
|
qry = 'SELECT URL FROM bookmarks WHERE id = ? LIMIT 1'
|
2016-05-20 17:05:25 -05:00
|
|
|
try:
|
2017-05-01 10:24:01 -05:00
|
|
|
for row in self.cur.execute(qry, (index,)):
|
2017-02-18 20:48:38 -06:00
|
|
|
browse(row[0])
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-05-20 17:05:25 -05:00
|
|
|
except IndexError:
|
2016-12-26 20:40:08 -06:00
|
|
|
logerr('No matching index %d', index)
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
|
|
|
|
2017-02-10 10:39:02 -06:00
|
|
|
def exportdb(self, filepath, taglist=None):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Export DB bookmarks to file.
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2017-09-17 01:23:18 -05:00
|
|
|
If destination file name ends with '.db', bookmarks are
|
|
|
|
exported to a Buku database file.
|
2017-09-05 15:24:04 -05:00
|
|
|
If destination file name ends with '.md', bookmarks are
|
|
|
|
exported to a markdown file. Otherwise, bookmarks are
|
|
|
|
exported to a Firefox bookmarks.html formatted file.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
filepath : str
|
|
|
|
Path to export destination file.
|
|
|
|
taglist : list, optional
|
|
|
|
Specific tags to export.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
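
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); the file extension selects the export format:

>>> ok = bdb.exportdb('bookmarks.html')  # Firefox bookmarks.html format
>>> ok = bdb.exportdb('bookmarks.md', taglist=['linux'])  # markdown, one tag
>>> ok = bdb.exportdb('backup.db')  # another Buku database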
|
|
|
|
"""
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2016-09-20 13:02:04 -05:00
|
|
|
count = 0
|
2017-03-05 13:19:12 -06:00
|
|
|
timestamp = str(int(time.time()))
|
2016-09-20 13:02:04 -05:00
|
|
|
arguments = []
|
|
|
|
query = 'SELECT * FROM bookmarks'
|
2016-10-26 11:17:01 -05:00
|
|
|
is_tag_valid = False
|
2016-09-20 13:02:04 -05:00
|
|
|
|
|
|
|
if taglist is not None:
|
|
|
|
tagstr = parse_tags(taglist)
|
|
|
|
|
2017-01-29 11:21:55 -06:00
|
|
|
if not tagstr or tagstr == DELIM:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Invalid tag')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-09-20 13:02:04 -05:00
|
|
|
|
2017-01-29 11:21:55 -06:00
|
|
|
tags = tagstr.split(DELIM)
|
2017-03-05 01:56:39 -06:00
|
|
|
query += ' WHERE'
|
2017-01-29 11:21:55 -06:00
|
|
|
for tag in tags:
|
|
|
|
if tag != '':
|
|
|
|
is_tag_valid = True
|
|
|
|
query += " tags LIKE '%' || ? || '%' OR"
|
2017-03-05 01:56:39 -06:00
|
|
|
tag = delim_wrap(tag)
|
2017-01-29 11:21:55 -06:00
|
|
|
arguments += (tag,)
|
|
|
|
|
|
|
|
if is_tag_valid:
|
|
|
|
query = query[:-3]
|
|
|
|
else:
|
|
|
|
query = query[:-6]
|
2016-09-20 13:02:04 -05:00
|
|
|
|
2016-11-30 13:44:23 -06:00
|
|
|
logdbg('(%s), %s', query, arguments)
|
2016-09-20 13:02:04 -05:00
|
|
|
self.cur.execute(query, arguments)
|
|
|
|
resultset = self.cur.fetchall()
|
2017-01-29 11:21:55 -06:00
|
|
|
if not resultset:
|
2017-09-17 01:23:18 -05:00
|
|
|
print('No records found')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-09-20 13:43:32 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
if os.path.exists(filepath):
|
2017-03-05 01:56:39 -06:00
|
|
|
resp = read_in(filepath + ' exists. Overwrite? (y/n): ')
|
2016-06-02 10:39:16 -05:00
|
|
|
if resp != 'y':
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2017-09-17 01:23:18 -05:00
|
|
|
if filepath.endswith('.db'):
|
|
|
|
os.remove(filepath)
|
|
|
|
|
|
|
|
if filepath.endswith('.db'):
|
|
|
|
outdb = BukuDb(dbfile=filepath)
|
|
|
|
qry = 'INSERT INTO bookmarks(URL, metadata, tags, desc, flags) VALUES (?, ?, ?, ?, ?)'
|
|
|
|
for row in resultset:
|
|
|
|
outdb.cur.execute(qry, (row[1], row[2], row[3], row[4], row[5]))
|
|
|
|
|
|
|
|
outdb.conn.commit()
|
|
|
|
outdb.close()
|
|
|
|
return True
|
|
|
|
|
2016-06-02 10:39:16 -05:00
|
|
|
try:
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp = open(filepath, mode='w', encoding='utf-8')
|
2016-06-02 10:39:16 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2017-02-10 10:39:02 -06:00
|
|
|
if filepath.endswith('.md'):
|
|
|
|
for row in resultset:
|
|
|
|
if row[2] == '':
|
2017-03-05 01:56:39 -06:00
|
|
|
out = '- [Untitled](' + row[1] + ')\n'
|
2017-02-10 10:39:02 -06:00
|
|
|
else:
|
2017-03-05 01:56:39 -06:00
|
|
|
out = '- [' + row[2] + '](' + row[1] + ')\n'
|
2017-02-10 10:39:02 -06:00
|
|
|
outfp.write(out)
|
|
|
|
count += 1
|
|
|
|
else:
|
2017-03-05 13:19:12 -06:00
|
|
|
outfp.write('<!DOCTYPE NETSCAPE-Bookmark-file-1>\n\n'
|
2017-08-12 01:52:37 -05:00
|
|
|
'<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">\n'
|
2017-03-05 13:19:12 -06:00
|
|
|
'<TITLE>Bookmarks</TITLE>\n'
|
|
|
|
'<H1>Bookmarks</H1>\n\n'
|
|
|
|
'<DL><p>\n'
|
2017-08-12 01:52:37 -05:00
|
|
|
' <DT><H3 ADD_DATE="%s" LAST_MODIFIED="%s" PERSONAL_TOOLBAR_FOLDER="true">Buku bookmarks</H3>\n'
|
2017-03-05 13:19:12 -06:00
|
|
|
' <DL><p>\n'
|
|
|
|
% (timestamp, timestamp))
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
for row in resultset:
|
2017-08-12 01:52:37 -05:00
|
|
|
out = (' <DT><A HREF="%s" ADD_DATE="%s" LAST_MODIFIED="%s"' % (row[1], timestamp, timestamp))
|
2016-10-29 05:36:29 -05:00
|
|
|
if row[3] != DELIM:
|
2017-03-05 01:56:39 -06:00
|
|
|
out += ' TAGS="' + row[3][1:-1] + '"'
|
|
|
|
out += '>' + row[2] + '</A>\n'
|
2016-10-20 07:48:14 -05:00
|
|
|
if row[4] != '':
|
2017-03-05 01:56:39 -06:00
|
|
|
out += ' <DD>' + row[4] + '\n'
|
2016-10-20 07:48:14 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write(out)
|
2016-10-20 07:48:14 -05:00
|
|
|
count += 1
|
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.write(' </DL><p>\n</DL><p>')
|
2016-10-22 15:56:27 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
outfp.close()
|
2016-10-22 15:56:27 -05:00
|
|
|
print('%s exported' % count)
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
2016-06-02 10:39:16 -05:00
|
|
|
|
2017-09-09 04:33:24 -05:00
|
|
|
def traverse_bm_folder(self, sublist, unique_tag, folder_name, add_parent_folder_as_tag):
|
2017-09-09 07:47:01 -05:00
|
|
|
"""Traverse bookmark folders recursively and find bookmarks.
|
2017-09-09 04:33:24 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
sublist : list
|
2017-09-09 07:47:01 -05:00
|
|
|
List of child entries in bookmark folder.
|
2017-09-09 04:33:24 -05:00
|
|
|
unique_tag : str
|
2017-09-09 07:47:01 -05:00
|
|
|
Timestamp tag in YYYYMonDD format.
|
2017-09-09 04:33:24 -05:00
|
|
|
folder_name : str
|
2017-09-09 07:47:01 -05:00
|
|
|
Name of the parent folder.
|
2017-09-09 04:33:24 -05:00
|
|
|
add_parent_folder_as_tag : bool
|
2017-09-09 07:47:01 -05:00
|
|
|
True if bookmark parent folders should be added as tags else False.
|
2017-09-09 04:33:24 -05:00
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
tuple
|
2017-09-09 07:47:01 -05:00
|
|
|
Bookmark record data.
|
2017-09-09 04:33:24 -05:00
|
|
|
"""
|
|
|
|
|
|
|
|
for item in sublist:
|
|
|
|
if item['type'] == 'folder':
|
|
|
|
for i in self.traverse_bm_folder(item['children'], unique_tag, item['name'], add_parent_folder_as_tag):
|
|
|
|
yield i
|
|
|
|
elif item['type'] == 'url':
|
|
|
|
try:
|
|
|
|
if is_nongeneric_url(item['url']):
|
|
|
|
continue
|
|
|
|
except KeyError:
|
|
|
|
continue
|
|
|
|
|
|
|
|
tags = ''
|
|
|
|
if add_parent_folder_as_tag:
|
|
|
|
tags += folder_name
|
|
|
|
if unique_tag:
|
|
|
|
tags += DELIM + unique_tag
|
|
|
|
yield (item['url'], item['name'], parse_tags([tags]), None, 0, True)
|
|
|
|
|
|
|
|
def load_chrome_database(self, path, unique_tag, add_parent_folder_as_tag):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Open Chrome Bookmarks json file and import data.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
path : str
|
|
|
|
Path to Google Chrome bookmarks file.
|
2017-09-09 04:33:24 -05:00
|
|
|
unique_tag : str
|
2017-09-09 07:47:01 -05:00
|
|
|
Timestamp tag in YYYYMonDD format.
|
2017-09-09 04:33:24 -05:00
|
|
|
add_parent_folder_as_tag : bool
|
2017-09-09 07:47:01 -05:00
|
|
|
True if bookmark parent folders should be added as tags else False.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2017-08-11 10:32:44 -05:00
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
with open(path, 'r') as datafile:
|
|
|
|
data = json.load(datafile)
|
|
|
|
|
2017-09-09 04:33:24 -05:00
|
|
|
roots = data['roots']
|
|
|
|
for entry in roots:
|
2017-09-21 04:41:49 -05:00
|
|
|
# Needed to skip 'sync_transaction_version' key from roots
|
|
|
|
if isinstance(roots[entry], str):
|
|
|
|
continue
|
2017-09-09 04:33:24 -05:00
|
|
|
for item in self.traverse_bm_folder(roots[entry]['children'], unique_tag, roots[entry]['name'], add_parent_folder_as_tag):
|
|
|
|
self.add_rec(*item)
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-09-09 04:33:24 -05:00
|
|
|
def load_firefox_database(self, path, unique_tag, add_parent_folder_as_tag):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Connect to Firefox sqlite db and import bookmarks into BukuDb.
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
path : str
|
|
|
|
Path to Firefox bookmarks sqlite database.
|
2017-09-09 04:33:24 -05:00
|
|
|
unique_tag : str
|
2017-09-09 07:47:01 -05:00
|
|
|
Timestamp tag in YYYYMonDD format.
|
2017-09-09 04:33:24 -05:00
|
|
|
add_parent_folder_as_tag : bool
|
2017-09-09 07:47:01 -05:00
|
|
|
True if bookmark parent folders should be added as tags else False.
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2017-08-11 10:32:44 -05:00
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
# Connect to input DB
|
|
|
|
if sys.version_info >= (3, 4, 4):
|
|
|
|
# Python 3.4.4 and above
|
|
|
|
conn = sqlite3.connect('file:%s?mode=ro' % path, uri=True)
|
|
|
|
else:
|
|
|
|
conn = sqlite3.connect(path)
|
|
|
|
|
|
|
|
cur = conn.cursor()
|
2017-09-09 17:51:38 -05:00
|
|
|
res = cur.execute('SELECT DISTINCT fk, parent, title FROM moz_bookmarks WHERE type=1')
|
2017-08-09 21:56:44 -05:00
|
|
|
# get ids and remove duplicates
|
2017-09-09 17:51:38 -05:00
|
|
|
for row in res.fetchall():
|
|
|
|
# get the url
|
|
|
|
res = cur.execute('SELECT url FROM moz_places where id={}'.format(row[0]))
|
2017-08-09 21:56:44 -05:00
|
|
|
url = res.fetchone()[0]
|
2017-09-09 17:51:38 -05:00
|
|
|
if is_nongeneric_url(url):
|
|
|
|
continue
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
# get tags
|
2017-09-09 17:51:38 -05:00
|
|
|
res = cur.execute('SELECT parent FROM moz_bookmarks WHERE fk={} AND title IS NULL'.format(row[0]))
|
2017-08-11 10:32:44 -05:00
|
|
|
bm_tag_ids = [tid for item in res.fetchall() for tid in item]
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
bookmark_tags = []
|
2017-08-11 10:32:44 -05:00
|
|
|
for bm_tag_id in bm_tag_ids:
|
2017-08-12 01:52:37 -05:00
|
|
|
res = cur.execute('SELECT title FROM moz_bookmarks WHERE id={}'.format(bm_tag_id))
|
2017-08-09 21:56:44 -05:00
|
|
|
bookmark_tags.append(res.fetchone()[0])
|
|
|
|
|
2017-09-09 17:51:38 -05:00
|
|
|
if add_parent_folder_as_tag:
|
|
|
|
# add folder name
|
|
|
|
res = cur.execute('SELECT title FROM moz_bookmarks WHERE id={}'.format(row[1]))
|
|
|
|
bookmark_tags.append(res.fetchone()[0])
|
|
|
|
|
|
|
|
if unique_tag:
|
|
|
|
# add timestamp tag
|
|
|
|
bookmark_tags.append(unique_tag)
|
|
|
|
|
|
|
|
formatted_tags = [DELIM + tag for tag in bookmark_tags]
|
|
|
|
tags = parse_tags(formatted_tags)
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
# get the title
|
2017-09-09 17:51:38 -05:00
|
|
|
if row[2]:
|
|
|
|
title = row[2]
|
2017-08-09 21:56:44 -05:00
|
|
|
else:
|
|
|
|
title = ''
|
2017-09-09 17:51:38 -05:00
|
|
|
|
|
|
|
self.add_rec(url, title, tags, None, 0, True)
|
2017-08-09 21:56:44 -05:00
|
|
|
try:
|
|
|
|
cur.close()
|
|
|
|
conn.close()
|
2017-09-09 17:51:38 -05:00
|
|
|
except Exception as e:
|
|
|
|
logerr(e)
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-08-11 10:58:31 -05:00
|
|
|
def auto_import_from_browser(self):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Import bookmarks from a browser default database file.
|
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
Supports Firefox and Google Chrome.
|
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
|
|
|
|
"""
|
2017-08-11 10:32:44 -05:00
|
|
|
|
2017-09-12 08:19:49 -05:00
|
|
|
FF_BM_DB_PATH = None
|
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
if sys.platform.startswith('linux'):
|
|
|
|
GC_BM_DB_PATH = '~/.config/google-chrome/Default/Bookmarks'
|
2017-08-12 01:52:37 -05:00
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
DEFAULT_FF_FOLDER = os.path.expanduser('~/.mozilla/firefox')
|
|
|
|
profile = get_firefox_profile_name(DEFAULT_FF_FOLDER)
|
2017-09-12 08:19:49 -05:00
|
|
|
if profile:
|
2017-09-19 22:44:22 -05:00
|
|
|
FF_BM_DB_PATH = '~/.mozilla/firefox/{}/places.sqlite'.format(profile)
|
2017-08-09 21:56:44 -05:00
|
|
|
elif sys.platform == 'darwin':
|
2017-08-12 01:52:37 -05:00
|
|
|
GC_BM_DB_PATH = '~/Library/Application Support/Google/Chrome/Default/Bookmarks'
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-08-12 01:52:37 -05:00
|
|
|
DEFAULT_FF_FOLDER = os.path.expanduser('~/Library/Application Support/Firefox')
|
|
|
|
profile = get_firefox_profile_name(DEFAULT_FF_FOLDER)
|
2017-09-12 08:19:49 -05:00
|
|
|
if profile:
|
2017-09-19 22:44:22 -05:00
|
|
|
FF_BM_DB_PATH = '~/Library/Application Support/Firefox/{}/places.sqlite'.format(profile)
|
2017-08-09 21:56:44 -05:00
|
|
|
elif sys.platform == 'win32':
|
|
|
|
username = os.getlogin()
|
2017-08-12 01:52:37 -05:00
|
|
|
GC_BM_DB_PATH = 'C:/Users/{}/AppData/Local/Google/Chrome/User Data/Default/Bookmarks'.format(username)
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-09-21 04:41:49 -05:00
|
|
|
DEFAULT_FF_FOLDER = 'C:/Users/{}/AppData/Roaming/Mozilla/Firefox/'.format(username)
|
2017-08-12 01:52:37 -05:00
|
|
|
profile = get_firefox_profile_name(DEFAULT_FF_FOLDER)
|
2017-09-12 08:19:49 -05:00
|
|
|
if profile:
|
2017-09-19 22:44:22 -05:00
|
|
|
FF_BM_DB_PATH = os.path.join(DEFAULT_FF_FOLDER, '{}/places.sqlite'.format(profile))
|
2017-08-09 21:56:44 -05:00
|
|
|
else:
|
|
|
|
logerr('Buku does not support {} yet'.format(sys.platform))
|
|
|
|
self.close_quit(1)
|
|
|
|
|
2017-09-09 04:33:24 -05:00
|
|
|
if self.chatty:
|
|
|
|
newtag = gen_auto_tag()
|
|
|
|
resp = input('Add parent folder names as tags? (y/n): ')
|
|
|
|
else:
|
|
|
|
newtag = None
|
|
|
|
resp = 'y'
|
|
|
|
add_parent_folder_as_tag = (resp == 'y')
|
|
|
|
|
2017-08-11 10:58:31 -05:00
|
|
|
resp = 'y'
|
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
try:
|
2017-08-11 10:58:31 -05:00
|
|
|
if self.chatty:
|
|
|
|
resp = input('Import bookmarks from google chrome? (y/n): ')
|
|
|
|
if resp == 'y':
|
|
|
|
bookmarks_database = os.path.expanduser(GC_BM_DB_PATH)
|
2017-09-08 12:30:38 -05:00
|
|
|
if not os.path.exists(bookmarks_database):
|
|
|
|
raise FileNotFoundError
|
2017-09-09 04:33:24 -05:00
|
|
|
self.load_chrome_database(bookmarks_database, newtag, add_parent_folder_as_tag)
|
2017-08-11 10:32:44 -05:00
|
|
|
except Exception:
|
2017-09-09 17:51:38 -05:00
|
|
|
print('Could not import bookmarks from google-chrome')
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
try:
|
2017-08-11 10:58:31 -05:00
|
|
|
if self.chatty:
|
|
|
|
resp = input('Import bookmarks from firefox? (y/n): ')
|
|
|
|
if resp == 'y':
|
|
|
|
bookmarks_database = os.path.expanduser(FF_BM_DB_PATH)
|
2017-09-08 12:30:38 -05:00
|
|
|
if not os.path.exists(bookmarks_database):
|
|
|
|
raise FileNotFoundError
|
2017-09-09 04:33:24 -05:00
|
|
|
self.load_firefox_database(bookmarks_database, newtag, add_parent_folder_as_tag)
|
2017-08-11 10:32:44 -05:00
|
|
|
except Exception:
|
2017-09-09 17:51:38 -05:00
|
|
|
print('Could not import bookmarks from firefox')
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
self.conn.commit()
|
|
|
|
|
2017-09-09 04:33:24 -05:00
|
|
|
if newtag:
|
|
|
|
print('\nAuto-generated tag: %s' % newtag)
|
|
|
|
|
2017-07-19 08:45:35 -05:00
|
|
|
def importdb(self, filepath, tacit=False):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Import bookmarks from a html or a markdown file.
|
|
|
|
|
|
|
|
Supports Firefox, Google Chrome, and IE exported html bookmarks.
|
|
|
|
Supports markdown files with extension '.md'.
|
2017-09-16 22:03:06 -05:00
|
|
|
Supports importing bookmarks from another Buku database file.
|
2017-09-05 15:24:04 -05:00
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
filepath : str
|
|
|
|
Path to file to import.
|
|
|
|
tacit : bool, optional
|
|
|
|
If True, no questions asked and folder names are automatically
|
2017-09-08 10:48:39 -05:00
|
|
|
imported as tags from bookmarks html.
|
|
|
|
If True, automatic timestamp tag is NOT added.
|
|
|
|
Default is False.
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
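
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb
instance); the file extension selects the import format:

>>> ok = bdb.importdb('bookmarks.html')  # browser-exported html
>>> ok = bdb.importdb('notes.md', tacit=True)  # markdown, no prompts
>>> ok = bdb.importdb('other.db')  # merge another Buku database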
|
|
|
|
"""
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2017-09-16 22:03:06 -05:00
|
|
|
if filepath.endswith('.db'):
|
|
|
|
return self.mergedb(filepath)
|
|
|
|
|
2017-07-19 08:45:35 -05:00
|
|
|
if not tacit:
|
2017-09-08 10:48:39 -05:00
|
|
|
newtag = gen_auto_tag()
|
2017-07-19 08:45:35 -05:00
|
|
|
else:
|
|
|
|
newtag = None
|
|
|
|
|
2017-02-10 10:39:02 -06:00
|
|
|
if filepath.endswith('.md'):
|
2017-07-21 22:31:29 -05:00
|
|
|
for item in import_md(filepath=filepath, newtag=newtag):
|
|
|
|
self.add_rec(*item)
|
2017-02-10 10:39:02 -06:00
|
|
|
|
|
|
|
self.conn.commit()
|
|
|
|
else:
|
2016-10-20 07:48:14 -05:00
|
|
|
try:
|
|
|
|
import bs4
|
2016-10-29 04:35:44 -05:00
|
|
|
with open(filepath, mode='r', encoding='utf-8') as infp:
|
|
|
|
soup = bs4.BeautifulSoup(infp, 'html.parser')
|
2016-10-20 07:48:14 -05:00
|
|
|
except ImportError:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Beautiful Soup not found')
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-10-20 07:48:14 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2017-04-05 19:48:55 -05:00
|
|
|
|
2017-07-19 23:44:37 -05:00
|
|
|
if not tacit:
|
2017-09-03 08:54:13 -05:00
|
|
|
resp = input('Add parent folder names as tags? (y/n): ')
|
2017-07-19 23:44:37 -05:00
|
|
|
else:
|
|
|
|
resp = 'y'
|
2017-04-05 19:48:55 -05:00
|
|
|
|
2017-07-21 22:31:29 -05:00
|
|
|
add_parent_folder_as_tag = (resp == 'y')
|
|
|
|
for item in import_html(soup, add_parent_folder_as_tag, newtag):
|
|
|
|
self.add_rec(*item)
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-20 07:48:14 -05:00
|
|
|
self.conn.commit()
|
2016-10-29 04:35:44 -05:00
|
|
|
infp.close()
|
2016-10-22 01:25:41 -05:00
|
|
|
|
2017-09-08 10:48:39 -05:00
|
|
|
if newtag:
|
|
|
|
print('\nAuto-generated tag: %s' % newtag)
|
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
|
|
|
|
2016-10-29 05:36:29 -05:00
|
|
|
def mergedb(self, path):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Merge bookmarks from another Buku database file.
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
path : str
|
|
|
|
Path to DB file to merge.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True on success, False on failure.
|
|
|
|
"""
|
2016-05-29 01:09:51 -05:00
|
|
|
|
|
|
|
try:
|
2016-06-11 01:03:56 -05:00
|
|
|
# Connect to input DB
|
2016-09-09 13:52:32 -05:00
|
|
|
if sys.version_info >= (3, 4, 4):
|
2016-06-11 01:03:56 -05:00
|
|
|
# Python 3.4.4 and above
|
2016-11-29 16:51:11 -06:00
|
|
|
indb_conn = sqlite3.connect('file:%s?mode=ro' % path, uri=True)
|
2016-06-11 01:03:56 -05:00
|
|
|
else:
|
2016-11-29 16:51:11 -06:00
|
|
|
indb_conn = sqlite3.connect(path)
|
2016-06-11 01:03:56 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
indb_cur = indb_conn.cursor()
|
2016-10-29 05:00:13 -05:00
|
|
|
indb_cur.execute('SELECT * FROM bookmarks')
|
2016-05-29 01:09:51 -05:00
|
|
|
except Exception as e:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr(e)
|
2016-10-29 02:54:10 -05:00
|
|
|
return False
|
2016-05-29 01:09:51 -05:00
|
|
|
|
2016-10-29 04:35:44 -05:00
|
|
|
resultset = indb_cur.fetchall()
|
2017-01-29 11:21:55 -06:00
|
|
|
if resultset:
|
|
|
|
for row in resultset:
|
|
|
|
self.add_rec(row[1], row[2], row[3], row[4], row[5], True)
|
2016-06-01 07:02:26 -05:00
|
|
|
|
2016-10-29 05:00:13 -05:00
|
|
|
self.conn.commit()
|
2016-05-29 01:09:51 -05:00
|
|
|
|
|
|
|
try:
|
2016-10-29 04:35:44 -05:00
|
|
|
indb_cur.close()
|
|
|
|
indb_conn.close()
|
2016-05-31 12:39:34 -05:00
|
|
|
except Exception:
|
2016-05-29 01:09:51 -05:00
|
|
|
pass
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2016-10-29 02:54:10 -05:00
|
|
|
return True
|
|
|
|
|
2016-12-10 02:35:17 -06:00
|
|
|
def tnyfy_url(self, index=0, url=None, shorten=True):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Shorten a URL using Google URL shortener.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
index : int, optional (if URL is provided)
|
|
|
|
DB index of the bookmark with the URL to shorten. Default is 0.
|
|
|
|
url : str, optional (if index is provided)
|
2017-09-16 12:38:11 -05:00
|
|
|
URL to shorten or expand.
|
2017-09-05 15:24:04 -05:00
|
|
|
shorten : bool, optional
|
|
|
|
True to shorten, False to expand. Default is True.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
str
|
|
|
|
Shortened url on success, None on failure.
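
Examples
--------
A minimal usage sketch (assuming `bdb` is an initialized BukuDb instance
and network access to tny.im; the short URL below is hypothetical):

>>> short = bdb.tnyfy_url(url='https://example.com')  # shorten
>>> full = bdb.tnyfy_url(url='https://tny.im/abcd', shorten=False)  # expand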
|
|
|
|
"""
|
2016-11-12 09:47:36 -06:00
|
|
|
|
|
|
|
if not index and not url:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('Either a valid DB index or URL required')
|
2016-11-12 09:47:36 -06:00
|
|
|
return None
|
|
|
|
|
|
|
|
if index:
|
2017-08-12 01:52:37 -05:00
|
|
|
self.cur.execute('SELECT url FROM bookmarks WHERE id = ? LIMIT 1', (index,))
|
2016-11-12 09:47:36 -06:00
|
|
|
results = self.cur.fetchall()
|
2017-01-29 11:21:55 -06:00
|
|
|
if not results:
|
2016-11-12 09:47:36 -06:00
|
|
|
return None
|
|
|
|
|
2017-01-29 11:21:55 -06:00
|
|
|
url = results[0][0]
|
|
|
|
|
2016-12-03 08:03:14 -06:00
|
|
|
proxies = {
|
|
|
|
'https': os.environ.get('https_proxy'),
|
|
|
|
}
|
2016-12-10 02:35:17 -06:00
|
|
|
|
2017-03-05 01:56:39 -06:00
|
|
|
from urllib.parse import quote_plus as qp
|
|
|
|
|
2016-12-10 02:35:17 -06:00
|
|
|
urlbase = 'https://tny.im/yourls-api.php?action='
|
|
|
|
if shorten:
|
2017-03-05 01:56:39 -06:00
|
|
|
_u = urlbase + 'shorturl&format=simple&url=' + qp(url)
|
2016-12-10 02:35:17 -06:00
|
|
|
else:
|
2017-03-05 01:56:39 -06:00
|
|
|
_u = urlbase + 'expand&format=simple&shorturl=' + qp(url)
|
2016-12-03 08:03:14 -06:00
|
|
|
|
|
|
|
try:
|
2016-12-10 02:35:17 -06:00
|
|
|
r = requests.post(_u,
|
2016-12-03 08:03:14 -06:00
|
|
|
headers={
|
|
|
|
'content-type': 'application/json',
|
|
|
|
'User-Agent': USER_AGENT
|
|
|
|
},
|
|
|
|
proxies=proxies)
|
|
|
|
except Exception as e:
|
|
|
|
logerr(e)
|
|
|
|
return None
|
|
|
|
|
2016-11-12 09:47:36 -06:00
|
|
|
if r.status_code != 200:
|
2016-11-22 12:09:03 -06:00
|
|
|
logerr('[%s] %s', r.status_code, r.reason)
|
2016-11-12 09:47:36 -06:00
|
|
|
return None
|
|
|
|
|
|
|
|
return r.text
|
|
|
|
|
2016-11-20 09:10:56 -06:00
|
|
|
def fixtags(self):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Undocumented API to fix tags set in earlier versions.
|
|
|
|
|
|
|
|
Functionalities:
|
2016-11-20 09:10:56 -06:00
|
|
|
|
|
|
|
1. Remove duplicate tags
|
|
|
|
2. Sort tags
|
|
|
|
3. Use lower case to store tags
|
2017-09-05 15:24:04 -05:00
|
|
|
"""
|
2016-11-20 09:10:56 -06:00
|
|
|
|
|
|
|
to_commit = False
|
|
|
|
self.cur.execute('SELECT id, tags FROM bookmarks ORDER BY id ASC')
|
|
|
|
resultset = self.cur.fetchall()
|
|
|
|
query = 'UPDATE bookmarks SET tags = ? WHERE id = ?'
|
|
|
|
for row in resultset:
|
|
|
|
oldtags = row[1]
|
2017-04-07 10:13:24 -05:00
|
|
|
if oldtags == DELIM:
|
2016-11-20 09:10:56 -06:00
|
|
|
continue
|
|
|
|
|
|
|
|
tags = parse_tags([oldtags])
|
|
|
|
if tags == oldtags:
|
|
|
|
continue
|
|
|
|
|
|
|
|
self.cur.execute(query, (tags, row[0],))
|
|
|
|
to_commit = True
|
|
|
|
|
|
|
|
if to_commit:
|
|
|
|
self.conn.commit()
|
|
|
|
|
2017-09-17 01:23:18 -05:00
|
|
|
def close(self):
|
|
|
|
"""Close a DB connection."""
|
|
|
|
|
|
|
|
if self.conn is not None:
|
|
|
|
try:
|
|
|
|
self.cur.close()
|
|
|
|
self.conn.close()
|
|
|
|
except Exception:
|
|
|
|
# ignore errors here, we're closing down
|
|
|
|
pass
|
|
|
|
|
2016-05-31 13:18:06 -05:00
|
|
|
def close_quit(self, exitval=0):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Close a DB connection and exit.
|
2016-10-29 02:54:10 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
exitval : int, optional
|
|
|
|
Program exit value.
|
|
|
|
"""
|
2016-05-31 13:18:06 -05:00
|
|
|
|
|
|
|
if self.conn is not None:
|
|
|
|
try:
|
|
|
|
self.cur.close()
|
|
|
|
self.conn.close()
|
2016-09-09 13:52:32 -05:00
|
|
|
except Exception:
|
|
|
|
# ignore errors here, we're closing down
|
2016-05-31 13:18:06 -05:00
|
|
|
pass
|
|
|
|
sys.exit(exitval)
|
|
|
|
|
2016-09-09 10:07:01 -05:00
|
|
|
|
2016-12-31 10:50:18 -06:00
|
|
|
class ExtendedArgumentParser(argparse.ArgumentParser):
|
2017-09-16 12:38:11 -05:00
|
|
|
"""Extend classic argument parser."""
|
2016-12-31 10:50:18 -06:00
|
|
|
|
|
|
|
@staticmethod
|
2017-02-18 20:48:38 -06:00
|
|
|
def program_info(file=sys.stdout):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Print program info.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
file : file, optional
|
|
|
|
File to write program info to. Default is sys.stdout.
|
|
|
|
"""
|
2017-01-03 23:11:27 -06:00
|
|
|
if sys.platform == 'win32' and file == sys.stdout:
|
|
|
|
file = sys.stderr
|
2016-12-31 10:50:18 -06:00
|
|
|
|
2017-01-04 10:17:02 -06:00
|
|
|
file.write('''
|
|
|
|
SYMBOLS:
|
2017-04-08 15:19:25 -05:00
|
|
|
> url
|
2017-01-04 10:17:02 -06:00
|
|
|
+ comment
|
|
|
|
# tags
|
2016-12-31 10:50:18 -06:00
|
|
|
|
|
|
|
Version %s
|
2017-01-04 10:17:02 -06:00
|
|
|
Copyright © 2015-2017 %s
|
|
|
|
License: %s
|
2016-12-31 10:50:18 -06:00
|
|
|
Webpage: https://github.com/jarun/Buku
|
2017-01-04 10:17:02 -06:00
|
|
|
''' % (__version__, __author__, __license__))
|
2016-12-31 10:50:18 -06:00
|
|
|
|
|
|
|
@staticmethod
|
2017-02-18 20:48:38 -06:00
|
|
|
def prompt_help(file=sys.stdout):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Print prompt help.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
file : file, optional
|
|
|
|
File to write prompt help to. Default is sys.stdout.
|
|
|
|
"""
|
2016-12-31 10:50:18 -06:00
|
|
|
file.write('''
|
2017-08-13 04:03:06 -05:00
|
|
|
PROMPT KEYS:
|
2017-07-24 22:58:32 -05:00
|
|
|
1-N browse search result indices and/or ranges
|
|
|
|
a open all results in browser
|
|
|
|
s keyword [...] search for records with ANY keyword
|
|
|
|
S keyword [...] search for records with ALL keywords
|
|
|
|
d match substrings ('pen' matches 'opened')
|
|
|
|
r expression run a regex search
|
2017-08-14 16:07:01 -05:00
|
|
|
t [...] search bookmarks by tags or show taglist
|
2017-08-13 04:03:06 -05:00
|
|
|
list index after a tag listing shows records with the tag
|
|
|
|
o id|range [...] browse bookmarks by indices and/or ranges
|
|
|
|
p id|range [...] print bookmarks by indices and/or ranges
|
|
|
|
g [taglist id|range ...] [>>|>|<<] record id|range [...]
|
|
|
|
append, set, remove (all or specific) tags
|
|
|
|
w [editor|id] edit and add or update a bookmark
|
2017-07-24 22:58:32 -05:00
|
|
|
? show this help
|
|
|
|
q, ^D, double Enter exit buku
|
2016-12-31 10:50:18 -06:00
|
|
|
|
|
|
|
''')
|
|
|
|
|
2017-08-22 17:09:43 -05:00
|
|
|
@staticmethod
|
|
|
|
def is_colorstr(arg):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Check if a string is a valid color string.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
arg : str
|
|
|
|
Color string to validate.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
str
|
|
|
|
Same color string that was passed as an argument.
|
|
|
|
|
|
|
|
Raises
|
|
|
|
------
|
|
|
|
ArgumentTypeError
|
|
|
|
If the arg is not a valid color string.
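
Examples
--------
An illustrative sketch; a valid color string is exactly five COLORMAP keys:

>>> ExtendedArgumentParser.is_colorstr('oKlxm')
'oKlxm'

A string of the wrong length, or one containing an unknown key,
raises ArgumentTypeError.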
|
|
|
|
"""
|
2017-08-22 17:09:43 -05:00
|
|
|
try:
|
|
|
|
assert len(arg) == 5
|
|
|
|
for c in arg:
|
|
|
|
assert c in COLORMAP
|
|
|
|
except AssertionError:
|
|
|
|
raise argparse.ArgumentTypeError('%s is not a valid color string' % arg)
|
|
|
|
return arg
|
|
|
|
|
2016-12-31 10:50:18 -06:00
|
|
|
# Help
|
2017-01-10 10:27:20 -06:00
|
|
|
def print_help(self, file=sys.stdout):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Print help prompt.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
file : file, optional
|
|
|
|
File to write program info to. Default is sys.stdout.
|
|
|
|
"""
|
2016-12-31 10:50:18 -06:00
|
|
|
super(ExtendedArgumentParser, self).print_help(file)
|
2017-02-18 20:48:38 -06:00
|
|
|
self.program_info(file)
|
2016-12-31 10:50:18 -06:00
|
|
|
|
|
|
|
|
|
|
|
# ----------------
|
|
|
|
# Helper functions
|
|
|
|
# ----------------
|
2016-05-22 16:03:47 -05:00
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
def get_firefox_profile_name(path):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""List folder and detect default Firefox profile name.
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
profile : str
|
|
|
|
Firefox profile name.
|
|
|
|
"""
|
2017-09-21 04:41:49 -05:00
|
|
|
from configparser import ConfigParser, NoOptionError
|
|
|
|
|
|
|
|
profile_path = os.path.join(path, 'profiles.ini')
|
|
|
|
if os.path.exists(profile_path):
|
|
|
|
config = ConfigParser()
|
|
|
|
config.read(profile_path)
|
|
|
|
profiles_names = [section for section in config.sections() if section.startswith('Profile')]
|
|
|
|
if not profiles_names:
|
|
|
|
return None
|
|
|
|
for name in profiles_names:
|
|
|
|
try:
|
|
|
|
# If profile is default
|
|
|
|
if config.getboolean(name, 'default'):
|
|
|
|
profile_path = config.get(name, 'path')
|
|
|
|
return profile_path
|
|
|
|
except NoOptionError:
|
|
|
|
continue
|
2017-08-11 10:32:44 -05:00
|
|
|
|
2017-09-21 04:41:49 -05:00
|
|
|
# There is no default profile
|
|
|
|
return None
|
|
|
|
else:
|
|
|
|
logdbg('get_firefox_profile_name(): {} does not exist'.format(path))
|
|
|
|
return None
|
2017-08-09 21:56:44 -05:00
|
|
|
|
|
|
|
|
|
|
|
def walk(root):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Recursively iterate over json.
|
2017-08-09 21:56:44 -05:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
root : json element
|
|
|
|
Base node of the json data.
|
|
|
|
"""
|
2017-08-11 10:32:44 -05:00
|
|
|
|
2017-08-09 21:56:44 -05:00
|
|
|
for element in root['children']:
|
|
|
|
if element['type'] == 'url':
|
|
|
|
url = element['url']
|
|
|
|
title = element['name']
|
|
|
|
yield (url, title, None, None, 0, True)
|
|
|
|
else:
|
|
|
|
yield from walk(element)  # recurse into subfolders
|
|
|
|
|
|
|
|
|
2016-11-08 11:32:45 -06:00
|
|
|
def is_bad_url(url):
|
2017-09-05 15:24:04 -05:00
|
|
|
"""Check if URL is malformed.
|
|
|
|
|
|
|
|
.. note:: This API is not bulletproof but works in most cases.
|
2016-11-08 11:32:45 -06:00
|
|
|
|
2017-09-05 15:24:04 -05:00
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
url : str
|
|
|
|
URL to scan.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
bool
|
|
|
|
True if URL is malformed, False otherwise.
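
Examples
--------
Illustrative sketches:

>>> is_bad_url('example.com')
False
>>> is_bad_url('example')  # no '.' in the netloc
True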
|
|
|
|
"""
|
2016-11-08 11:32:45 -06:00
|
|
|
|
|
|
|
# Get the netloc token
|
2016-12-09 12:28:19 -06:00
|
|
|
netloc = parse_url(url).netloc
|
2016-11-08 11:32:45 -06:00
|
|
|
if not netloc:
|
|
|
|
# Try to prepend '//' and get netloc
|
2016-12-09 12:28:19 -06:00
|
|
|
netloc = parse_url('//' + url).netloc
|
2016-11-08 11:32:45 -06:00
|
|
|
if not netloc:
|
|
|
|
return True
|
|
|
|
|
2016-11-30 13:44:23 -06:00
|
|
|
logdbg('netloc: %s', netloc)
|
2016-11-08 12:13:32 -06:00
|
|
|
|
|
|
|
# netloc cannot start or end with a '.'
|
|
|
|
if netloc.startswith('.') or netloc.endswith('.'):
|
2016-11-08 11:32:45 -06:00
|
|
|
return True
|
|
|
|
|
|
|
|
# netloc should have at least one '.'
|
2017-02-04 13:27:30 -06:00
|
|
|
if netloc.rfind('.') < 0:
|
2016-11-08 11:32:45 -06:00
|
|
|
return True
|
|
|
|
|
2016-11-08 12:13:32 -06:00
|
|
|
return False
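
# Illustrative checks (assumed inputs, not from the original source):
#   is_bad_url('http://example.com')   # False: netloc 'example.com' is sane
#   is_bad_url('http://.example.com')  # True: netloc starts with '.'
#   is_bad_url('about')                # True: netloc has no '.'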


def is_nongeneric_url(url):
    """Returns True for URLs which are non-http and non-generic.

    Parameters
    ----------
    url : str
        URL to scan.

    Returns
    -------
    bool
        True if URL is a non-generic URL, False otherwise.
    """
    ignored_prefix = ['place:', 'file://', 'apt:']

    for prefix in ignored_prefix:
        if url.startswith(prefix):
            return True

    return False
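
# Illustrative checks (assumed inputs):
#   is_nongeneric_url('place:sort=8')        # True: Firefox-internal URI
#   is_nongeneric_url('apt:vim')             # True: package manager URI
#   is_nongeneric_url('http://example.com')  # False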


def is_ignored_mime(url):
    """Check if URL links to ignored MIME.

    .. note:: Only a 'HEAD' request is made for these URLs.

    Parameters
    ----------
    url : str
        URL to scan.

    Returns
    -------
    bool
        True if URL links to ignored MIME, False otherwise.
    """

    for mime in SKIP_MIMES:
        if url.lower().endswith(mime):
            logdbg('matched MIME: %s', mime)
            return True

    return False
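
# Illustrative checks (assumed inputs):
#   is_ignored_mime('http://example.com/paper.PDF')   # True: '.pdf' is in SKIP_MIMES
#   is_ignored_mime('http://example.com/index.html')  # False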


def get_page_title(resp):
    """Invoke HTML parser and extract title from HTTP response.

    Parameters
    ----------
    resp : HTTP response
        Response from GET request.

    Returns
    -------
    str
        Title fetched from parsed page.
    """
    parser = BukuHTMLParser()

    try:
        parser.feed(resp.data.decode(errors='replace'))
    except Exception as e:
        # Suppress Exception due to intentional self.reset() in BHTMLParser
        if (logger.isEnabledFor(logging.DEBUG) and str(e) != 'we should not get here!'):
            logerr('get_page_title(): %s', e)
    finally:
        return parser.parsed_title


def gen_headers():
    """Generate headers for network connection."""
    global myheaders, myproxy

    myheaders = {
        'Accept-Encoding': 'gzip,deflate',
        'User-Agent': USER_AGENT,
        'Accept': '*/*',
        'Cookie': '',
        'DNT': '1'
    }

    myproxy = os.environ.get('https_proxy')
    if myproxy:
        try:
            url = parse_url(myproxy)
        except Exception as e:
            logerr(e)
            return

        # Strip username and password (if present) and update headers
        if url.auth:
            myproxy = myproxy.replace(url.auth + '@', '')
            auth_headers = make_headers(basic_auth=url.auth)
            myheaders.update(auth_headers)

        logdbg('proxy: [%s]', myproxy)


def get_PoolManager():
    """Creates a pool manager with proxy support, if applicable.

    Returns
    -------
    ProxyManager or PoolManager
        ProxyManager if https_proxy is defined, PoolManager otherwise.
    """
    if myproxy:
        return urllib3.ProxyManager(myproxy, num_pools=1, headers=myheaders)

    return urllib3.PoolManager(num_pools=1, headers=myheaders)
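
# A sketch of the proxy flow (hypothetical value): with
#   https_proxy=http://joe:secret@proxy.example.com:3128
# gen_headers() strips 'joe:secret@' from myproxy and folds the credentials
# into a basic-auth header via make_headers(); get_PoolManager() then hands
# back a ProxyManager bound to the stripped proxy URL instead of a plain
# PoolManager.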


def network_handler(url, http_head=False):
    """Handle server connection and redirections.

    Parameters
    ----------
    url : str
        URL to fetch.
    http_head : bool
        If True, send only HTTP HEAD request. Default is False.

    Returns
    -------
    tuple
        (title, recognized mime, bad url).
    """
    page_title = None
    http_handler = None  # guard: may stay None if get_PoolManager() fails

    if is_bad_url(url):
        return ('', 0, 1)

    if is_ignored_mime(url) or http_head:
        method = 'HEAD'
    else:
        method = 'GET'

    if not myheaders:
        gen_headers()

    try:
        http_handler = get_PoolManager()

        while True:
            resp = http_handler.request_encode_url(method, url, timeout=40)

            if resp.status == 200:
                if method == 'GET':
                    page_title = get_page_title(resp)
            elif resp.status == 403 and url.endswith('/'):
                # HTTP response Forbidden
                # Handle URLs in the form of https://www.domain.com/
                # which fail when trying to fetch resource '/'
                # retry without trailing '/'

                logdbg('Received status 403: retrying...')
                # Remove trailing /
                url = url[:-1]
                resp.close()
                continue
            else:
                logerr('[%s] %s', resp.status, resp.reason)

            if resp:
                resp.close()

            break
    except Exception as e:
        logerr('network_handler(): %s', e)
    finally:
        if http_handler:
            http_handler.clear()
        if method == 'HEAD':
            return ('', 1, 0)
        if page_title is None:
            return ('', 0, 0)
        return (page_title.strip().replace('\n', ''), 0, 0)
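
# Expected outcomes (a sketch; 'Example Domain' is the assumed remote title):
#   network_handler('http://example.com')                  # ('Example Domain', 0, 0)
#   network_handler('http://example.com', http_head=True)  # ('', 1, 0)
#   network_handler('no-netloc')                           # ('', 0, 1) via is_bad_url()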


def parse_tags(keywords=[]):
    """Format and get tag string from tokens.

    Parameters
    ----------
    keywords : list, optional
        List of tags to parse. Default is empty list.

    Returns
    -------
    str
        Comma-delimited string of tags.
    DELIM : str
        If no keywords, returns the delimiter.
    None
        If keywords is None.
    """
    if keywords is None:
        return None

    if not keywords:
        return DELIM

    tags = DELIM

    # Cleanse and get the tags
    tagstr = ' '.join(keywords)
    marker = tagstr.find(DELIM)

    while marker >= 0:
        token = tagstr[0:marker]
        tagstr = tagstr[marker + 1:]
        marker = tagstr.find(DELIM)
        token = token.strip()
        if token == '':
            continue

        tags += token + DELIM

    tagstr = tagstr.strip()
    if tagstr != '':
        tags += tagstr + DELIM

    logdbg('keywords: %s', keywords)
    logdbg('parsed tags: [%s]', tags)

    if tags == DELIM:
        return tags

    orig_tags = tags.strip(DELIM).split(DELIM)

    # Add unique tags in lower case
    unique_tags = []
    for tag in orig_tags:
        tag = tag.lower()
        if tag not in unique_tags:
            unique_tags += (tag, )

    # Sort the tags
    sorted_tags = sorted(unique_tags)

    # Wrap with delimiter
    return delim_wrap(DELIM.join(sorted_tags))
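
# Illustrative results (assumed inputs):
#   parse_tags(['linux,programming,Linux'])  # ',linux,programming,'
#   parse_tags([])                           # ',' (DELIM)
#   parse_tags(None)                         # None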


def prep_tag_search(tags):
    """Prepare list of tags to search and determine search operator.

    Parameters
    ----------
    tags : str
        String list of tags to search.

    Returns
    -------
    tuple
        (list of formatted tags to search,
         a string indicating query search operator (either OR or AND),
         a regex string of tags or None if ' - ' delimiter not in tags).
    """

    excluded_tags = None
    if ' - ' in tags:
        tags, excluded_tags = tags.split(' - ', 1)

        excluded_taglist = [delim_wrap(t.strip()) for t in excluded_tags.split(',')]
        # join with pipe to construct regex string
        excluded_tags = '|'.join(excluded_taglist)

    search_operator = 'OR'
    tag_delim = ','
    if ' + ' in tags:
        search_operator = 'AND'
        tag_delim = ' + '

    tags = [delim_wrap(t.strip()) for t in tags.split(tag_delim)]

    return tags, search_operator, excluded_tags
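
# Illustrative results (assumed inputs):
#   prep_tag_search('linux, kernel - windows')
#   # -> ([',linux,', ',kernel,'], 'OR', ',windows,')
#   prep_tag_search('linux + kernel')
#   # -> ([',linux,', ',kernel,'], 'AND', None)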


def gen_auto_tag():
    """Generate a tag in Year-Month-Date format.

    Returns
    -------
    str
        New tag as YYYYMonDD.
    """
    import calendar as cal

    t = time.localtime()
    return ('%d%s%02d' % (t.tm_year, cal.month_abbr[t.tm_mon], t.tm_mday))
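
# Illustrative result: gen_auto_tag() returns e.g. '2017Sep18' when run on
# 18 Sep 2017.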


def edit_at_prompt(obj, nav, suggest=False):
    """Edit and add or update a bookmark.

    Parameters
    ----------
    obj : BukuDb instance
        A valid instance of BukuDb class.
    nav : str
        Navigation command argument passed at prompt by user.
    suggest : bool, optional
        If True, suggest similar tags on new bookmark addition.
    """
    if nav == 'w':
        editor = get_system_editor()
        if not is_editor_valid(editor):
            return
    elif is_int(nav[2:]):
        obj.edit_update_rec(int(nav[2:]))
        return
    else:
        editor = nav[2:]

    result = edit_rec(editor, '', None, DELIM, None)
    if result is not None:
        url, title, tags, desc = result
        if suggest:
            tags = obj.suggest_similar_tag(tags)
        obj.add_rec(url, title, tags, desc)


def taglist_subprompt(obj, noninteractive=False):
    """Additional prompt to show unique tag list.

    Parameters
    ----------
    obj : BukuDb instance
        A valid instance of BukuDb class.
    noninteractive : bool, optional
        If True, does not seek user input. Default is False.

    Returns
    -------
    str
        New command string.
    """
    unique_tags, dic = obj.get_tag_all()
    new_results = True

    while True:
        if new_results:
            if not unique_tags:
                count = 0
                print('0 tags')
            else:
                count = 1
                for tag in unique_tags:
                    print('%6d. %s (%d)' % (count, tag, dic[tag]))
                    count += 1
                print()

        if noninteractive:
            return

        try:
            nav = read_in(promptmsg)
            if not nav:
                nav = read_in(promptmsg)
                if not nav:
                    # Quit on double enter
                    return 'q'
            nav = nav.strip()
        except EOFError:
            return 'q'

        if is_int(nav) and int(nav) > 0 and int(nav) < count:
            return 't ' + unique_tags[int(nav) - 1]
        elif is_int(nav):
            print('No matching index %s' % nav)
            new_results = False
        elif nav == 't':
            new_results = True
        elif (nav == 'q' or nav == 'd' or nav == '?' or
              nav.startswith('s ') or nav.startswith('S ') or nav.startswith('r ') or
              nav.startswith('t ') or nav.startswith('o ') or nav.startswith('p ') or
              nav.startswith('g ')) or nav == 'w' or nav.startswith('w '):
            return nav
        else:
            print('Invalid input')
            new_results = False


def prompt(obj, results, noninteractive=False, deep=False, subprompt=False, suggest=False):
    """Show each matching result from a search and prompt.

    Parameters
    ----------
    obj : BukuDb instance
        A valid instance of BukuDb class.
    results : list
        Search result set from a DB query.
    noninteractive : bool, optional
        If True, does not seek user input. Default is False.
    deep : bool, optional
        Use deep search. Default is False.
    subprompt : bool, optional
        If True, jump directly to subprompt.
    suggest : bool, optional
        If True, suggest similar tags on edit and add bookmark.
    """
    if type(obj) is not BukuDb:
        logerr('Not a BukuDb instance')
        return

    new_results = True

    while True:
        if not subprompt:
            if new_results:
                if results:
                    count = 0

                    for row in results:
                        count += 1
                        print_single_rec(row, count)
                else:
                    print('0 results')

            if noninteractive:
                return

            try:
                nav = read_in(promptmsg)
                if not nav:
                    nav = read_in(promptmsg)
                    if not nav:
                        # Quit on double enter
                        break
                nav = nav.strip()
            except EOFError:
                return
        else:
            nav = 't'
            subprompt = False

        # list tags with 't'
        if nav == 't':
            nav = taglist_subprompt(obj, noninteractive)
            if noninteractive:
                return

        # search ANY match with new keywords
        if nav.startswith('s '):
            results = obj.searchdb(nav[2:].split(), False, deep)
            new_results = True
            continue

        # search ALL match with new keywords
        if nav.startswith('S '):
            results = obj.searchdb(nav[2:].split(), True, deep)
            new_results = True
            continue

        # regular expressions search with new keywords
        if nav.startswith('r '):
            results = obj.searchdb(nav[2:].split(), True, regex=True)
            new_results = True
            continue

        # tag search with new keywords
        if nav.startswith('t '):
            results = obj.search_by_tag(nav[2:])
            new_results = True
            continue

        # quit with 'q'
        if nav == 'q':
            return

        # No new results fetched beyond this point
        new_results = False

        # toggle deep search with 'd'
        if nav == 'd':
            deep = not deep
            if deep:
                print('deep search on')
            else:
                print('deep search off')

            continue

        # Show help with '?'
        if nav == '?':
            ExtendedArgumentParser.prompt_help(sys.stdout)
            continue

        # Edit and add or update
        if nav == 'w' or nav.startswith('w '):
            edit_at_prompt(obj, nav, suggest)
            continue

        # Append or overwrite tags
        if nav.startswith('g '):
            unique_tags, dic = obj.get_tag_all()
            _count = obj.set_tag(nav[2:], unique_tags)
            if _count == -1:
                print('Invalid input')
            else:
                print('%d updated' % _count)
            continue

        # Print bookmarks by DB index
        if nav.startswith('p '):
            id_list = nav[2:].split()
            try:
                for id in id_list:
                    if is_int(id):
                        obj.print_rec(int(id))
                    elif '-' in id:
                        vals = [int(x) for x in id.split('-')]
                        obj.print_rec(0, vals[0], vals[-1], True)
                    else:
                        print('Invalid input')
            except ValueError:
                print('Invalid input')
            continue

        # Browse bookmarks by DB index
        if nav.startswith('o '):
            id_list = nav[2:].split()
            try:
                for id in id_list:
                    if is_int(id):
                        obj.browse_by_index(int(id))
                    elif '-' in id:
                        vals = [int(x) for x in id.split('-')]
                        obj.browse_by_index(0, vals[0], vals[-1], True)
                    else:
                        print('Invalid input')
            except ValueError:
                print('Invalid input')
            continue

        # Nothing to browse if there are no results
        if not results:
            print('Not in a search context')
            continue

        # open all results and re-prompt with 'a'
        if nav == 'a':
            for index in range(0, count):
                browse(results[index][1])
            continue

        # iterate over white-space separated indices
        for nav in nav.split():
            if is_int(nav):
                index = int(nav) - 1
                if index < 0 or index >= count:
                    print('No matching index %s' % nav)
                    continue
                browse(results[index][1])
            elif '-' in nav:
                try:
                    vals = [int(x) for x in nav.split('-')]
                    if vals[0] > vals[-1]:
                        vals[0], vals[-1] = vals[-1], vals[0]

                    for _id in range(vals[0]-1, vals[-1]):
                        if 0 <= _id < count:
                            browse(results[_id][1])
                        else:
                            print('No matching index %d' % (_id + 1))
                except ValueError:
                    print('Invalid input')
                    break
            else:
                print('Invalid input')
                break


def print_rec_with_filter(records, field_filter=0):
    """Print records filtered by field.

    User determines which fields in the records to display
    by using the --format option.

    Parameters
    ----------
    records : list or sqlite3.Cursor object
        List of bookmark records to print.
    field_filter : int
        Integer indicating which fields to print.
    """
    if field_filter == 0:
        for row in records:
            print_single_rec(row)
    elif field_filter == 1:
        for row in records:
            print('%s\t%s' % (row[0], row[1]))
    elif field_filter == 2:
        for row in records:
            print('%s\t%s\t%s' % (row[0], row[1], row[3][1:-1]))
    elif field_filter == 3:
        for row in records:
            print('%s\t%s' % (row[0], row[2]))
    elif field_filter == 4:
        for row in records:
            print('%s\t%s\t%s\t%s' % (row[0], row[1], row[2], row[3][1:-1]))
    elif field_filter == 10:
        for row in records:
            print(row[1])
    elif field_filter == 20:
        for row in records:
            print('%s\t%s' % (row[1], row[3][1:-1]))
    elif field_filter == 30:
        for row in records:
            print(row[2])
    elif field_filter == 40:
        for row in records:
            print('%s\t%s\t%s' % (row[1], row[2], row[3][1:-1]))
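
# With a hypothetical record (1, 'http://example.com', 'Example', ',web,',
# 'demo', 0), field_filter=1 prints '1\thttp://example.com' and
# field_filter=20 prints 'http://example.com\tweb'.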


def print_single_rec(row, idx=0):  # NOQA
    """Print a single DB record.

    Handles both search results and individual record.

    Parameters
    ----------
    row : tuple
        Tuple representing bookmark record data.
    idx : int, optional
        Search result index. If 0, print with DB index.
        Default is 0.
    """
    str_list = []

    # Start with index and title
    if idx != 0:
        id_title_res = ID_str % (idx, row[2] if row[2] else 'Untitled', row[0])
    else:
        id_title_res = ID_DB_str % (row[0], row[2] if row[2] else 'Untitled')

    # Indicate if record is immutable
    if row[5] & 1:
        id_title_res = MUTE_str % (id_title_res)
    else:
        id_title_res += '\n'

    str_list.append(id_title_res)
    str_list.append(URL_str % (row[1]))
    if row[4]:
        str_list.append(DESC_str % (row[4]))
    if row[3] != DELIM:
        str_list.append(TAG_str % (row[3][1:-1]))

    print(''.join(str_list))


def format_json(resultset, single_record=False, field_filter=0):
    """Return results in json format.

    Parameters
    ----------
    resultset : list
        Search results from DB query.
    single_record : bool, optional
        If True, indicates only one record. Default is False.
    field_filter : int, optional
        Fields to include in the output. Default is 0, all fields.

    Returns
    -------
    json
        Record(s) in json format.
    """
    if single_record:
        marks = {}
        for row in resultset:
            if field_filter == 1:
                marks['uri'] = row[1]
            elif field_filter == 2:
                marks['uri'] = row[1]
                marks['tags'] = row[3][1:-1]
            elif field_filter == 3:
                marks['title'] = row[2]
            elif field_filter == 4:
                marks['uri'] = row[1]
                marks['tags'] = row[3][1:-1]
                marks['title'] = row[2]
            else:
                marks['index'] = row[0]
                marks['uri'] = row[1]
                marks['title'] = row[2]
                marks['description'] = row[4]
                marks['tags'] = row[3][1:-1]
    else:
        marks = []
        for row in resultset:
            if field_filter == 1:
                record = {'uri': row[1]}
            elif field_filter == 2:
                record = {'uri': row[1], 'tags': row[3][1:-1]}
            elif field_filter == 3:
                record = {'title': row[2]}
            elif field_filter == 4:
                record = {'uri': row[1], 'title': row[2], 'tags': row[3][1:-1]}
            else:
                record = {'index': row[0], 'uri': row[1], 'title': row[2], 'description': row[4], 'tags': row[3][1:-1]}

            marks.append(record)

    return json.dumps(marks, sort_keys=True, indent=4)
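
# Illustrative call (hypothetical record):
#   format_json([(1, 'http://example.com', 'Example', ',web,', 'demo', 0)],
#               single_record=True, field_filter=1)
#   # -> '{\n    "uri": "http://example.com"\n}'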


def is_int(string):
    """Check if a string is a digit.

    Parameters
    ----------
    string : str
        Input string to check.

    Returns
    -------
    bool
        True on success, False on exception.
    """
    try:
        int(string)
        return True
    except Exception:
        return False
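
# Illustrative checks:
#   is_int('42')   # True
#   is_int('4.2')  # False: int() raises ValueError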


def browse(url):
    """Duplicate stderr and stdout, and open URL in default browser.

    .. note:: Duplicates stderr and stdout in order to
              suppress errors shown on the terminal.

    Parameters
    ----------
    url : str
        URL to open in browser.

    Attributes
    ----------
    suppress_browser_output : bool
        True if a text based browser is detected.
        Must be initialized (as applicable) to use the API.
    """
    if not parse_url(url).scheme:
        # Prefix with 'http://' if no scheme
        # Otherwise, opening in browser fails anyway
        # We expect http to https redirection
        # will happen for https-only websites
        logerr('scheme missing in URI, trying http')
        url = 'http://' + url

    if browse.suppress_browser_output:
        _stderr = os.dup(2)
        os.close(2)
        _stdout = os.dup(1)
        os.close(1)
        fd = os.open(os.devnull, os.O_RDWR)
        os.dup2(fd, 2)
        os.dup2(fd, 1)

    try:
        if sys.platform != 'win32':
            webbrowser.open(url, new=2)
        else:
            # On Windows, the webbrowser module does not fork.
            # Use threads instead.
            def browserthread():
                webbrowser.open(url, new=2)

            t = threading.Thread(target=browserthread)
            t.start()
    except Exception as e:
        logerr('browse(): %s', e)
    finally:
        if browse.suppress_browser_output:
            os.close(fd)
            os.dup2(_stderr, 2)
            os.dup2(_stdout, 1)
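
# browse() reads its own function attribute, so initialize it before use
# (a usage sketch):
#   browse.suppress_browser_output = True   # e.g. when a text browser is set
#   browse('example.com')                   # opens http://example.com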


def check_upstream_release():
    """Check and report the latest upstream release version."""
    proxies = {
        'https': os.environ.get('https_proxy'),
    }

    try:
        r = requests.get(
            'https://api.github.com/repos/jarun/buku/releases?per_page=1',
            proxies=proxies
        )
    except Exception as e:
        logerr(e)
        return

    if r.status_code != 200:
        logerr('[%s] %s', r.status_code, r.reason)
    else:
        latest = r.json()[0]['tag_name']
        if latest == 'v' + __version__:
            print('This is the latest release')
        else:
            print('Latest upstream release is %s' % latest)


def regexp(expr, item):
    """Perform a regular expression search.

    Parameters
    ----------
    expr : regex
        Regular expression to search for.
    item : str
        Item on which to perform regex search.

    Returns
    -------
    bool
        True if the expression matches the item, False otherwise.
    """
    return re.search(expr, item, re.IGNORECASE) is not None


def delim_wrap(token):
    """Returns token string wrapped in delimiters.

    Parameters
    ----------
    token : str
        String item to wrap with DELIM.

    Returns
    -------
    str
        Token string wrapped by DELIM.
    """
    return DELIM + token + DELIM
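
# Illustrative result: delim_wrap('news') returns ',news,'.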


def read_in(msg):
    """A wrapper to handle input() with interrupts disabled.

    Parameters
    ----------
    msg : str
        String to pass to input().
    """
    disable_sigint_handler()
    message = None
    try:
        message = input(msg)
    except KeyboardInterrupt:
        print('Interrupted.')

    enable_sigint_handler()
    return message


def sigint_handler(signum, frame):
    """Custom SIGINT handler.

    .. note:: Neither signum nor frame are used in
              this custom handler. However, they are
              required parameters for signal handlers.

    Parameters
    ----------
    signum : int
        Signal number.
    frame : frame object or None
        Current stack frame.
    """
    global interrupted

    interrupted = True
    print('\nInterrupted.', file=sys.stderr)

    # Do a hard exit from here
    os._exit(1)


DEFAULT_HANDLER = signal.signal(signal.SIGINT, sigint_handler)


def disable_sigint_handler():
    """Disable the custom SIGINT handler."""
    signal.signal(signal.SIGINT, DEFAULT_HANDLER)


def enable_sigint_handler():
    """Enable the custom SIGINT handler."""
    signal.signal(signal.SIGINT, sigint_handler)


# ---------------------
# Editor mode functions
# ---------------------


def get_system_editor():
    """Returns the default system editor if $EDITOR is set, else 'none'."""
    return os.environ.get('EDITOR', 'none')


def is_editor_valid(editor):
    """Check if the editor string is valid.

    Parameters
    ----------
    editor : str
        Editor string.

    Returns
    -------
    bool
        True if string is valid, else False.
    """
    if editor == 'none':
        logerr('EDITOR is not set')
        return False

    if editor == '0':
        logerr('Cannot edit index 0')
        return False

    return True


def to_temp_file_content(url, title_in, tags_in, desc):
    """Generate temporary file content string.

    Parameters
    ----------
    url : str
        URL to open.
    title_in : str
        Title to add manually.
    tags_in : str
        Comma-separated tags to add manually.
    desc : str
        String description.

    Returns
    -------
    str
        Lines as newline separated string.
    """
    strings = [('# Lines beginning with "#" will be stripped.\n'
                '# Add URL in next line (single line).'), ]

    # URL
    if url is not None:
        strings += (url,)

    # TITLE
    strings += (('# Add TITLE in next line (single line). Leave blank to web fetch, "-" for no title.'),)
    if title_in is None:
        title_in = ''
    elif title_in == '':
        title_in = '-'
    strings += (title_in,)

    # TAGS (guard on tags_in itself; a bare `if not None` is always True)
    strings += ('# Add comma-separated TAGS in next line (single line).',)
    strings += (tags_in.strip(DELIM),) if tags_in is not None else ('',)

    # DESC
    strings += ('# Add COMMENTS in next line(s).',)
    if desc is not None and desc != '':
        strings += (desc,)
    else:
        strings += ('\n',)
    return '\n'.join(strings)
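
# For to_temp_file_content('http://example.com', 'Example', ',web,', 'demo')
# (hypothetical arguments), the buffer handed to the editor reads:
#
#   # Lines beginning with "#" will be stripped.
#   # Add URL in next line (single line).
#   http://example.com
#   # Add TITLE in next line (single line). Leave blank to web fetch, "-" for no title.
#   Example
#   # Add comma-separated TAGS in next line (single line).
#   web
#   # Add COMMENTS in next line(s).
#   demo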


def parse_temp_file_content(content):
    """Parse and return temporary file content.

    Parameters
    ----------
    content : str
        String of content.

    Returns
    -------
    tuple
        (url, title, tags, comments)

        url: URL to open
        title: string title to add manually
        tags: string of comma-separated tags to add manually
        comments: string description
    """
    content = content.split('\n')
    content = [c for c in content if not c or c[0] != '#']
    if not content or content[0].strip() == '':
        print('Edit aborted')
        return None

    url = content[0]
    title = None
    if len(content) > 1:
        title = content[1]

    if title == '':
        title = None
    elif title == '-':
        title = ''

    tags = DELIM
    if len(content) > 2:
        tags = parse_tags([content[2]])

    comments = []
    if len(content) > 3:
        comments = [c for c in content[3:]]
        # remove all empty lines at the end,
        # but not those in the middle of the text
        for i in range(len(comments) - 1, -1, -1):
            if comments[i].strip() != '':
                break

        if i == -1:
            comments = []
        else:
            comments = comments[0:i+1]

    comments = '\n'.join(comments)
    return url, title, tags, comments


def edit_rec(editor, url, title_in, tags_in, desc):
    """Edit a bookmark record.

    Parameters
    ----------
    editor : str
        Editor to open.
    url : str
        URL to open.
    title_in : str
        Title to add manually.
    tags_in : str
        Comma-separated tags to add manually.
    desc : str
        Bookmark description.

    Returns
    -------
    tuple
        Parsed results from parse_temp_file_content().
    """
    import tempfile
    import subprocess

    temp_file_content = to_temp_file_content(url, title_in, tags_in, desc)

    fd, tmpfile = tempfile.mkstemp(prefix='buku-edit-')
    os.close(fd)

    try:
        with open(tmpfile, 'w+', encoding='utf-8') as fp:
            fp.write(temp_file_content)
            fp.flush()
            logdbg('Edited content written to %s', tmpfile)

        cmd = editor.split(' ')
        cmd += (tmpfile,)
        subprocess.call(cmd)

        with open(tmpfile, 'r', encoding='utf-8') as f:
            content = f.read()

        os.remove(tmpfile)
    except FileNotFoundError:
        if os.path.exists(tmpfile):
            os.remove(tmpfile)
            logerr('Cannot open editor')
        else:
            logerr('Cannot open tempfile')
        return None

    parsed_content = parse_temp_file_content(content)
    return parsed_content


def setup_logger(logger):
    """Setup logger with color.

    Parameters
    ----------
    logger : logger object
        Logger to colorize.
    """

    def decorate_emit(fn):
        def new(*args):
            levelno = args[0].levelno

            if levelno == logging.DEBUG:
                color = '\x1b[35m'
            elif levelno == logging.ERROR:
                color = '\x1b[31m'
            elif levelno == logging.WARNING:
                color = '\x1b[33m'
            elif levelno == logging.INFO:
                color = '\x1b[32m'
            elif levelno == logging.CRITICAL:
                color = '\x1b[31m'
            else:
                color = '\x1b[0m'

            args[0].msg = '{}[{}]\x1b[0m {}'.format(color, args[0].levelname, args[0].msg)
            return fn(*args)
        return new

    sh = logging.StreamHandler()
    sh.emit = decorate_emit(sh.emit)
    logger.addHandler(sh)


def piped_input(argv, pipeargs=None):
    """Handle piped input.

    Parameters
    ----------
    argv : list
        Argument vector from the command line.
    pipeargs : list, optional
        List to accumulate arguments read from stdin.
    """
    if not sys.stdin.isatty():
        pipeargs += argv
        print('waiting for input')
        for s in sys.stdin:
            pipeargs += s.split()


def setcolors(args):
    """Get colors from user and separate into 'result' list for use in arg.colors.

    Parameters
    ----------
    args : str
        Color string.
    """
    Colors = collections.namedtuple('Colors', ' ID_srch, ID_str, URL_str, DESC_str, TAG_str')
    colors = Colors(*[COLORMAP[c] for c in args])
    id_col = colors.ID_srch
    id_str_col = colors.ID_str
    url_col = colors.URL_str
    desc_col = colors.DESC_str
    tag_col = colors.TAG_str
    result = [id_col, id_str_col, url_col, desc_col, tag_col]
    return result
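
# The five letters map, in order, to the colors of: search-result index,
# title text, URL, description and tags. With the default string used in
# main() below:
#   setcolors('oKlxm')
#   # -> ['\x1b[96m', '\x1b[92;1m', '\x1b[93m', '\x1b[0m', '\x1b[94m']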


# main starts here
def main():
    """Main."""
    global ID_str, ID_DB_str, MUTE_str, URL_str, DESC_str, TAG_str, promptmsg

    title_in = None
    tags_in = None
    desc_in = None
    pipeargs = []
    colorstr_env = os.getenv('BUKU_COLORS')

    try:
        piped_input(sys.argv, pipeargs)
    except KeyboardInterrupt:
        pass

    # If piped input, set argument vector
    if pipeargs:
        sys.argv = pipeargs

    # Setup custom argument parser
    argparser = ExtendedArgumentParser(
        description='''Powerful command-line bookmark manager.

POSITIONAL ARGUMENTS:
      KEYWORD              search keywords''',
        formatter_class=argparse.RawTextHelpFormatter,
        usage='''buku [OPTIONS] [KEYWORD [KEYWORD ...]]''',
        add_help=False
    )
    HIDE = argparse.SUPPRESS

    argparser.add_argument('keywords', nargs='*', metavar='KEYWORD', help=HIDE)

    # ---------------------
    # GENERAL OPTIONS GROUP
    # ---------------------

    general_grp = argparser.add_argument_group(
        title='GENERAL OPTIONS',
        description='''    -a, --add URL [tag, ...]
                         bookmark URL with comma-separated tags
    -u, --update [...]   update fields of an existing bookmark
                         accepts indices and ranges
                         refresh the title, if no edit options
                         if no arguments:
                         - update results when used with search
                         - otherwise refresh all titles
    -w, --write [editor|index]
                         open editor to edit a fresh bookmark
                         to update by index, EDITOR must be set
    -d, --delete [...]   remove bookmarks from DB
                         accepts indices or a single range
                         if no arguments:
                         - delete results when used with search
                         - otherwise delete all bookmarks
    -h, --help           show this information and exit
    -v, --version        show the program version and exit''')
    addarg = general_grp.add_argument
    addarg('-a', '--add', nargs='+', help=HIDE)
    addarg('-u', '--update', nargs='*', help=HIDE)
    addarg('-w', '--write', nargs='?', const=get_system_editor(), help=HIDE)
    addarg('-d', '--delete', nargs='*', help=HIDE)
    addarg('-h', '--help', action='store_true', help=HIDE)
    addarg('-v', '--version', action='version', version=__version__, help=HIDE)

    # ------------------
    # EDIT OPTIONS GROUP
    # ------------------

    edit_grp = argparser.add_argument_group(
        title='EDIT OPTIONS',
        description='''    --url keyword        bookmark link
    --tag [+|-] [...]    comma-separated tags
                         clear bookmark tagset, if no arguments
                         '+' appends to, '-' removes from tagset
    --title [...]        bookmark title; if no arguments:
                         -a: do not set title, -u: clear title
    -c, --comment [...]  notes or description of the bookmark
                         clears description, if no arguments
    --immutable N        disable title fetch from web on update
                         N=0: mutable (default), N=1: immutable''')
    addarg = edit_grp.add_argument
    addarg('--url', nargs=1, help=HIDE)
    addarg('--tag', nargs='*', help=HIDE)
    addarg('--title', nargs='*', help=HIDE)
    addarg('-c', '--comment', nargs='*', help=HIDE)
    addarg('--immutable', type=int, default=-1, choices={0, 1}, help=HIDE)

    # --------------------
    # SEARCH OPTIONS GROUP
    # --------------------

    search_grp = argparser.add_argument_group(
        title='SEARCH OPTIONS',
        description='''    -s, --sany           find records with ANY matching keyword
                         this is the default search option
    -S, --sall           find records matching ALL the keywords
                         special keywords -
                         "blank": entries with empty title/tag
                         "immutable": entries with locked title
    --deep               match substrings ('pen' matches 'opens')
    -r, --sreg           run a regex search
    -t, --stag [tag [,|+] ...] [- tag, ...]
                         search bookmarks by tags
                         use ',' to find entries matching ANY tag
                         use '+' to find entries matching ALL tags
                         excludes entries with tags after ' - '
                         list all tags, if no search keywords''')
    addarg = search_grp.add_argument
    addarg('-s', '--sany', action='store_true', help=HIDE)
    addarg('-S', '--sall', action='store_true', help=HIDE)
    addarg('-r', '--sreg', action='store_true', help=HIDE)
    addarg('--deep', action='store_true', help=HIDE)
    addarg('-t', '--stag', action='store_true', help=HIDE)

    # ------------------------
    # ENCRYPTION OPTIONS GROUP
    # ------------------------

    crypto_grp = argparser.add_argument_group(
        title='ENCRYPTION OPTIONS',
        description='''    -l, --lock [N]       encrypt DB in N (default 8) # iterations
    -k, --unlock [N]     decrypt DB in N (default 8) # iterations''')
    addarg = crypto_grp.add_argument
    addarg('-k', '--unlock', nargs='?', type=int, const=8, help=HIDE)
    addarg('-l', '--lock', nargs='?', type=int, const=8, help=HIDE)

    # ----------------
    # POWER TOYS GROUP
    # ----------------

    power_grp = argparser.add_argument_group(
        title='POWER TOYS',
        description='''    --ai                 auto-import from Firefox and Chrome
    -e, --export file    export bookmarks in Firefox format html
                         export markdown, if file ends with '.md'
                         format: [title](url), 1 entry per line
                         export buku DB, if file ends with '.db'
                         use --tag to export specific tags
    -i, --import file    import Firefox or Chrome bookmarks html
                         import markdown, if file ends with '.md'
                         import buku DB, if file ends with '.db'
    -p, --print [...]    show record details by indices, ranges
                         print all bookmarks, if no arguments
                         -n shows the last n results (like tail)
    -f, --format N       limit fields in -p or Json search output
                         N=1: URL, N=2: URL and tag, N=3: title,
                         N=4: URL, title and tag. To omit DB index,
                         use N0, e.g., 10, 20, 30, 40.
    -j, --json           Json formatted output for -p and search
    --colors COLORS      set output colors in five-letter string
    --nc                 disable color output
    --np                 do not show the prompt, run and exit
    -o, --open [...]     browse bookmarks by indices and ranges
                         open a random bookmark, if no arguments
    --oa                 browse all search results immediately
    --replace old new    replace old tag with new tag everywhere
                         delete old tag, if new tag not specified
    --shorten index|URL  fetch shortened url from tny.im service
    --expand index|URL   expand a tny.im shortened url
    --suggest            show similar tags when adding bookmarks
    --tacit              reduce verbosity
    --threads N          max network connections in full refresh
                         default N=4, min N=1, max N=10
    -V                   check latest upstream version available
    -z, --debug          show debug information and verbose logs''')
    addarg = power_grp.add_argument
    addarg('--ai', action='store_true', help=HIDE)
    addarg('-e', '--export', nargs=1, help=HIDE)
    addarg('-i', '--import', nargs=1, dest='importfile', help=HIDE)
    addarg('-p', '--print', nargs='*', help=HIDE)
    addarg('-f', '--format', type=int, default=0, choices={1, 2, 3, 4, 10, 20, 30, 40}, help=HIDE)
    addarg('-j', '--json', action='store_true', help=HIDE)
    addarg('--colors', dest='colorstr', type=argparser.is_colorstr, metavar='COLORS', help=HIDE)
    addarg('--nc', action='store_true', help=HIDE)
    addarg('--np', action='store_true', help=HIDE)
    addarg('-o', '--open', nargs='*', help=HIDE)
    addarg('--oa', action='store_true', help=HIDE)
    addarg('--replace', nargs='+', help=HIDE)
    addarg('--shorten', nargs=1, help=HIDE)
    addarg('--expand', nargs=1, help=HIDE)
    addarg('--suggest', action='store_true', help=HIDE)
    addarg('--tacit', action='store_true', help=HIDE)
    addarg('--threads', type=int, default=4, choices=range(1, 11), help=HIDE)
    addarg('-V', dest='upstream', action='store_true', help=HIDE)
    addarg('-z', '--debug', action='store_true', help=HIDE)
    # Undocumented APIs
    addarg('--fixtags', action='store_true', help=HIDE)
    addarg('--db', nargs=1, help=HIDE)

    # Show help and exit if no arguments
    if len(sys.argv) == 1:
        argparser.print_help(sys.stdout)
        sys.exit(1)
|
2015-11-07 07:29:38 -06:00
|
|
|
|
2016-05-25 06:00:14 -05:00
|
|
|
# Parse the arguments
|
|
|
|
args = argparser.parse_args()
|
|
|
|
|
|
|
|
# Show help and exit if help requested
|
2016-05-31 12:39:34 -05:00
|
|
|
if args.help:
|
2016-11-04 10:36:56 -05:00
|
|
|
argparser.print_help(sys.stdout)
|
2016-05-25 06:00:14 -05:00
|
|
|
sys.exit(0)
|
|
|
|
|

    # By default, Buku uses ANSI colors. As Windows does not really use them,
    # we'd better check for known working console emulators first. Currently,
    # only ConEmu is supported. If the user does not use ConEmu, colors are
    # disabled unless --colors or %BUKU_COLORS% is specified.
    if sys.platform == 'win32' and os.environ.get('ConemuDir') is None:
        if args.colorstr is None and colorstr_env is None:
            args.nc = True

    # Handle color output preference
    if args.nc:
        logging.basicConfig(format='[%(levelname)s] %(message)s')
    else:
        # Set colors
        if colorstr_env is not None:
            # Someone set BUKU_COLORS.
            colorstr = colorstr_env
        elif args.colorstr is not None:
            colorstr = args.colorstr
        else:
            colorstr = 'oKlxm'
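
        # Each letter of the five-letter color string themes one output field,
        # in order: DB index, title, URL, description, tag. setcolors() is
        # assumed to expand each letter into its ANSI sequence via COLORMAP.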
        ID = setcolors(colorstr)[0] + '%d. ' + COLORMAP['x']
        ID_DB_dim = COLORMAP['z'] + '[%s]\n' + COLORMAP['x']
        ID_str = ID + setcolors(colorstr)[1] + '%s ' + COLORMAP['x'] + ID_DB_dim
        ID_DB_str = ID + setcolors(colorstr)[1] + '%s' + COLORMAP['x']
        MUTE_str = '%s \x1b[2m(L)\x1b[0m\n'
        URL_str = COLORMAP['j'] + '   > ' + setcolors(colorstr)[2] + '%s\n' + COLORMAP['x']
        DESC_str = COLORMAP['j'] + '   + ' + setcolors(colorstr)[3] + '%s\n' + COLORMAP['x']
        TAG_str = COLORMAP['j'] + '   # ' + setcolors(colorstr)[4] + '%s\n' + COLORMAP['x']

        # Enable color in logs
        setup_logger(logger)

        # Enable prompt with reverse video
        promptmsg = '\x1b[7mbuku (? for help)\x1b[0m '

    # Set up debugging
    if args.debug:
        logger.setLevel(logging.DEBUG)
        logdbg('Version %s', __version__)
    else:
        logging.disable(logging.WARNING)
        urllib3.disable_warnings()
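
    # Without --debug, log records at WARNING level and below are muted and
    # urllib3's TLS warnings are silenced to keep normal output clean.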

    # Handle encrypt/decrypt options at top priority
    if args.lock is not None:
        BukuCrypt.encrypt_file(args.lock)
    elif args.unlock is not None:
        BukuCrypt.decrypt_file(args.unlock)

    # Set up title
    if args.title is not None:
        if args.title:
            title_in = ' '.join(args.title)
        else:
            title_in = ''

    # Set up tags
    if args.tag is not None:
        if args.tag:
            tags_in = args.tag
        else:
            tags_in = [DELIM, ]

    # Set up comment
    if args.comment is not None:
        if args.comment:
            desc_in = ' '.join(args.comment)
        else:
            desc_in = ''
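
    # Note the None vs empty distinction: a flag passed without arguments
    # yields '' (or the bare delimiter), which the update logic below treats
    # as "clear this field", while None leaves the field untouched.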

    # Initialize the database and get handles, set verbose by default
    bdb = BukuDb(args.json, args.format, not args.tacit, dbfile=args.db[0] if args.db is not None else None, colorize=not args.nc)
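
    # The undocumented --db option points buku at an alternative database,
    # e.g. `buku --db /path/to/other.db -p` (the path is illustrative).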

    # Editor mode
    if args.write is not None:
        if not is_editor_valid(args.write):
            bdb.close_quit(1)

        if is_int(args.write):
            if not bdb.edit_update_rec(int(args.write), args.immutable):
                bdb.close_quit(1)
        elif args.add is None:
            # Edit and add a new bookmark
            # Parse tags into a comma-separated string
            if tags_in:
                if tags_in[0] == '+':
                    tags = '+' + parse_tags(tags_in[1:])
                elif tags_in[0] == '-':
                    tags = '-' + parse_tags(tags_in[1:])
                else:
                    tags = parse_tags(tags_in)
            else:
                tags = DELIM

            result = edit_rec(args.write, '', title_in, tags, desc_in)
            if result is not None:
                url, title_in, tags, desc_in = result
                if args.suggest:
                    tags = bdb.suggest_similar_tag(tags)
                bdb.add_rec(url, title_in, tags, desc_in, args.immutable)
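
    # A leading '+' or '-' on the tag list marks tags to append to or remove
    # from a record's existing tags; edit_rec() presumably returns None when
    # the editor session is aborted, in which case nothing is added.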

    # Add record
    if args.add is not None:
        if args.url is not None and args.update is None:
            logerr('Bookmark a single URL at a time')
            bdb.close_quit(1)

        # Parse tags into a comma-separated string
        tags = DELIM
        keywords = args.add
        if tags_in is not None:
            if tags_in[0] == '+':
                if len(tags_in) > 1:
                    # The case: buku -a url tag1, tag2 --tag + tag3, tag4
                    tags_in = tags_in[1:]
                    # In case of add, args.add may have URL followed by tags
                    # Add delimiter as url+tags may not end with one
                    keywords = args.add + [DELIM] + tags_in
            else:
                keywords = args.add + [DELIM] + tags_in

        if len(keywords) > 1:
            tags = parse_tags(keywords[1:])

        url = args.add[0]

        if args.write and not is_int(args.write):
            result = edit_rec(args.write, url, title_in, tags, desc_in)
            if result is not None:
                url, title_in, tags, desc_in = result

        if args.suggest:
            tags = bdb.suggest_similar_tag(tags)
        bdb.add_rec(url, title_in, tags, desc_in, args.immutable)
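
    # Typical invocation: `buku -a https://example.com linux, tools` bookmarks
    # the URL with tags 'linux' and 'tools' (keywords after the URL are tags).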

    # Enable browser output in case of a text based browser
    if os.getenv('BROWSER') in ['elinks', 'links', 'lynx', 'w3m', 'links2']:
        browse.suppress_browser_output = False
    else:
        browse.suppress_browser_output = True
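
    # Text-based browsers render in the terminal, so their output must not be
    # swallowed; for GUI browsers the console noise is suppressed instead.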

    # Search record
    search_results = None
    search_opted = True
    update_search_results = False

    if args.sany:
        # Search URLs, titles, tags for any keyword
        search_results = bdb.searchdb(args.keywords, False, args.deep)
    elif args.sall:
        # Search URLs, titles, tags with all keywords
        search_results = bdb.searchdb(args.keywords, True, args.deep)
    elif args.sreg:
        # Run a regular expression search
        search_results = bdb.searchdb(args.keywords, regex=True)
    elif args.stag:
        # Search bookmarks by tag
        if args.keywords:
            search_results = bdb.search_by_tag(' '.join(args.keywords))
        else:
            # Use sub prompt to list all tags
            prompt(bdb, None, args.np, subprompt=True, suggest=args.suggest)
    elif args.keywords:
        search_results = bdb.searchdb(args.keywords, False, args.deep)
    else:
        search_opted = False
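
    # Bare keywords with no search flag fall through to ANY-keyword matching,
    # the same mode as the explicit any-keyword search above.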

    # Add cmdline search options to readline history
    if search_opted and args.keywords:
        try:
            readline.add_history(' '.join(args.keywords))
        except Exception:
            pass
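
    # readline may be unavailable (e.g. on vanilla Windows), so recording
    # history stays a best-effort convenience behind a broad except.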

    if search_results:
        oneshot = args.np
        to_delete = False

        # Open all results in browser right away if args.oa
        # is specified. This takes priority over delete/update.
        # URLs are opened first and updated/deleted later.
        if args.oa:
            for row in search_results:
                browse(row[1])

        # In case of search and delete/update, the prompt should be
        # non-interactive; delete gets priority over update
        if args.delete is not None and not args.delete:
            oneshot = True
            to_delete = True
        elif args.update is not None and not args.update:
            oneshot = True
            update_search_results = True

        if not args.json and not args.format:
            prompt(bdb, search_results, oneshot, args.deep)
        elif not args.json:
            print_rec_with_filter(search_results, field_filter=args.format)
        else:
            # Printing in JSON format is non-interactive
            print(format_json(search_results, field_filter=args.format))

        # Delete search results if opted
        if to_delete:
            bdb.delete_resultset(search_results)

    # Update record
    if args.update is not None:
        if args.url is not None:
            url_in = args.url[0]
        else:
            url_in = ''

        # Parse tags into a comma-separated string
        if tags_in:
            if tags_in[0] == '+':
                tags = '+' + parse_tags(tags_in[1:])
            elif tags_in[0] == '-':
                tags = '-' + parse_tags(tags_in[1:])
            else:
                tags = parse_tags(tags_in)
        else:
            tags = None

        # No arguments to --update, update all
        if not args.update:
            # Update all records only if search was not opted
            if not search_opted:
                bdb.update_rec(0, url_in, title_in, tags, desc_in, args.immutable, args.threads)
            elif update_search_results and search_results is not None:
                if not args.tacit:
                    print('Updated results:\n')

                pos = len(search_results) - 1
                while pos >= 0:
                    idx = search_results[pos][0]
                    bdb.update_rec(idx, url_in, title_in, tags, desc_in, args.immutable, args.threads)

                    # Commit at every 200th update
                    if pos % 200 == 0:
                        bdb.conn.commit()

                    pos -= 1
        else:
            for idx in args.update:
                if is_int(idx):
                    bdb.update_rec(int(idx), url_in, title_in, tags, desc_in, args.immutable, args.threads)
                elif '-' in idx:
                    try:
                        vals = [int(x) for x in idx.split('-')]
                        if vals[0] > vals[1]:
                            vals[0], vals[1] = vals[1], vals[0]

                        # Update only once if range starts from 0 (all)
                        if vals[0] == 0:
                            bdb.update_rec(0, url_in, title_in, tags, desc_in, args.immutable, args.threads)
                        else:
                            for _id in range(vals[0], vals[1] + 1):
                                bdb.update_rec(_id, url_in, title_in, tags, desc_in, args.immutable, args.threads)
                                if interrupted:
                                    break
                    except ValueError:
                        logerr('Invalid index or range to update')
                        bdb.close_quit(1)

                if interrupted:
                    break
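
    # e.g. `buku --update 5-10` refreshes records 5 through 10; the SIGINT
    # handler sets `interrupted` so a long network batch can stop cleanly.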

    # Delete record
    if args.delete is not None:
        if not args.delete:
            # Attempt delete-all only if search was not opted
            if not search_opted:
                bdb.cleardb()
        elif len(args.delete) == 1 and '-' in args.delete[0]:
            try:
                vals = [int(x) for x in args.delete[0].split('-')]
                if len(vals) == 2:
                    bdb.delete_rec(0, vals[0], vals[1], True)
            except ValueError:
                logerr('Invalid index or range to delete')
                bdb.close_quit(1)
        else:
            ids = []
            # Select the unique indices
            for idx in args.delete:
                if idx not in ids:
                    ids += (idx,)

            try:
                # Index delete order - highest to lowest
                ids.sort(key=lambda x: int(x), reverse=True)
                for idx in ids:
                    bdb.delete_rec(int(idx))
            except ValueError:
                logerr('Invalid index or range or combination')
                bdb.close_quit(1)
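
    # Deleting from the highest index down keeps the remaining indices valid,
    # as the DB presumably compacts (reindexes) records after each removal.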

    # Print record
    if args.print is not None:
        if not args.print:
            bdb.print_rec(0)
        else:
            try:
                for idx in args.print:
                    if is_int(idx):
                        bdb.print_rec(int(idx))
                    elif '-' in idx:
                        vals = [int(x) for x in idx.split('-')]
                        bdb.print_rec(0, vals[0], vals[-1], True)
            except ValueError:
                logerr('Invalid index or range to print')
                bdb.close_quit(1)

    # Replace a tag in DB
    if args.replace is not None:
        if len(args.replace) == 1:
            bdb.delete_tag_at_index(0, args.replace[0])
        else:
            bdb.replace_tag(args.replace[0], args.replace[1:])
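
    # Per the help text: `buku --replace old new` renames a tag everywhere,
    # while `buku --replace old` deletes the tag from every record.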

    # Export bookmarks
    if args.export is not None:
        if args.tag is None:
            bdb.exportdb(args.export[0])
        elif not args.tag:
            logerr('Missing tag')
        else:
            bdb.exportdb(args.export[0], args.tag)

    # Import bookmarks
    if args.importfile is not None:
        bdb.importdb(args.importfile[0], args.tacit)

    # Import bookmarks from browser
    if args.ai:
        bdb.auto_import_from_browser()

    # Open URL in browser
    if args.open is not None:
        if not args.open:
            bdb.browse_by_index(0)
        else:
            try:
                for idx in args.open:
                    if is_int(idx):
                        bdb.browse_by_index(int(idx))
                    elif '-' in idx:
                        vals = [int(x) for x in idx.split('-')]
                        bdb.browse_by_index(0, vals[0], vals[-1], True)
            except ValueError:
                logerr('Invalid index or range to open')
                bdb.close_quit(1)
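
    # With no arguments, index 0 acts as a sentinel that makes
    # browse_by_index() pick a random bookmark, matching the help text above.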

    # Shorten URL
    if args.shorten:
        if is_int(args.shorten[0]):
            shorturl = bdb.tnyfy_url(index=int(args.shorten[0]))
        else:
            shorturl = bdb.tnyfy_url(url=args.shorten[0])

        if shorturl:
            print(shorturl)

    # Expand URL
    if args.expand:
        if is_int(args.expand[0]):
            url = bdb.tnyfy_url(index=int(args.expand[0]), shorten=False)
        else:
            url = bdb.tnyfy_url(url=args.expand[0], shorten=False)

        if url:
            print(url)
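
    # tnyfy_url() talks to the tny.im service noted in the help text; the
    # argument may be a DB index or a raw URL, hence the is_int() dispatch.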

    # Report upstream version
    if args.upstream:
        check_upstream_release()

    # Fix tags
    if args.fixtags:
        bdb.fixtags()

    # Close DB connection and quit
    bdb.close_quit(0)


if __name__ == '__main__':
    main()