#!/usr/bin/python3
#
# Bookmark management utility
#
# Copyright (C) 2015 Arun Prakash Jana <engineerarun@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with markit. If not, see <http://www.gnu.org/licenses/>.

import sys
import os
import sqlite3
from getopt import getopt, GetoptError
import readline
import webbrowser
import html.parser as HTMLParser
from http.client import HTTPConnection
from http.client import HTTPSConnection
from urllib.parse import urljoin, unquote

# Globals
addurl = False
addindex = None
online = False
delete = False
openurl = None
show = False
showindex = None
showOpt = 0
search = False
entry = None
update = False
debug = False
titleData = None
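
# Most of these globals are set while parsing command line options in the main
# section below; titleData is filled in by the HTML title parser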


# Show usage of markit and exit
def usage():
    print("Usage: markit [OPTIONS] KEYWORDS...")
    print("Bookmark manager. Your private Google.\n")
    print("Options")
    print("  -a URL tag 1, tag 2, ...   add URL as bookmark with comma separated tags")
    print("  -d N                       delete entry at DB index N (from -P output)")
    print("  -D                         delete ALL bookmarks")
    print("  -i N                       insert entry at DB index N, useful to fill deleted index")
    print("  -o N                       open URL at DB index N in browser")
    print("  -p N                       show details of bookmark record at DB index N")
    print("  -P                         show all bookmarks along with index from DB")
    print("  -s keyword(s)              search all bookmarks for a (partial) tag or each keyword")
    print("  -u N                       update entry at DB index N")
    print("  -w                         fetch title info from web, works with -a, -i, -u")
    print("  -x N                       works with -P, N=1: show only URL, N=2: show URL and tag")
    print("  -z                         show debug information")
    print("                             you can either add or update or delete in one instance")
    print("                             any other option shows help and exits markit\n")
    print("Keys")
    print("  1-N                        open Nth search result in browser. Enter exits markit.\n")
    print("Version 1.2")
    print("Copyright (C) 2015 Arun Prakash Jana <engineerarun@gmail.com>")
    print("Webpage: https://github.com/jarun/markit")
    sys.exit(1)
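
# Illustrative invocations (example.com is a placeholder, not from the project docs):
#   markit -a http://example.com linux, utilities    add a bookmark with two tags
#   markit -s linux                                   search bookmarks for "linux"
#   markit -u 15 http://example.com linux             update the record at DB index 15
#   markit -P -x 1                                    list only the URLs of all bookmarks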


# Initialize the database connection
# Create the bookmarks table if not existing
def initdb():
    dbpath = os.path.join(os.environ.get('HOME'), '.cache', 'markit')
    if not os.path.exists(dbpath):
        os.makedirs(dbpath)

    # Create a connection
    conn = sqlite3.connect(os.path.join(dbpath, 'bookmarks.db'))
    cur = conn.cursor()

    # Create table if it doesn't exist
    cur.execute('''CREATE TABLE if not exists bookmarks \
                (id integer PRIMARY KEY, URL text NOT NULL UNIQUE, metadata text, tags text)''')
    conn.commit()
    return (conn, cur)


# Add a new bookmark or update an existing record at index
def AddUpdateEntry(conn, cur, keywords, index):
    global online

    # Build a comma-delimited tag string from the keywords following the URL
    tags = ','
    url = keywords[0]
    if len(keywords) > 1:
        for tag in keywords[1:]:
            if tags[-1] == ",":
                tags += tag
            else:
                tags += " " + tag

    if tags[-1] != ",":
        tags += ","

    meta = ''
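
    # Split the URL into server name and request path so an HTTP(S) GET can be
    # issued for the page; URLs without an http(s):// prefix are not fetched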
    if online == True:
        secure = True
        if url.find("https://") >= 0:
            server = url[8:]
        elif url.find("http://") >= 0:
            secure = False
            server = url[7:]
        else:
            online = False

    if online == True:
        marker = server.find("/")
        if marker > 0:
            fetchurl = server[marker:]
            server = server[:marker]
        else:
            fetchurl = url

        try:
            if debug:
                print("server: [%s]" % server)
            if secure == True:
                urlconn = HTTPSConnection(server, timeout=30)
            else:
                urlconn = HTTPConnection(server, timeout=30)

            if debug:
                print("URL: [%s]" % fetchurl)
            urlconn.request("GET", fetchurl)
            resp = urlconn.getresponse()
            if resp.status != 200:
                # Handle first redirection
                if resp.status in (301,302,):
                    if debug:
                        print(resp.getheader('location', ''))
                    redirurl = urljoin(url, resp.getheader('location', ''))
                    if redirurl.find("sorry/IndexRedirect?") >= 0:
                        print("ERROR: Connection blocked due to unusual activity.")
                    else:
                        urlconn.close()

                        # Pick the connection type from the redirect target's scheme
                        secure = False
                        if redirurl.find("https://") >= 0:
                            secure = True

                        if secure == True:
                            server = redirurl[8:]
                            marker = server.find("/")
                            if marker > 0:
                                server = server[:marker]
                            urlconn = HTTPSConnection(server, timeout=30)
                        else:
                            server = redirurl[7:]
                            marker = server.find("/")
                            if marker > 0:
                                server = server[:marker]
                            urlconn = HTTPConnection(server, timeout=30)

                        if debug:
                            print("Redir server: [%s]" % server)
                            print("Redir URL: [%s]" % redirurl)

                        urlconn.request("GET", redirurl)
                        resp = urlconn.getresponse()
                        if resp.status != 200:
                            print("ERROR on retry:", str(resp.status), ": ", resp.reason)
                            meta = ''
                        else:
                            getTitleData(resp)
                            if titleData != None:
                                meta = titleData
                else: # if resp.status in (301,302,):
                    print("ERROR:", str(resp.status), ": ", resp.reason)
                    meta = ''
            else: # if resp.status != 200:
                getTitleData(resp)
                if titleData != None:
                    meta = titleData
        except Exception as e:
            print("Exception: %s" % e)
            meta = ''
        finally:
            urlconn.close()

    if online == True:
        meta = meta.strip().replace("\n","")
        print("Title: [%s]" % meta)

    if index == None: # Insert a new entry
        try:
            if addindex == None: # addindex is index number to insert record at
                cur.execute('INSERT INTO bookmarks(URL, metadata, tags) VALUES (?, ?, ?)', (url, meta, tags,))
            else:
                cur.execute('INSERT INTO bookmarks(id, URL, metadata, tags) VALUES (?, ?, ?, ?)', (int(addindex), url, meta, tags,))
            conn.commit()
            print("Added at index %d" % cur.lastrowid)
        except sqlite3.IntegrityError:
            for row in cur.execute("SELECT id from bookmarks where URL LIKE ?", (url,)):
                print("URL already exists at index %s" % row[0])
                return

            print("Index %s exists" % addindex)
    else: # Update an existing entry
        try:
            cur.execute("UPDATE bookmarks SET URL = ?, metadata = ?, tags = ? WHERE id = ?", (url, meta, tags, int(index),))
            conn.commit()
            if cur.rowcount == 1:
                print("Updated")
            else:
                print("No matching index")
        except sqlite3.IntegrityError:
            print("URL already exists")


# Search the database for a tag or matching URL or Title info
def searchdb(cur, keywords):
    searchtag = ''
    for token in keywords:
        searchtag += token + " "
    searchtag = searchtag[0:-1]

    arguments = []
    arguments.append(searchtag)
    placeholder = "'%' || ? || '%'"
    query = "SELECT url, metadata, tags FROM bookmarks WHERE tags LIKE (%s)" % placeholder
    for token in keywords:
        query += " OR URL LIKE (%s) OR metadata LIKE (%s)" % (placeholder, placeholder)
        arguments.append(token)
        arguments.append(token)
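
    # The assembled query matches the combined keyword string against tags and
    # each individual keyword against URL and metadata:
    #   SELECT url, metadata, tags FROM bookmarks WHERE tags LIKE ('%' || ? || '%')
    #       OR URL LIKE ('%' || ? || '%') OR metadata LIKE ('%' || ? || '%') ...
    # with one OR pair appended per keyword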
    if debug:
        print("%s, (%s)" % (query, arguments))

    count = 0
    results = []
    for row in cur.execute(query, arguments):
        results.append(row[0])
        count += 1
        print("\x1B[1m\x1B[93m%d. \x1B[0m\x1B[92m%s\x1B[0m\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (count, row[0], row[1], row[2][1:-1]))

    if count == 0:
        return

    print("")

    while True:
        nav = input("Index number to open: ")
        if is_int(nav):
            index = int(nav) - 1
            if index < 0:
                print("Index out of bound")
                continue

            try:
                openurl = unquote(results[int(nav) - 1])
                browser_open(openurl)
            except IndexError:
                print("Index out of bound")
        else:
            break


# Delete a single record or remove the table
def cleardb(conn, cur, index):
    if index == None: # Remove the table
        cur.execute('DROP TABLE if exists bookmarks')
        conn.commit()
    else: # Remove a single entry
        try:
            cur.execute("DELETE FROM bookmarks WHERE id = ?", (int(index),))
            conn.commit()
            if cur.rowcount == 1:
                print("Removed")
            else:
                print("No matching index")
        except IndexError:
            print("Index out of bound")


# Print all records in the table
def printdb(cur, index):
    global showOpt

    if index == None: # Show all entries
        for row in cur.execute('SELECT * FROM bookmarks'):
            if showOpt == 1:
                print("%s %s" % (row[0], row[1]))
            elif showOpt == 2:
                print("%s %s %s" % (row[0], row[1], row[3][1:-1]))
            else:
                print("\x1B[1m\x1B[93m%s. \x1B[0m\x1B[92m%s\x1B[0m\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (row[0], row[1], row[2], row[3][1:-1]))
    else: # Show record at index
        try:
            for row in cur.execute("SELECT * FROM bookmarks WHERE id = ?", (int(index),)):
                print("\x1B[1m\x1B[93m%s. \x1B[0m\x1B[92m%s\x1B[0m\n\t%s\n\t\x1B[91m[TAGS]\x1B[0m %s" % (row[0], row[1], row[2], row[3][1:-1]))
                return

            print("No matching index")
        except IndexError:
            print("Index out of bound")


# Fetch index and open URL in browser
def fetchopen(index):
    try:
        # Uses the module-level cursor 'cur' created in the main section
        for row in cur.execute("SELECT URL FROM bookmarks WHERE id = ?", (int(index),)):
            url = unquote(row[0])
            browser_open(url)
            return

        print("No matching index")
    except IndexError:
        print("Index out of bound")


# Check if a value is a digit
def is_int(string):
    try:
        int(string)
        return True
    except:
        return False


# Fetch titleData from GET response
def getTitleData(resp):
    global titleData

    charset = ''
    charset = resp.headers.get_content_charset()
    if charset == None:
        charset = 'utf-8'
    if debug:
        print(charset)

    # Reset the result before parsing; the parser sets titleData on success
    titleData = None
    parser = BMHTMLParser()
    try:
        if charset == 'utf-8':
            parser.feed(resp.read().decode(charset, "replace"))
        else:
            parser.feed(resp.read().decode(charset))
    except Exception as e:
        if debug:
            print("Exception: %s" % e)


# Parse HTML page for Title info
class BMHTMLParser(HTMLParser.HTMLParser):

    def __init__(self):
        HTMLParser.HTMLParser.__init__(self)
        self.inTitle = False
        self.data = ""
        self.lasttag = None

    def handle_starttag(self, tag, attrs):
        self.inTitle = False
        if tag == "title":
            self.inTitle = True
            self.lasttag = tag

    def handle_endtag(self, tag):
        global titleData

        if tag == "title":
            self.inTitle = False
            if self.data != "":
                titleData = self.data
                self.reset() # We have received title data, exit parsing

    def handle_data(self, data):
        if self.lasttag == "title" and self.inTitle == True:
            self.data += data


# Open a URL in browser
def browser_open(url):
    url = url.replace("%22", "\"")

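    # Temporarily redirect stdout and stderr to /dev/null so messages from the
    # spawned browser process do not clutter the terminal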
    _stderr = os.dup(2)
    os.close(2)
    _stdout = os.dup(1)
    os.close(1)
    fd = os.open(os.devnull, os.O_RDWR)
    os.dup2(fd, 2)
    os.dup2(fd, 1)
    try:
        webbrowser.open(url)
    except Exception as e:
        print("Browser Exception: %s" % e)
    finally:
        os.close(fd)
        os.dup2(_stderr, 2)
        os.dup2(_stdout, 1)


# Main starts here
# ----------------
optlist = None
keywords = None

if len(sys.argv) < 2:
    usage()

# Check cmdline options
try:
    optlist, keywords = getopt(sys.argv[1:], "d:i:o:p:u:x:aDPswz")
    if len(optlist) < 1:
        usage()

    for opt in optlist:
        if opt[0] == "-a":
            if update == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            addurl = True
        elif opt[0] == "-d":
            if addurl == True or update == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            entry = opt[1]
            if int(entry) <= 0:
                usage()

            delete = True
        elif opt[0] == "-D":
            if addurl == True or update == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            delete = True
        elif opt[0] == "-i":
            if update == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            addindex = opt[1]
            if int(addindex) <= 0:
                usage()

            addurl = True
        elif opt[0] == "-o":
            if not opt[1].isdigit():
                usage()

            openurl = opt[1]
            if int(openurl) <= 0:
                usage()
        elif opt[0] == "-p":
            if not opt[1].isdigit():
                usage()

            showindex = opt[1]
            if int(showindex) <= 0:
                usage()

            show = True
        elif opt[0] == "-P":
            show = True
        elif opt[0] == "-s":
            search = True
        elif opt[0] == "-u":
            if addurl == True or delete == True:
                print("You can either add or update or delete in one instance\n")
                usage()

            if not opt[1].isdigit():
                usage()

            entry = opt[1]
            if int(entry) <= 0:
                usage()

            update = True
        elif opt[0] == "-w":
            online = True
        elif opt[0] == "-x":
            if not opt[1].isdigit():
                usage()

            showOpt = int(opt[1])
            if showOpt < 1 or showOpt > 2:
                usage()
        elif opt[0] == "-z":
            debug = True
except GetoptError as e:
    print("markit:", e)
    sys.exit(1)

# Initialize the database and get handles
conn, cur = initdb()

# To insert (-i) a new record at a user-defined index, the -a option is a must
if addindex != None and addurl == False:
    conn.close()
    usage()

# Call add or update record
if addurl == True or update == True:
    if len(keywords) < 1:
        conn.close()
        usage()

    AddUpdateEntry(conn, cur, keywords, entry)

# Search tags, URLs, Title info
if search == True:
    if len(keywords) < 1:
        conn.close()
        usage()

    searchdb(cur, keywords)

# Print all records
if show == True:
    printdb(cur, showindex)

# Open URL in browser
if openurl != None:
    fetchopen(openurl)

# Remove a single record or all records
if delete == True:
    cleardb(conn, cur, entry)

# Close the connection before exiting
conn.close()