Disable web fetch during auto-import, import and merge

Author: Arun Prakash Jana
Date:   2018-10-27 23:34:27 +05:30
parent 292ac9e728
commit 3b95f7cfca

buku.py

@@ -526,7 +526,7 @@ class BukuDb:
         resultset = self.cur.fetchall()
         return -1 if resultset[0][0] is None else resultset[0][0]
 
-    def add_rec(self, url, title_in=None, tags_in=None, desc=None, immutable=0, delay_commit=False):
+    def add_rec(self, url, title_in=None, tags_in=None, desc=None, immutable=0, delay_commit=False, fetch=True):
         """Add a new bookmark.
 
         Parameters
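
Note: fetch defaults to True, so existing callers of add_rec keep the old behaviour; only the import and merge paths below pass False. A minimal usage sketch (the no-argument BukuDb() construction and the example URL/tags are assumptions for illustration, not part of this commit):

    from buku import BukuDb

    bdb = BukuDb()
    # Title and tags come from the caller; nothing is fetched from the web.
    index = bdb.add_rec('https://example.com', title_in='Example', tags_in=',web,', fetch=False)
    print(index)
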
@@ -546,6 +546,8 @@ class BukuDb:
         delay_commit : bool, optional
             True if record should not be committed to the DB,
             leaving commit responsibility to caller. Default is False.
+        fetch : bool, optional
+            Fetch page from web and parse for data
 
         Returns
         -------
@@ -564,16 +566,19 @@ class BukuDb:
             logerr('URL [%s] already exists at index %d', url, id)
             return -1
 
-        # Fetch data
-        ptitle, pdesc, ptags, mime, bad = network_handler(url)
-        if bad:
-            print('Malformed URL\n')
-        elif mime:
-            logdbg('HTTP HEAD requested')
-        elif ptitle == '' and title_in is None:
-            print('No title\n')
+        if fetch:
+            # Fetch data
+            ptitle, pdesc, ptags, mime, bad = network_handler(url)
+            if bad:
+                print('Malformed URL\n')
+            elif mime:
+                logdbg('HTTP HEAD requested')
+            elif ptitle == '' and title_in is None:
+                print('No title\n')
+            else:
+                logdbg('Title: [%s]', ptitle)
         else:
-            logdbg('Title: [%s]', ptitle)
+            ptitle = pdesc = ptags = ''
 
         if title_in is not None:
             ptitle = title_in
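
The new else branch matters because the code that follows (the title_in override and tag merging) still reads ptitle, pdesc and ptags; binding them to empty strings keeps that code working when no page is fetched. A simplified sketch of the control flow (resolve_metadata is a made-up name for illustration only; network_handler is buku's real helper):

    from buku import network_handler

    def resolve_metadata(url, title_in=None, fetch=True):
        if fetch:
            ptitle, pdesc, ptags, mime, bad = network_handler(url)
        else:
            # Keep the names bound so the downstream logic needs no changes
            ptitle = pdesc = ptags = ''
        if title_in is not None:
            ptitle = title_in
        return ptitle, pdesc, ptags
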
@@ -2145,7 +2150,7 @@ class BukuDb:
                     tags += folder_name
                 if unique_tag:
                     tags += DELIM + unique_tag
-                yield (item['url'], item['name'], parse_tags([tags]), None, 0, True)
+                yield (item['url'], item['name'], parse_tags([tags]), None, 0, True, False)
 
     def load_chrome_database(self, path, unique_tag, add_parent_folder_as_tag):
         """Open Chrome Bookmarks json file and import data.
@@ -2233,7 +2238,7 @@ class BukuDb:
             else:
                 title = ''
 
-            self.add_rec(url, title, tags, None, 0, True)
+            self.add_rec(url, title, tags, None, 0, True, False)
         try:
             cur.close()
             conn.close()
@@ -2436,7 +2441,7 @@ class BukuDb:
         resultset = indb_cur.fetchall()
         if resultset:
             for row in resultset:
-                self.add_rec(row[1], row[2], row[3], row[4], row[5], True)
+                self.add_rec(row[1], row[2], row[3], row[4], row[5], True, False)
 
         self.conn.commit()
 
@@ -2907,8 +2912,9 @@ def import_html(html_soup, add_parent_folder_as_tag, newtag):
                 tag['tags'] = newtag
 
         yield (
-            tag['href'], tag.string, parse_tags([tag['tags']])
-            if tag.has_attr('tags') else None, desc, 0, True
+            tag['href'], tag.string,
+            parse_tags([tag['tags']]) if tag.has_attr('tags') else None,
+            desc, 0, True, False
         )
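
Same idea for HTML imports: the yield is reflowed for readability and gains the trailing False. A sketch of how the generator might be driven end to end (assumes BeautifulSoup 4 is installed and that the caller feeds each tuple straight into add_rec; the file name and tag are placeholders):

    from bs4 import BeautifulSoup
    from buku import BukuDb, import_html

    bdb = BukuDb()
    with open('bookmarks.html', encoding='utf-8') as f:
        soup = BeautifulSoup(f, 'html.parser')

    for item in import_html(soup, add_parent_folder_as_tag=False, newtag='imported'):
        bdb.add_rec(*item)  # each tuple carries delay_commit=True, so commit once at the end
    bdb.conn.commit()
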