#!/usr/bin/env python3
#
# Unit test cases for buku
#
import logging
import math
import os
import re
import shutil
import sqlite3
import sys
import urllib.request
import zipfile
from genericpath import exists
from tempfile import TemporaryDirectory, NamedTemporaryFile

from unittest import mock
import unittest
import pytest
import yaml
from hypothesis import given, example, settings
from hypothesis import strategies as st
import vcr

from buku import BukuDb, parse_tags, prompt

logging.basicConfig()  # you need to initialize logging, otherwise you will not see anything from vcrpy
vcr_log = logging.getLogger("vcr")
vcr_log.setLevel(logging.INFO)

TEST_TEMP_DIR_OBJ = TemporaryDirectory(prefix='bukutest_')
TEST_TEMP_DIR_PATH = TEST_TEMP_DIR_OBJ.name
TEST_TEMP_DBDIR_PATH = os.path.join(TEST_TEMP_DIR_PATH, 'buku')
TEST_TEMP_DBFILE_PATH = os.path.join(TEST_TEMP_DBDIR_PATH, 'bookmarks.db')
MAX_SQLITE_INT = int(math.pow(2, 63) - 1)

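# Sample bookmarks shared by the tests below; each entry is [URL, title, tags, description].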
TEST_BOOKMARKS = [
    ['http://slashdot.org',
     'SLASHDOT',
     parse_tags(['old,news']),
     "News for old nerds, stuff that doesn't matter"],
    ['http://www.zażółćgęśląjaźń.pl/',
     'ZAŻÓŁĆ',
     parse_tags(['zażółć,gęślą,jaźń']),
     "Testing UTF-8, zażółć gęślą jaźń."],
    ['http://example.com/',
     'test',
     parse_tags(['test,tes,est,es']),
     "a case for replace_tag test"],
]

only_python_3_5 = pytest.mark.skipif(
    sys.version_info < (3, 5), reason="requires Python 3.5 or later")


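# Function-scoped fixture for the pytest-style tests: point XDG_DATA_HOME at the
# temporary directory and drop any leftover test database so each test starts clean.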
@pytest.fixture()
def setup():
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH

    # start every test from a clean state
    if exists(TEST_TEMP_DBFILE_PATH):
        os.remove(TEST_TEMP_DBFILE_PATH)


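# yaml.SafeLoader cannot construct Python tuples by default; this subclass registers a
# python/tuple constructor so the recorded add_rec call arguments can be loaded safely.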
class PrettySafeLoader(yaml.SafeLoader):  # pylint: disable=too-many-ancestors,too-few-public-methods
    def construct_python_tuple(self, node):
        return tuple(self.construct_sequence(node))


PrettySafeLoader.add_constructor(
    u'tag:yaml.org,2002:python/tuple',
    PrettySafeLoader.construct_python_tuple)


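# unittest-style tests exercising the core BukuDb CRUD and search APIs against a
# throwaway database.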
class TestBukuDb(unittest.TestCase):

    def setUp(self):
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH

        # start every test from a clean state
        if exists(TEST_TEMP_DBFILE_PATH):
            os.remove(TEST_TEMP_DBFILE_PATH)

        self.bookmarks = TEST_BOOKMARKS
        self.bdb = BukuDb()

    def tearDown(self):
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH

    @pytest.mark.non_tox
    def test_get_default_dbdir(self):
        dbdir_expected = TEST_TEMP_DBDIR_PATH
        dbdir_local_expected = os.path.join(os.path.expanduser('~'), '.local', 'share', 'buku')
        dbdir_relative_expected = os.path.abspath('.')

        # desktop linux
        self.assertEqual(dbdir_expected, BukuDb.get_default_dbdir())

        # desktop generic
        os.environ.pop('XDG_DATA_HOME')
        self.assertEqual(dbdir_local_expected, BukuDb.get_default_dbdir())

        # no desktop
        # -- home is defined differently on various platforms.
        # -- keep a copy and set it back once done
        originals = {}
        for env_var in ['HOME', 'HOMEPATH', 'HOMEDIR']:
            try:
                originals[env_var] = os.environ.pop(env_var)
            except KeyError:
                pass
        self.assertEqual(dbdir_relative_expected, BukuDb.get_default_dbdir())
        for key, value in list(originals.items()):
            os.environ[key] = value

    # # not sure how to test this in a nondestructive manner
    # def test_move_legacy_dbfile(self):
    #     self.fail()

    def test_initdb(self):
        if exists(TEST_TEMP_DBFILE_PATH):
            os.remove(TEST_TEMP_DBFILE_PATH)
        self.assertIs(False, exists(TEST_TEMP_DBFILE_PATH))
        conn, curr = BukuDb.initdb()
        self.assertIsInstance(conn, sqlite3.Connection)
        self.assertIsInstance(curr, sqlite3.Cursor)
        self.assertIs(True, exists(TEST_TEMP_DBFILE_PATH))
        curr.close()
        conn.close()

    def test_get_rec_by_id(self):
        for bookmark in self.bookmarks:
            # adding bookmark from self.bookmarks
            self.bdb.add_rec(*bookmark)

        # the expected bookmark
        expected = (1, 'http://slashdot.org', 'SLASHDOT', ',news,old,',
                    "News for old nerds, stuff that doesn't matter", 0)
        bookmark_from_db = self.bdb.get_rec_by_id(1)
        # asserting bookmark matches expected
        self.assertEqual(expected, bookmark_from_db)
        # asserting None returned if index out of range
        self.assertIsNone(self.bdb.get_rec_by_id(len(self.bookmarks[0]) + 1))

    def test_get_rec_id(self):
        for idx, bookmark in enumerate(self.bookmarks):
            # adding bookmark from self.bookmarks to database
            self.bdb.add_rec(*bookmark)
            # asserting index is in order
            idx_from_db = self.bdb.get_rec_id(bookmark[0])
            self.assertEqual(idx + 1, idx_from_db)

        # asserting -1 is returned for nonexistent url
        idx_from_db = self.bdb.get_rec_id("http://nonexistent.url")
        self.assertEqual(-1, idx_from_db)

    def test_add_rec(self):
        for bookmark in self.bookmarks:
            # adding bookmark from self.bookmarks to database
            self.bdb.add_rec(*bookmark)
            # retrieving bookmark from database
            index = self.bdb.get_rec_id(bookmark[0])
            from_db = self.bdb.get_rec_by_id(index)
            self.assertIsNotNone(from_db)
            # comparing data
            for pair in zip(from_db[1:], bookmark):
                self.assertEqual(*pair)

    # TODO: tags should be passed to the api as a sequence...

    def test_suggest_tags(self):
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        tagstr = ',test,old,'
        with mock.patch('builtins.input', return_value='1 2 3'):
            expected_results = ',es,est,news,old,test,'
            suggested_results = self.bdb.suggest_similar_tag(tagstr)
            self.assertEqual(expected_results, suggested_results)

        # returns user supplied tags if none are in the DB
        tagstr = ',uniquetag1,uniquetag2,'
        expected_results = tagstr
        suggested_results = self.bdb.suggest_similar_tag(tagstr)
        self.assertEqual(expected_results, suggested_results)

    def test_update_rec(self):
        old_values = self.bookmarks[0]
        new_values = self.bookmarks[1]

        # adding bookmark and getting index
        self.bdb.add_rec(*old_values)
        index = self.bdb.get_rec_id(old_values[0])
        # updating with new values
        self.bdb.update_rec(index, *new_values)
        # retrieving bookmark from database
        from_db = self.bdb.get_rec_by_id(index)
        self.assertIsNotNone(from_db)
        # checking if values are updated
        for pair in zip(from_db[1:], new_values):
            self.assertEqual(*pair)

    def test_append_tag_at_index(self):
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        # tags to add
        old_tags = self.bdb.get_rec_by_id(1)[3]
        new_tags = ",foo,bar,baz"
        self.bdb.append_tag_at_index(1, new_tags)
        # updated list of tags
        from_db = self.bdb.get_rec_by_id(1)[3]

        # checking if new tags were added to the bookmark
        self.assertTrue(split_and_test_membership(new_tags, from_db))
        # checking if old tags still exist
        self.assertTrue(split_and_test_membership(old_tags, from_db))

    def test_append_tag_at_all_indices(self):
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        # tags to add
        new_tags = ",foo,bar,baz"
        # record of original tags for each bookmark
        old_tagsets = {i: self.bdb.get_rec_by_id(i)[3] for i in inclusive_range(1, len(self.bookmarks))}

        with mock.patch('builtins.input', return_value='y'):
            self.bdb.append_tag_at_index(0, new_tags)
        # updated tags for each bookmark
        from_db = [(i, self.bdb.get_rec_by_id(i)[3]) for i in inclusive_range(1, len(self.bookmarks))]
        for index, tagset in from_db:
            # checking if new tags were added to the bookmark
            self.assertTrue(split_and_test_membership(new_tags, tagset))
            # checking if old tags still exist for the bookmark
            self.assertTrue(split_and_test_membership(old_tagsets[index], tagset))

    def test_delete_tag_at_index(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        get_tags_at_idx = lambda i: self.bdb.get_rec_by_id(i)[3]
        # list of two-tuples, each containing a bookmark index and its corresponding tags
        tags_by_index = [(i, get_tags_at_idx(i)) for i in inclusive_range(1, len(self.bookmarks))]

        for i, tags in tags_by_index:
            # get the first tag from the bookmark
            to_delete = re.match(',.*?,', tags).group(0)
            self.bdb.delete_tag_at_index(i, to_delete)
            # get updated tags from db
            from_db = get_tags_at_idx(i)
            self.assertNotIn(to_delete, from_db)

    def test_search_keywords_and_filter_by_tags(self):
        # adding bookmark
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        with mock.patch('buku.prompt'):
            expected = [(3,
                         'http://example.com/',
                         'test',
                         ',es,est,tes,test,',
                         'a case for replace_tag test', 0)]
            results = self.bdb.search_keywords_and_filter_by_tags(
                ['News', 'case'],
                False,
                False,
                False,
                ['est'],
            )
            self.assertIn(expected[0], results)
            expected = [(3,
                         'http://example.com/',
                         'test',
                         ',es,est,tes,test,',
                         'a case for replace_tag test', 0),
                        (2,
                         'http://www.zażółćgęśląjaźń.pl/',
                         'ZAŻÓŁĆ',
                         ',gęślą,jaźń,zażółć,',
                         'Testing UTF-8, zażółć gęślą jaźń.', 0)]
            results = self.bdb.search_keywords_and_filter_by_tags(
                ['UTF-8', 'case'],
                False,
                False,
                False,
                'jaźń, test',
            )
            self.assertIn(expected[0], results)
            self.assertIn(expected[1], results)

    def test_searchdb(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        get_first_tag = lambda x: ''.join(x[2].split(',')[:2])
        for i, bookmark in enumerate(self.bookmarks):
            tag_search = get_first_tag(bookmark)
            # search by the domain name for url
            url_search = re.match(r'https?://(.*)?\..*', bookmark[0]).group(1)
            title_search = bookmark[1]
            # Expect a five-tuple containing all bookmark data
            # db index, URL, title, tags, description
            expected = [(i + 1,) + tuple(bookmark)]
            expected[0] += tuple([0])
            # search db by tag, url (domain name), and title
            for keyword in (tag_search, url_search, title_search):
                with mock.patch('buku.prompt'):
                    # search by keyword
                    results = self.bdb.searchdb([keyword])
                    self.assertEqual(results, expected)

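    # The tests below that replay HTTP traffic are wrapped in vcr.use_cassette, so the
    # recorded responses under tests/vcr_cassettes are used instead of live requests.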
    def test_search_by_tag(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        with mock.patch('buku.prompt'):
            get_first_tag = lambda x: ''.join(x[2].split(',')[:2])
            for i in range(len(self.bookmarks)):
                # search for bookmark with a tag that is known to exist
                results = self.bdb.search_by_tag(get_first_tag(self.bookmarks[i]))
                # Expect a five-tuple containing all bookmark data
                # db index, URL, title, tags, description
                expected = [(i + 1,) + tuple(self.bookmarks[i])]
                expected[0] += tuple([0])
                self.assertEqual(results, expected)

    @vcr.use_cassette('tests/vcr_cassettes/test_search_by_multiple_tags_search_any.yaml')
    def test_search_by_multiple_tags_search_any(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        new_bookmark = ['https://newbookmark.com',
                        'New Bookmark',
                        parse_tags(['test,old,new']),
                        'additional bookmark to test multiple tag search', 0]

        self.bdb.add_rec(*new_bookmark)

        with mock.patch('buku.prompt'):
            # search for bookmarks matching ANY of the supplied tags
            results = self.bdb.search_by_tag('test, old')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description, ordered by records with
            # the most number of matches.
            expected = [
                (4, 'https://newbookmark.com', 'New Bookmark',
                 parse_tags([',test,old,new,']),
                 'additional bookmark to test multiple tag search', 0),
                (1, 'http://slashdot.org', 'SLASHDOT',
                 parse_tags([',news,old,']),
                 "News for old nerds, stuff that doesn't matter", 0),
                (3, 'http://example.com/', 'test', ',es,est,tes,test,', 'a case for replace_tag test', 0)
            ]
            self.assertEqual(results, expected)

    @vcr.use_cassette('tests/vcr_cassettes/test_search_by_multiple_tags_search_all.yaml')
    def test_search_by_multiple_tags_search_all(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        new_bookmark = ['https://newbookmark.com',
                        'New Bookmark',
                        parse_tags(['test,old,new']),
                        'additional bookmark to test multiple tag search']

        self.bdb.add_rec(*new_bookmark)

        with mock.patch('buku.prompt'):
            # search for bookmarks matching ALL of the supplied tags
            results = self.bdb.search_by_tag('test + old')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (4, 'https://newbookmark.com', 'New Bookmark',
                 parse_tags([',test,old,new,']),
                 'additional bookmark to test multiple tag search', 0)
            ]
            self.assertEqual(results, expected)

    def test_search_by_tags_enforces_space_seprations_search_all(self):

        bookmark1 = ['https://bookmark1.com',
                     'Bookmark One',
                     parse_tags(['tag, two,tag+two']),
                     "test case for bookmark with '+' in tag"]

        bookmark2 = ['https://bookmark2.com',
                     'Bookmark Two',
                     parse_tags(['tag,two, tag-two']),
                     "test case for bookmark with hyphenated tag"]

        self.bdb.add_rec(*bookmark1)
        self.bdb.add_rec(*bookmark2)

        with mock.patch('buku.prompt'):
            # check that space separation for ' + ' operator is enforced
            results = self.bdb.search_by_tag('tag+two')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (1, 'https://bookmark1.com', 'Bookmark One',
                 parse_tags([',tag,two,tag+two,']),
                 "test case for bookmark with '+' in tag", 0)
            ]
            self.assertEqual(results, expected)
            results = self.bdb.search_by_tag('tag + two')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (1, 'https://bookmark1.com', 'Bookmark One',
                 parse_tags([',tag,two,tag+two,']),
                 "test case for bookmark with '+' in tag", 0),
                (2, 'https://bookmark2.com', 'Bookmark Two',
                 parse_tags([',tag,two,tag-two,']),
                 "test case for bookmark with hyphenated tag", 0),
            ]
            self.assertEqual(results, expected)

    def test_search_by_tags_exclusion(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        new_bookmark = ['https://newbookmark.com',
                        'New Bookmark',
                        parse_tags(['test,old,new']),
                        'additional bookmark to test multiple tag search']

        self.bdb.add_rec(*new_bookmark)

        with mock.patch('buku.prompt'):
            # search for bookmarks matching ANY of the supplied tags
            # while excluding bookmarks from results that match a given tag
            results = self.bdb.search_by_tag('test, old - est')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (4, 'https://newbookmark.com', 'New Bookmark',
                 parse_tags([',test,old,new,']),
                 'additional bookmark to test multiple tag search', 0),
                (1, 'http://slashdot.org', 'SLASHDOT',
                 parse_tags([',news,old,']),
                 "News for old nerds, stuff that doesn't matter", 0),
            ]
            self.assertEqual(results, expected)

    @vcr.use_cassette('tests/vcr_cassettes/test_search_by_tags_enforces_space_seprations_exclusion.yaml')
    def test_search_by_tags_enforces_space_seprations_exclusion(self):

        bookmark1 = ['https://bookmark1.com',
                     'Bookmark One',
                     parse_tags(['tag, two,tag+two']),
                     "test case for bookmark with '+' in tag"]

        bookmark2 = ['https://bookmark2.com',
                     'Bookmark Two',
                     parse_tags(['tag,two, tag-two']),
                     "test case for bookmark with hyphenated tag"]

        bookmark3 = ['https://bookmark3.com',
                     'Bookmark Three',
                     parse_tags(['tag, tag three']),
                     "second test case for bookmark with hyphenated tag"]

        self.bdb.add_rec(*bookmark1)
        self.bdb.add_rec(*bookmark2)
        self.bdb.add_rec(*bookmark3)

        with mock.patch('buku.prompt'):
            # check that space separation for ' - ' operator is enforced
            results = self.bdb.search_by_tag('tag-two')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (2, 'https://bookmark2.com', 'Bookmark Two',
                 parse_tags([',tag,two,tag-two,']),
                 "test case for bookmark with hyphenated tag", 0),
            ]
            self.assertEqual(results, expected)
            results = self.bdb.search_by_tag('tag - two')
            # Expect a list of five-element tuples containing all bookmark data
            # db index, URL, title, tags, description
            expected = [
                (3, 'https://bookmark3.com', 'Bookmark Three',
                 parse_tags([',tag,tag three,']),
                 "second test case for bookmark with hyphenated tag", 0),
            ]
            self.assertEqual(results, expected)

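    # The prompt() interaction tests below feed canned answers through builtins.input
    # and capture buku.browse with a mock to verify which URLs would have been opened.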
    def test_search_and_open_in_broswer_by_range(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        # simulate user input, select range of indices 1-3
        index_range = '1-%s' % len(self.bookmarks)
        with mock.patch('builtins.input', side_effect=[index_range]):
            with mock.patch('buku.browse') as mock_browse:
                try:
                    # search the db with keywords from each bookmark
                    # searching using the first tag from bookmarks
                    get_first_tag = lambda x: x[2].split(',')[1]
                    results = self.bdb.searchdb([get_first_tag(bm) for bm in self.bookmarks])
                    prompt(self.bdb, results)
                except StopIteration:
                    # catch exception thrown by reaching the end of the side effect iterable
                    pass

        # collect arguments passed to browse
        arg_list = [args[0] for args, _ in mock_browse.call_args_list]
        # expect a list of bookmark URLs
        expected = [x[0] for x in self.bookmarks]
        # checking if browse was called with the expected arguments
        self.assertEqual(arg_list, expected)

    @vcr.use_cassette('tests/vcr_cassettes/test_search_and_open_all_in_browser.yaml')
    def test_search_and_open_all_in_browser(self):
        # adding bookmarks
        for bookmark in self.bookmarks:
            self.bdb.add_rec(*bookmark)

        # simulate user input, select 'a' to open all bookmarks in results
        with mock.patch('builtins.input', side_effect=['a']):
            with mock.patch('buku.browse') as mock_browse:
                try:
                    # search the db with keywords from each bookmark
                    # searching using the first tag from bookmarks
                    get_first_tag = lambda x: x[2].split(',')[1]
                    results = self.bdb.searchdb([get_first_tag(bm) for bm in self.bookmarks[:2]])
                    prompt(self.bdb, results)
                except StopIteration:
                    # catch exception thrown by reaching the end of the side effect iterable
                    pass

        # collect arguments passed to browse
        arg_list = [args[0] for args, _ in mock_browse.call_args_list]
        # expect a list of bookmark URLs
        expected = [x[0] for x in self.bookmarks][:2]
        # checking if browse was called with the expected arguments
        self.assertEqual(arg_list, expected)

    def test_delete_rec(self):
        # adding bookmark and getting index
        self.bdb.add_rec(*self.bookmarks[0])
        index = self.bdb.get_rec_id(self.bookmarks[0][0])
        # deleting bookmark
        self.bdb.delete_rec(index)
        # asserting it doesn't exist
        from_db = self.bdb.get_rec_by_id(index)
        self.assertIsNone(from_db)

    def test_delete_rec_yes(self):
        # checking that "y" response causes delete_rec to return True
        with mock.patch('builtins.input', return_value='y'):
            self.assertTrue(self.bdb.delete_rec(0))

    def test_delete_rec_no(self):
        # checking that non-"y" response causes delete_rec to return None
        with mock.patch('builtins.input', return_value='n'):
            self.assertFalse(self.bdb.delete_rec(0))

    def test_cleardb(self):
        # adding bookmarks
        self.bdb.add_rec(*self.bookmarks[0])
        # deleting all bookmarks
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.cleardb()
        # assert table has been dropped
        assert self.bdb.get_rec_by_id(0) is None

    def test_replace_tag(self):
        indices = []
        for bookmark in self.bookmarks:
            # adding bookmark, getting index
            self.bdb.add_rec(*bookmark)
            index = self.bdb.get_rec_id(bookmark[0])
            indices += [index]

        # replacing tags
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("news", ["__01"])
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("zażółć", ["__02,__03"])

        # replacing tag which is also a substring of other tag
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("es", ["__04"])

        # removing tags
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("gęślą")
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("old")

        # removing non-existent tag
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("_")

        # removing nonexistent tag which is also a substring of other tag
        with mock.patch('builtins.input', return_value='y'):
            self.bdb.replace_tag("e")

        for url, title, _, _ in self.bookmarks:
            # retrieving from db
            index = self.bdb.get_rec_id(url)
            from_db = self.bdb.get_rec_by_id(index)
            # asserting tags were replaced
            if title == "SLASHDOT":
                self.assertEqual(from_db[3], parse_tags(["__01"]))
            elif title == "ZAŻÓŁĆ":
                self.assertEqual(from_db[3], parse_tags(["__02,__03,jaźń"]))
            elif title == "test":
                self.assertEqual(from_db[3], parse_tags(["test,tes,est,__04"]))

    def test_tnyfy_url(self):
        # shorten a well-known url
        shorturl = self.bdb.tnyfy_url(url='https://www.google.com', shorten=True)
        self.assertEqual(shorturl, 'http://tny.im/yt')

        # expand a well-known short url
        url = self.bdb.tnyfy_url(url='http://tny.im/yt', shorten=False)
        self.assertEqual(url, 'https://www.google.com')

    # def test_browse_by_index(self):
    #     self.fail()

    def test_close_quit(self):
        # quitting with no args
        try:
            self.bdb.close_quit()
        except SystemExit as err:
            self.assertEqual(err.args[0], 0)
        # quitting with custom arg
        try:
            self.bdb.close_quit(1)
        except SystemExit as err:
            self.assertEqual(err.args[0], 1)

    # def test_import_bookmark(self):
    #     self.fail()


@pytest.fixture(scope='function')
def refreshdb_fixture():
    # Setup
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH

    # start every test from a clean state
    if exists(TEST_TEMP_DBFILE_PATH):
        os.remove(TEST_TEMP_DBFILE_PATH)

    bdb = BukuDb()

    yield bdb

    # Teardown
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH


@pytest.mark.parametrize(
    "title_in, exp_res",
    [
        ['?', 'Example Domain'],
        [None, 'Example Domain'],
        ['', 'Example Domain'],
        ['random title', 'Example Domain'],
    ]
)
def test_refreshdb(refreshdb_fixture, title_in, exp_res):
    bdb = refreshdb_fixture
    args = ["http://example.com"]
    if title_in:
        args.append(title_in)
    bdb.add_rec(*args)
    bdb.refreshdb(1, 1)
    from_db = bdb.get_rec_by_id(1)
    assert from_db[2] == exp_res, 'from_db: {}'.format(from_db)


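# Property-based check: hypothesis drives print_rec with arbitrary index/range
# combinations and the assertions below verify which combinations must log an error.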
@given(
    index=st.integers(min_value=-10, max_value=10),
    low=st.integers(min_value=-10, max_value=10),
    high=st.integers(min_value=-10, max_value=10),
    is_range=st.booleans(),
)
@settings(deadline=None)
def test_print_rec_hypothesis(caplog, setup, index, low, high, is_range):
    """test when index, low or high is less than 0."""
    # setup
    caplog.handler.records.clear()
    caplog.records.clear()

    bdb = BukuDb()
    # clear all records first before testing
    bdb.delete_rec_all()
    bdb.add_rec("http://one.com", "", parse_tags(['cat,ant,bee,1']), "")
    db_len = 1
    bdb.print_rec(index=index, low=low, high=high, is_range=is_range)

    check_print = False
    err_msg = ['Actual log:']
    err_msg.extend(['{}:{}'.format(x.levelname, x.getMessage()) for x in caplog.records])

    if index < 0 or (0 <= index <= db_len and not is_range):
        check_print = True
    # negative index/range on is_range
    elif (is_range and any([low < 0, high < 0])):
        assert any([x.levelname == "ERROR" for x in caplog.records]), \
            '\n'.join(err_msg)
        assert any([x.getMessage() == "Negative range boundary" for x in caplog.records]), \
            '\n'.join(err_msg)
    elif is_range:
        check_print = True
    else:
        assert any([x.levelname == "ERROR" for x in caplog.records]), \
            '\n'.join(err_msg)
        assert any([x.getMessage().startswith("No matching index") for x in caplog.records]), \
            '\n'.join(err_msg)

    if check_print:
        assert not any([x.levelname == "ERROR" for x in caplog.records]), \
            '\n'.join(err_msg)

    # teardown
    bdb.delete_rec(index=1)
    caplog.handler.records.clear()
    caplog.records.clear()


def test_list_tags(capsys, setup):
    bdb = BukuDb()

    # adding bookmarks
    bdb.add_rec("http://one.com", "", parse_tags(['cat,ant,bee,1']), "")
    bdb.add_rec("http://two.com", "", parse_tags(['Cat,Ant,bee,1']), "")
    bdb.add_rec("http://three.com", "", parse_tags(['Cat,Ant,3,Bee,2']), "")

    # listing tags, asserting output
    out, err = capsys.readouterr()
    prompt(bdb, None, True, listtags=True)
    out, err = capsys.readouterr()
    assert out == " 1. 1 (2)\n 2. 2 (1)\n 3. 3 (1)\n 4. ant (3)\n 5. bee (3)\n 6. cat (3)\n\n"
    assert err == ''


def test_compactdb(setup):
    bdb = BukuDb()

    # adding bookmarks
    for bookmark in TEST_BOOKMARKS:
        bdb.add_rec(*bookmark)

    # manually deleting 2nd index from db, calling compactdb
    bdb.cur.execute('DELETE FROM bookmarks WHERE id = ?', (2,))
    bdb.compactdb(2)

    # asserting bookmarks have correct indices
    assert bdb.get_rec_by_id(1) == (
        1, 'http://slashdot.org', 'SLASHDOT', ',news,old,', "News for old nerds, stuff that doesn't matter", 0)
    assert bdb.get_rec_by_id(2) == (
        2, 'http://example.com/', 'test', ',es,est,tes,test,', 'a case for replace_tag test', 0)
    assert bdb.get_rec_by_id(3) is None


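# Property-based check for delete_rec over ranges; the expected post-delete length is
# derived from the (low, high) pair normalized by the normalize_range() helper at the
# bottom of this module.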
@vcr.use_cassette('tests/vcr_cassettes/test_delete_rec_range_and_delay_commit.yaml')
@given(
    low=st.integers(min_value=-10, max_value=10),
    high=st.integers(min_value=-10, max_value=10),
    delay_commit=st.booleans(),
    input_retval=st.characters()
)
@example(low=0, high=0, delay_commit=False, input_retval='y')
@settings(max_examples=2, deadline=None)
def test_delete_rec_range_and_delay_commit(setup, low, high, delay_commit, input_retval):
    """test delete rec, range and delay commit."""
    bdb = BukuDb()
    bdb_dc = BukuDb()  # instance for delay_commit check.
    index = 0
    is_range = True

    # Fill bookmark
    for bookmark in TEST_BOOKMARKS:
        bdb.add_rec(*bookmark)
    db_len = len(TEST_BOOKMARKS)

    # use normalized high and low variable
    n_low, n_high = normalize_range(db_len=db_len, low=low, high=high)

    exp_res = True
    if n_high > db_len >= n_low:
        exp_db_len = db_len - (db_len + 1 - n_low)
    elif n_high == n_low > db_len:
        exp_db_len = db_len
        exp_res = False
    elif n_high == n_low <= db_len:
        exp_db_len = db_len - 1
    else:
        exp_db_len = db_len - (n_high + 1 - n_low)

    with mock.patch('builtins.input', return_value=input_retval):
        res = bdb.delete_rec(
            index=index, low=low, high=high, is_range=is_range, delay_commit=delay_commit)

    if n_low < 0:
        assert not res
        assert len(bdb_dc.get_rec_all()) == db_len
        # teardown
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH
        return
    if (low == 0 or high == 0) and input_retval != 'y':
        assert not res
        assert len(bdb_dc.get_rec_all()) == db_len
        # teardown
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH
        return
    if (low == 0 or high == 0) and input_retval == 'y':
        assert res == exp_res
        assert len(bdb_dc.get_rec_all()) == 0
        # teardown
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH
        return
    if n_low > db_len and n_low > 0:
        assert not res
        assert len(bdb_dc.get_rec_all()) == db_len
        # teardown
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH
        return
    assert res == exp_res
    assert len(bdb.get_rec_all()) == exp_db_len
    if delay_commit:
        assert len(bdb_dc.get_rec_all()) == db_len
    else:
        assert len(bdb_dc.get_rec_all()) == exp_db_len

    # teardown
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH


@pytest.mark.parametrize(
    'index, delay_commit, input_retval',
    [
        [-1, False, False],
        [0, False, False],
        [1, False, True],
        [1, False, False],
        [1, True, True],
        [1, True, False],
        [100, False, True],
    ]
)
def test_delete_rec_index_and_delay_commit(index, delay_commit, input_retval):
    """test delete rec, index and delay commit."""
    bdb = BukuDb()
    bdb_dc = BukuDb()  # instance for delay_commit check.

    # Fill bookmark
    for bookmark in TEST_BOOKMARKS:
        bdb.add_rec(*bookmark)
    db_len = len(TEST_BOOKMARKS)

    n_index = index

    with mock.patch('builtins.input', return_value=input_retval):
        res = bdb.delete_rec(index=index, delay_commit=delay_commit)

    if n_index < 0:
        assert not res
    elif n_index > db_len:
        assert not res
        assert len(bdb.get_rec_all()) == db_len
    elif index == 0 and input_retval != 'y':
        assert not res
        assert len(bdb.get_rec_all()) == db_len
    else:
        assert res
        assert len(bdb.get_rec_all()) == db_len - 1
        if delay_commit:
            assert len(bdb_dc.get_rec_all()) == db_len
        else:
            assert len(bdb_dc.get_rec_all()) == db_len - 1

    # teardown
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH


@pytest.mark.parametrize(
    'index, is_range, low, high',
    [
        # range on non zero index
        (0, True, 1, 1),
        # range on zero index
        (0, True, 0, 0),
        # zero index only
        (0, False, 0, 0),
    ]
)
def test_delete_rec_on_empty_database(setup, index, is_range, low, high):
    """test delete rec, on empty database."""
    bdb = BukuDb()
    with mock.patch('builtins.input', return_value='y'):
        res = bdb.delete_rec(index, is_range, low, high)

    if (is_range and any([low == 0, high == 0])) or (not is_range and index == 0):
        assert res
        # teardown
        os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH
        return

    if is_range and low > 1 and high > 1:
        assert not res

    # teardown
    os.environ['XDG_DATA_HOME'] = TEST_TEMP_DIR_PATH


@pytest.mark.parametrize(
    'index, low, high, is_range',
    [
        ['a', 'a', 1, True],
        ['a', 'a', 1, False],
        ['a', 1, 'a', True],
    ]
)
def test_delete_rec_on_non_interger(index, low, high, is_range):
    """test delete rec on non integer arg."""
    bdb = BukuDb()

    for bookmark in TEST_BOOKMARKS:
        bdb.add_rec(*bookmark)

    if is_range and not (isinstance(low, int) and isinstance(high, int)):
        with pytest.raises(TypeError):
            bdb.delete_rec(index=index, low=low, high=high, is_range=is_range)
        return
    if not is_range and not isinstance(index, int):
        with pytest.raises(TypeError):
            bdb.delete_rec(index=index, low=low, high=high, is_range=is_range)
    else:
        assert bdb.delete_rec(index=index, low=low, high=high, is_range=is_range)


@pytest.mark.parametrize('url', ['', False, None, 0])
def test_add_rec_add_invalid_url(caplog, url):
    """test method."""
    bdb = BukuDb()
    res = bdb.add_rec(url=url)
    assert res == -1
    assert caplog.records[0].levelname == 'ERROR'
    assert caplog.records[0].getMessage() == 'Invalid URL'


@pytest.mark.parametrize(
    "kwargs, exp_arg",
    [
        [
            {'url': 'example.com'},
            ('example.com', 'Example Domain', ',', '', 0)
        ],
        [
            {'url': 'http://example.com'},
            ('http://example.com', 'Example Domain', ',', '', 0)
        ],
        [
            {'url': 'http://example.com', 'immutable': 1},
            ('http://example.com', 'Example Domain', ',', '', 1)
        ],
        [
            {'url': 'http://example.com', 'desc': 'randomdesc'},
            ('http://example.com', 'Example Domain', ',', 'randomdesc', 0)
        ],
        [
            {'url': 'http://example.com', 'title_in': 'randomtitle'},
            ('http://example.com', 'randomtitle', ',', '', 0)
        ],
        [
            {'url': 'http://example.com', 'tags_in': 'tag1'},
            ('http://example.com', 'Example Domain', ',tag1,', '', 0),
        ],
        [
            {'url': 'http://example.com', 'tags_in': ',tag1'},
            ('http://example.com', 'Example Domain', ',tag1,', '', 0),
        ],
        [
            {'url': 'http://example.com', 'tags_in': ',tag1,'},
            ('http://example.com', 'Example Domain', ',tag1,', '', 0),
        ],
    ]
)
def test_add_rec_exec_arg(kwargs, exp_arg):
    """test func."""
    bdb = BukuDb()
    bdb.cur = mock.Mock()
    bdb.get_rec_id = mock.Mock(return_value=-1)
    bdb.add_rec(**kwargs)
    assert bdb.cur.execute.call_args[0][1] == exp_arg


def test_update_rec_index_0(caplog):
    """test method."""
    bdb = BukuDb()
    res = bdb.update_rec(index=0, url='http://example.com')
    assert not res
    assert caplog.records[0].getMessage() == 'All URLs cannot be same'
    assert caplog.records[0].levelname == 'ERROR'


def test_update_rec_only_index():
    """test method."""
    bdb = BukuDb()
    res = bdb.update_rec(index=1)
    assert res


@pytest.mark.parametrize('url', [None, ''])
def test_update_rec_invalid_url(url):
    """test method."""
    bdb = BukuDb()
    res = bdb.update_rec(index=1, url=url)
    assert res


@pytest.mark.parametrize('invalid_tag', ['+,', '-,'])
def test_update_rec_invalid_tag(caplog, invalid_tag):
    """test method."""
    url = 'http://example.com'
    bdb = BukuDb()
    res = bdb.update_rec(index=1, url=url, tags_in=invalid_tag)
    assert not res
    try:
        assert caplog.records[0].getMessage() == 'Please specify a tag'
        assert caplog.records[0].levelname == 'ERROR'
    except IndexError as e:
        if (sys.version_info.major, sys.version_info.minor) == (3, 4):
            print('caplog records: {}'.format(caplog.records))
            for idx, record in enumerate(caplog.records):
                print('idx:{};{};message:{};levelname:{}'.format(
                    idx, record, record.getMessage(), record.levelname))
        else:
            raise e


@pytest.mark.parametrize('read_in_retval', ['y', 'n', ''])
def test_update_rec_update_all_bookmark(caplog, read_in_retval):
    """test method."""
    caplog.set_level(logging.DEBUG)
    with mock.patch('buku.read_in', return_value=read_in_retval):
        bdb = BukuDb()
        res = bdb.update_rec(index=0, tags_in='tags1')
        assert res if read_in_retval == 'y' else not res
        if read_in_retval == 'y':
            assert caplog.records[0].getMessage() == \
                "update_rec query: " \
                "\"UPDATE bookmarks SET tags = ?\", args: [',tags1,']"
        else:
            assert not caplog.records


@pytest.mark.parametrize(
    'get_system_editor_retval, index, exp_res',
    [
        ['none', 0, False],
        ['nano', -2, False],
    ]
)
def test_edit_update_rec_with_invalid_input(get_system_editor_retval, index, exp_res):
    """test method."""
    with mock.patch('buku.get_system_editor', return_value=get_system_editor_retval):
        import buku
        bdb = buku.BukuDb()
        res = bdb.edit_update_rec(index=index)
        assert res == exp_res


@vcr.use_cassette('tests/vcr_cassettes/test_browse_by_index.yaml')
@given(
    low=st.integers(min_value=-2, max_value=3),
    high=st.integers(min_value=-2, max_value=3),
    index=st.integers(min_value=-2, max_value=3),
    is_range=st.booleans(),
    empty_database=st.booleans(),
)
@example(low=0, high=0, index=0, is_range=False, empty_database=True)
@settings(max_examples=2, deadline=None)
def test_browse_by_index(low, high, index, is_range, empty_database):
    """test method."""
    n_low, n_high = (high, low) if low > high else (low, high)
    with mock.patch('buku.browse'):
        import buku
        bdb = buku.BukuDb()
        bdb.delete_rec_all()
        db_len = 0
        if not empty_database:
            bdb.add_rec("https://www.google.com/ncr", "?")
            db_len += 1
        res = bdb.browse_by_index(index=index, low=low, high=high, is_range=is_range)
        if is_range and (low < 0 or high < 0):
            assert not res
        elif is_range and n_low > 0 and n_high > 0:
            assert res
        elif is_range:
            assert not res
        elif not is_range and index < 0:
            assert not res
        elif not is_range and index > db_len:
            assert not res
        elif not is_range and index >= 0 and empty_database:
            assert not res
        elif not is_range and 0 <= index <= db_len and not empty_database:
            assert res
        else:
            raise ValueError
        bdb.delete_rec_all()


@pytest.fixture()
def chrome_db():
    # compatibility
    dir_path = os.path.dirname(os.path.realpath(__file__))
    res_yaml_file = os.path.join(dir_path, 'test_bukuDb', '25491522_res.yaml')
    res_nopt_yaml_file = os.path.join(dir_path, 'test_bukuDb', '25491522_res_nopt.yaml')
    json_file = os.path.join(dir_path, 'test_bukuDb', 'Bookmarks')
    return json_file, res_yaml_file, res_nopt_yaml_file


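# The dump_data flag in the import tests below regenerates the expected-call YAML
# fixtures instead of asserting against them; keep it False for normal test runs.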
@pytest.mark.parametrize('add_pt', [True, False])
def test_load_chrome_database(chrome_db, add_pt):
    """test method."""
    # compatibility
    json_file = chrome_db[0]
    res_yaml_file = chrome_db[1] if add_pt else chrome_db[2]
    dump_data = False  # NOTE: change this value to dump data
    if not dump_data:
        with open(res_yaml_file, 'r') as f:
            try:
                res_yaml = yaml.load(f, Loader=yaml.FullLoader)
            except RuntimeError:
                res_yaml = yaml.load(f, Loader=PrettySafeLoader)
    # init
    import buku
    bdb = buku.BukuDb()
    bdb.add_rec = mock.Mock()
    bdb.load_chrome_database(json_file, None, add_pt)
    call_args_list_dict = dict(bdb.add_rec.call_args_list)
    # test
    if not dump_data:
        assert call_args_list_dict == res_yaml
    # dump data for new test
    if dump_data:
        with open(res_yaml_file, 'w') as f:
            yaml.dump(call_args_list_dict, f)
        print('call args list dict dumped to:{}'.format(res_yaml_file))


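# Fixture for the Firefox import test: downloads a sample places.sqlite (once) from the
# buku GitHub issue attachment and caches the extracted file under tests/test_bukuDb.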
@pytest.fixture()
def firefox_db(tmpdir):
    zip_url = 'https://github.com/jarun/buku/files/1319933/bookmarks.zip'
    dir_path = os.path.dirname(os.path.realpath(__file__))
    res_yaml_file = os.path.join(dir_path, 'test_bukuDb', 'firefox_res.yaml')
    res_nopt_yaml_file = os.path.join(dir_path, 'test_bukuDb', 'firefox_res_nopt.yaml')
    ff_db_path = os.path.join(dir_path, 'test_bukuDb', 'places.sqlite')
    if not os.path.isfile(ff_db_path):
        tmp_zip = tmpdir.join('bookmarks.zip')
        with urllib.request.urlopen(zip_url) as response, open(tmp_zip.strpath, 'wb') as out_file:
            shutil.copyfileobj(response, out_file)
        zip_obj = zipfile.ZipFile(tmp_zip.strpath)
        zip_obj.extractall(path=os.path.join(dir_path, 'test_bukuDb'))
    return ff_db_path, res_yaml_file, res_nopt_yaml_file


@pytest.mark.parametrize('add_pt', [True, False])
def test_load_firefox_database(firefox_db, add_pt):
    # compatibility
    ff_db_path = firefox_db[0]
    dump_data = False  # NOTE: change this value to dump data
    res_yaml_file = firefox_db[1] if add_pt else firefox_db[2]
    if not dump_data:
        with open(res_yaml_file, 'r') as f:
            try:
                res_yaml = yaml.load(f, Loader=yaml.FullLoader)
            except RuntimeError:
                res_yaml = yaml.load(f, Loader=PrettySafeLoader)
    # init
    import buku
    bdb = buku.BukuDb()
    bdb.add_rec = mock.Mock()
    bdb.load_firefox_database(ff_db_path, None, add_pt)
    call_args_list_dict = dict(bdb.add_rec.call_args_list)
    # test
    if not dump_data:
        assert call_args_list_dict == res_yaml
    if dump_data:
        with open(res_yaml_file, 'w') as f:
            yaml.dump(call_args_list_dict, f)
        print('call args list dict dumped to:{}'.format(res_yaml_file))


@pytest.mark.parametrize(
    'keyword_results, stag_results, exp_res',
    [
        ([], [], []),
        (['item1'], ['item1', 'item2'], ['item1']),
        (['item2'], ['item1'], []),
    ]
)
def test_search_keywords_and_filter_by_tags(keyword_results, stag_results, exp_res):
    """test method."""
    # init
    import buku
    bdb = buku.BukuDb()
    bdb.searchdb = mock.Mock(return_value=keyword_results)
    bdb.search_by_tag = mock.Mock(return_value=stag_results)
    # test
    res = bdb.search_keywords_and_filter_by_tags(
        mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock(), [])
    assert exp_res == res


@pytest.mark.parametrize(
    'search_results, exclude_results, exp_res',
    [
        ([], [], []),
        (['item1', 'item2'], ['item2'], ['item1']),
        (['item2'], ['item1'], ['item2']),
        (['item1', 'item2'], ['item1', 'item2'], []),
    ]
)
def test_exclude_results_from_search(search_results, exclude_results, exp_res):
    """test method."""
    # init
    import buku
    bdb = buku.BukuDb()
    bdb.searchdb = mock.Mock(return_value=exclude_results)
    # test
    res = bdb.exclude_results_from_search(
        search_results, [], True)
    assert exp_res == res


def test_exportdb_empty_db():
    with NamedTemporaryFile(delete=False) as f:
        db = BukuDb(dbfile=f.name)
        with NamedTemporaryFile(delete=False) as f2:
            res = db.exportdb(f2.name)
            assert not res


def test_exportdb_single_rec(tmpdir):
    with NamedTemporaryFile(delete=False) as f:
        db = BukuDb(dbfile=f.name)
        db.add_rec('http://example.com')
        exp_file = tmpdir.join('export')
        db.exportdb(exp_file.strpath)
        with open(exp_file.strpath) as f:
            assert f.read()


def test_exportdb_to_db():
    with NamedTemporaryFile(delete=False) as f1, NamedTemporaryFile(delete=False, suffix='.db') as f2:
        db = BukuDb(dbfile=f1.name)
        db.add_rec('http://example.com')
        db.add_rec('http://google.com')
        with mock.patch('builtins.input', return_value='y'):
            db.exportdb(f2.name)
        db2 = BukuDb(dbfile=f2.name)
        assert db.get_rec_all() == db2.get_rec_all()


@pytest.mark.parametrize(
    'urls, exp_res',
    [
        [[], -1],
        [['http://example.com'], 1],
        [['htttp://example.com', 'http://google.com'], 2],
    ])
def test_get_max_id(urls, exp_res):
    with NamedTemporaryFile(delete=False) as f:
        db = BukuDb(dbfile=f.name)
        if urls:
            list(map(lambda x: db.add_rec(x), urls))
        assert db.get_max_id() == exp_res


# Helper functions for testcases


def split_and_test_membership(a, b):
    # :param a, b: comma separated strings to split
    # test that everything in a is in b
    return all(x in b.split(',') for x in a.split(','))


def inclusive_range(start, end):
    return list(range(start, end + 1))


def normalize_range(db_len, low, high):
    """normalize index and range.

    Args:
        db_len (int): database length.
        low (int): low limit.
        high (int): high limit.

    Returns:
        Tuple containing the normalized variables (low, high).
    """
    require_comparison = True
    # don't try to normalize variables that are not integers.
    if not isinstance(low, int):
        n_low = low
        require_comparison = False
    if not isinstance(high, int):
        n_high = high
        require_comparison = False

    max_value = db_len
    if low == 'max' and high == 'max':
        n_low = db_len
        n_high = max_value
    elif low == 'max' and high != 'max':
        n_low = high
        n_high = max_value
    elif low != 'max' and high == 'max':
        n_low = low
        n_high = max_value
    else:
        n_low = low
        n_high = high

    if require_comparison:
        if n_high < n_low:
            n_high, n_low = n_low, n_high

    return (n_low, n_high)


if __name__ == "__main__":
    unittest.main()