Skip to content

Commit bb19720

Browse files
committed
tests: make clean_db fixture dispose the SQLAlchemy engine so sqlite file locks are released between tests
1 parent 27dd5f4 commit bb19720

1 file changed

Lines changed: 50 additions & 16 deletions

File tree

tests/test_scraperwiki.py

Lines changed: 50 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -20,22 +20,66 @@
2020

2121
@pytest.fixture(scope="function")
def clean_db():
    """
    Robust database cleanup fixture.

    Closes the active connection, disposes of the SQLAlchemy engine (which
    releases every pooled connection that might still be holding the sqlite
    file open), resets the module-level state, and removes the database
    file.  The same teardown runs again after the test so no state leaks
    into the next test.
    """

    def _dispose_db_state():
        # Close connection, dispose engine, and delete the sqlite file.
        # Shared by the pre-test and post-test phases so the two cleanup
        # paths cannot drift apart.

        # 1. Close the active transaction/connection.
        if scraperwiki.sql._State._connection:
            try:
                scraperwiki.sql._State._connection.close()
            except Exception:
                # Best-effort: a half-broken connection must not abort cleanup.
                pass

        # 2. IMPORTANT: dispose of the engine.  This closes all connections
        # in the pool that might be holding the file open; closing only the
        # current connection is not enough.
        if scraperwiki.sql._State.engine:
            scraperwiki.sql._State.engine.dispose()

        # 3. Delete the database file.
        if os.path.exists('scraperwiki.sqlite'):
            try:
                os.remove('scraperwiki.sqlite')
            except OSError:
                # If we still can't delete it (e.g. Windows file locking),
                # it might be okay if we are overwriting, but usually this
                # implies a leaked handle.
                pass

    _dispose_db_state()

    # Reset global state variables so the next sql call rebuilds everything
    # from scratch.
    scraperwiki.sql._State._connection = None
    scraperwiki.sql._State._transaction = None
    scraperwiki.sql._State.engine = None
    scraperwiki.sql._State.metadata = None
    scraperwiki.sql._State.table = None
    scraperwiki.sql._State.table_pending = None

    yield  # Run the test

    # Repeat the close/dispose/delete logic to ensure no leaking into the
    # next test.
    _dispose_db_state()
2773

2874
# Called TestAAAWarning so that it gets run first by nosetests,
# which we need, otherwise the warning has already happened.
class TestAAAWarning(TestCase):
    def test_save_no_warn(self):
        """A plain save into a fresh table must not emit any warning."""
        # NOTE(review): the @pytest.mark.usefixtures("clean_db") marker was
        # removed from this class in this WIP commit — confirm the fixture is
        # applied another way (e.g. autouse), otherwise leftover DB state can
        # make the warning fire early and break this ordering trick.
        with warnings.catch_warnings():
            # Promote every warning to an error so any warning fails the test.
            warnings.simplefilter("error")
            record = dict(id=4, tumble='weed')
            scraperwiki.sql.save(
                ['id'], record, table_name="warning_test")
3782

38-
@pytest.mark.usefixtures("clean_db")
3983
class TestSaveGetVar(TestCase):
4084
def savegetvar(self, var):
4185
scraperwiki.sql.save_var(u"weird\u1234", var)
@@ -76,12 +120,10 @@ def test_save_multiple_values(self):
76120
self.assertEqual(u'hello', scraperwiki.sql.get_var(u'foo\xc3'))
77121
self.assertEqual(u'goodbye\u1234', scraperwiki.sql.get_var(u'bar'))
78122

79-
@pytest.mark.usefixtures("clean_db")
80123
class TestGetNonexistantVar(TestCase):
    def test_get(self):
        """get_var on a key that was never saved returns None."""
        missing = scraperwiki.sql.get_var(u'meatball\xff')
        self.assertIsNone(missing)
83126

84-
@pytest.mark.usefixtures("clean_db")
85127
class TestSaveVar(TestCase):
86128
def setUp(self):
87129
scraperwiki.sql.save_var(u"birthday\xfe", u"\u1234November 30, 1888")
@@ -99,7 +141,6 @@ def test_insert(self):
99141
observed = [(colname, value.decode('utf-8'), _type)]
100142
self.assertEqual(observed, expected)
101143

102-
@pytest.mark.usefixtures("clean_db")
103144
class SaveAndCheck(TestCase):
104145
def save_and_check(self, dataIn, tableIn, dataOut, tableOut=None, twice=True):
105146
if tableOut == None:
@@ -127,14 +168,12 @@ def save_and_check(self, dataIn, tableIn, dataOut, tableOut=None, twice=True):
127168
self.assertListEqual(observed1, expected1)
128169
self.assertListEqual(observed2, expected2)
129170

130-
@pytest.mark.usefixtures("clean_db")
131171
class SaveAndSelect(TestCase):
    def save_and_select(self, d):
        """Round-trip *d* through save() and select(), asserting equality."""
        scraperwiki.sql.save([], {u"foo\xdd": d})
        rows = scraperwiki.sql.select(u'* FROM swdata')
        stored = rows[0][u'foo\xdd']
        self.assertEqual(d, stored)
136176

137-
@pytest.mark.usefixtures("clean_db")
138177
class TestUniqueKeys(SaveAndSelect):
139178
def test_empty(self):
140179
scraperwiki.sql.save([], {u"foo\xde": 3}, table_name=u"Chico\xcc")
@@ -181,7 +220,6 @@ def test_two(self):
181220
uniquecol = indices[u"keys"].index(u'unique')
182221
self.assertEqual(index[uniquecol], 1)
183222

184-
@pytest.mark.usefixtures("clean_db")
185223
class TestSaveColumn(TestCase):
186224
def test_add_column(self):
187225
# Indicative for
@@ -209,7 +247,6 @@ def test_add_column(self):
209247
self.assertEqual(stdout, "".encode('utf-8'))
210248
self.assertEqual(stderr, "".encode('utf-8'))
211249

212-
@pytest.mark.usefixtures("clean_db")
213250
class TestSave(SaveAndCheck):
214251
def test_save_int(self):
215252
self.save_and_check(
@@ -282,7 +319,6 @@ def test_save_and_drop(self):
282319
scraperwiki.sql.execute(u"DROP TABLE dropper\xaa")
283320
scraperwiki.sql.save([], dict(foo=9), table_name=u"dropper\xaa")
284321

285-
@pytest.mark.usefixtures("clean_db")
286322
class TestQuestionMark(TestCase):
287323
def test_one_question_mark_with_nonlist(self):
288324
scraperwiki.sql.execute(u'CREATE TABLE zhuozi\xaa (\xaa TEXT);')
@@ -306,7 +342,6 @@ def test_multiple_question_marks(self):
306342
self.assertListEqual(observed, [{'a': 'apple', 'b': 'banana'}])
307343
scraperwiki.sql.execute('DROP TABLE zhuozi')
308344

309-
@pytest.mark.usefixtures("clean_db")
310345
class TestDateTime(TestCase):
311346
def rawdate(self, table="swdata", column="datetime"):
312347
connection = sqlite3.connect(DB_NAME)
@@ -359,7 +394,6 @@ def test_status(self):
359394

360395
self.assertEqual(scraperwiki.status('ok'), None)
361396

362-
@pytest.mark.usefixtures("clean_db")
363397
class TestUnicodeColumns(TestCase):
364398
maxDiff = None
365399
def test_add_column_once_only(self):

0 commit comments

Comments
 (0)