diff --git a/appveyor.yml b/appveyor.yml
index 75800844..abfcf16f 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -9,7 +9,6 @@ environment:
     - nodejs_version: "10"
 
 platform:
-  - x86
   - x64
 
 install:
diff --git a/bench/db-bench-plot.sh b/bench/db-bench-plot.sh
deleted file mode 100755
index 4a8293db..00000000
--- a/bench/db-bench-plot.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/sh
-
-gnuplot <<EOF
-  reset
-  set terminal pngcairo truecolor enhanced font "Ubuntu Mono,13" size 1920, 1080
-  set output "/tmp/5mbench.png"
-  set datafile separator ','
-
-  set logscale y
-  set nologscale y2
-  unset log y2
-  set autoscale y
-  set autoscale y2
-  set ytics nomirror
-  set y2tics
-  set tics out
-
-  set xlabel "Minutes" tc rgb "#777777"
-  set ylabel "Milliseconds per write" tc rgb "#777777"
-  set y2label "Throughput MB/s" tc rgb "#777777"
-
-  set title "Node.js LevelDB (LevelDOWN): 100,000,000 random writes, 64M write buffer, HDD RAID1" tc rgb "#777777"
-  set key left tc rgb "#777777"
-  set border lc rgb "#777777"
-
-  set style line 1 lt 7 ps 1.2 lc rgb "#55019FD7"
-  set style line 2 lt 7 ps 0.1 lc rgb "#55019FD7"
-  set style line 3 lt 1 lw 2   lc rgb "#55019FD7"
-
-  set style line 4 lt 7 ps 1.2 lc rgb "#559ECC3C"
-  set style line 5 lt 7 ps 0.1 lc rgb "#559ECC3C"
-  set style line 6 lt 1 lw 2   lc rgb "#559ECC3C"
-
-  set style line 7 lt 7 ps 1.2 lc rgb "#55CC3C3C"
-  set style line 8 lt 7 ps 0.1 lc rgb "#55CC3C3C"
-  set style line 9 lt 1 lw 2   lc rgb "#55CC3C3C"
-
-  set style line 10 lt 7 ps 1.2 lc rgb "#553C3C3C"
-  set style line 11 lt 7 ps 0.1 lc rgb "#553C3C3C"
-  set style line 12 lt 1 lw 2   lc rgb "#553C3C3C"
-
-  plot \
-      1/0 with points title "Google LevelDB" ls 1 \
-    , 1/0 with points title "Hyper LevelDB"  ls 4 \
-    , 1/0 with points title "Basho LevelDB"  ls 7 \
-    , 1/0 with points title "LMDB"  ls 10 \
-    , "5m_google.csv" using (\$1/1000/60):(\$4/1000000) notitle         ls 2 axes x1y1 \
-    , "5m_hyper.csv"  using (\$1/1000/60):(\$4/1000000) notitle         ls 5 axes x1y1 \
-    , "5m_basho.csv"  using (\$1/1000/60):(\$4/1000000) notitle         ls 8 axes x1y1 \
-    , "5m_lmdb.csv"   using (\$1/1000/60):(\$4/1000000) notitle         ls 11 axes x1y1 \
-    , "5m_google.csv" using (\$1/1000/60):(\$5)         w lines notitle ls 3 axes x1y2 \
-    , "5m_hyper.csv"  using (\$1/1000/60):(\$5)         w lines notitle ls 6 axes x1y2 \
-    , "5m_basho.csv"  using (\$1/1000/60):(\$5)         w lines notitle ls 9 axes x1y2 \
-    , "5m_lmdb.csv"   using (\$1/1000/60):(\$5)         w lines notitle ls 12 axes x1y2 \
-
-EOF
diff --git a/bench/db-bench.js b/bench/db-bench.js
deleted file mode 100755
index 8b9080af..00000000
--- a/bench/db-bench.js
+++ /dev/null
@@ -1,118 +0,0 @@
-#!/usr/bin/env node
-
-const leveldown = require('../')
-const fs = require('fs')
-const du = require('du')
-const path = require('path')
-
-const argv = require('optimist').argv
-
-const options = {
-  benchmark: argv.benchmark,
-  useExisting: argv.use_existing,
-  db: argv.db || path.join(__dirname, 'db'),
-  num: argv.num || 1000000,
-  concurrency: argv.concurrency || 4,
-  cacheSize: argv.cacheSize || 8,
-  writeBufferSize: argv.writeBufferSize || 4,
-  valueSize: argv.valueSize || 100,
-  timingOutput: argv.timingOutput || path.join(__dirname, 'timingOutput'),
-  throughputOutput: argv.throughputOutput
-}
-
-const randomString = require('slump').string
-const keyTmpl = '0000000000000000'
-
-if (!options.useExisting) {
-  leveldown.destroy(options.db, function () {})
-}
-
-var db = leveldown(options.db)
-var timesStream = fs.createWriteStream(options.timingOutput, 'utf8')
-
-function make16CharPaddedKey () {
-  var r = Math.floor(Math.random() * options.num)
-  var k = keyTmpl + r
-  return k.substr(k.length - 16)
-}
-
-timesStream.write('Elapsed (ms), Entries, Bytes, Last 1000 Avg Time, MB/s\n')
-
-function start () {
-  var inProgress = 0
-  var totalWrites = 0
-  var totalBytes = 0
-  var startTime = Date.now()
-  var timesAccum = 0
-  var elapsed
-
-  function report () {
-    console.log(
-      'Wrote'
-      , options.num
-      , 'entries in'
-      , Math.floor((Date.now() - startTime) / 1000) + 's,'
-      , (Math.floor((totalBytes / 1048576) * 100) / 100) + 'MB'
-    )
-    timesStream.end()
-
-    du(options.db, function (err, size) {
-      if (err) throw err
-      console.log('Database size:', Math.floor(size / 1024 / 1024) + 'M')
-    })
-  }
-
-  function write () {
-    if (totalWrites++ === options.num) {
-      db.close(function () {
-        report(Date.now() - startTime)
-      })
-    }
-    if (inProgress >= options.concurrency || totalWrites > options.num) return
-
-    inProgress++
-
-    if (totalWrites % 100000 === 0) {
-      console.log('' + inProgress, totalWrites, Math.round(totalWrites / options.num * 100) + '%')
-    }
-
-    if (totalWrites % 1000 === 0) {
-      elapsed = Date.now() - startTime
-      timesStream.write(
-        elapsed +
-        ',' + totalWrites +
-        ',' + totalBytes +
-        ',' + Math.floor(timesAccum / 1000) +
-        ',' + (Math.floor(((totalBytes / 1048576) / (elapsed / 1000)) * 100) / 100) +
-        '\n')
-      timesAccum = 0
-    }
-
-    var time = process.hrtime()
-
-    db.put(make16CharPaddedKey(), randomString({ length: options.valueSize }), function (err) {
-      if (err) throw err
-
-      totalBytes += keyTmpl.length + options.valueSize
-      timesAccum += process.hrtime(time)[1]
-      inProgress--
-      process.nextTick(write)
-    })
-  }
-
-  for (var i = 0; i < options.concurrency; i++) {
-    write()
-  }
-}
-
-setTimeout(function () {
-  db.open({
-    errorIfExists: false,
-    createIfMissing: true,
-    cacheSize: options.cacheSize << 20,
-    writeBufferSize: options.writeBufferSize << 20
-  }, function (err) {
-    if (err) throw err
-    start()
-  })
-}, 500)
diff --git a/bench/memory.js b/bench/memory.js
deleted file mode 100644
index c40bc31d..00000000
--- a/bench/memory.js
+++ /dev/null
@@ -1,98 +0,0 @@
-var leveldown = require('../')
-var path = require('path')
-
-var addr = '1111111111111111111114oLvT2'
-
-var db = leveldown(path.join(process.env.HOME, 'iterleak.db'))
-var records = {
-  'w/a/14r6JPSJNzBXXJEM2jnmoybQCw3arseKuY/primary': '00',
-  'w/a/17nJuKqjTyAeujSJnPCebpSTEz1v9kjNKg/primary': '00',
-  'w/a/18cxWLCiJMESL34Ev1LJ2meGTgL14bAxfj/primary': '00',
-  'w/a/18pghEAnqCRTrjd7iyUj6XNKmSNx4fAywB/primary': '00',
-  'w/a/19EWPPzY6XkQeM7DxqJ4THbY3DGekRQKbt/primary': '00',
-  'w/a/1DKDeLQyBCkV5aMG15XbAdpwwHnxqw9rvY/primary': '00',
-  'w/a/1HSJAoU5TaGKhAJxwpTC1imiMM1ab8SFGW/primary': '00',
-  'w/a/1HkeafxVvStf2Np6wxUjkTpCt1gTDJSLpi/primary': '00',
-  'w/a/1Hr5JduPFdZ4n4dHBUqRoxLQEG4P93C658/primary': '00',
-  'w/a/1KATodK9Ko8MchJZzDxLhjWz4d8oAuCqEh/primary': '00',
-  'w/a/1NhRKhiAAJrmwXoLhL9dGG1z6oMiFGrxZ7/primary': '00',
-  'w/a/1yTq3DpyUNqUCxDttczGjufbEBKAXMTSq/primary': '00',
-  'w/w/primary': '00'
-}
-
-db.open({
-  createIfMissing: true,
-  errorIfExists: false,
-  compression: true,
-  cacheSize: 8 << 20,
-  writeBufferSize: 4 << 20,
-  maxOpenFiles: 8192
-}, function (err) {
-  if (err) throw err
-
-  memory()
-
-  var batch = db.batch()
-  Object.keys(records).forEach(function (key) {
-    var value = Buffer.from(records[key], 'hex')
-    batch.put(key, value)
-  })
-
-  batch.write(function (err) {
-    if (err) throw err
-
-    // This will leak roughly 1mb per call.
-    setTimeout(function self () {
-      var i = 0
-      ;(function next (err) {
-        if (err) throw err
-        if (i++ >= 10000) {
-          memory()
-          return setTimeout(self, 1000)
-        }
-        iterate(addr, next)
-      })()
-    }, 1000)
-  })
-})
-
-function memory () {
-  var mem = process.memoryUsage()
-  console.log('Memory: rss=%dmb, js-heap=%d/%dmb native-heap=%dmb',
-    mb(mem.rss),
-    mb(mem.heapUsed),
-    mb(mem.heapTotal),
-    mb(mem.rss - mem.heapTotal))
-}
-
-function mb (size) {
-  return size / 1024 / 1024 | 0
-}
-
-function iterate (address, callback) {
-  var iter = db.iterator({
-    gte: 'w/a/' + address,
-    lte: 'w/a/' + address + '~',
-    keys: true,
-    values: false,
-    fillCache: false,
-    keyAsBuffer: false
-  })
-
-  ;(function next () {
-    iter.next(function (err, key, value) {
-      if (err) {
-        return iter.end(function (e) {
-          if (e) throw e
-          callback(err)
-        })
-      }
-
-      if (key === undefined) {
-        return iter.end(callback)
-      }
-
-      next()
-    })
-  })()
-}
diff --git a/bench/write-random-plot.sh b/bench/write-random-plot.sh
deleted file mode 100755
index f9294ae3..00000000
--- a/bench/write-random-plot.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-
-gnuplot <<EOF
-  reset
-  set terminal png size 1920, 1080
-  set output "write_random_times.png"
-  set datafile separator ','
-
-  #set yrange [0:0.6]
-  set logscale y
-
-  set xlabel "Seconds"
-  set ylabel "Milliseconds per write"
-
-  set title "1.3G / 10,000,000 writes"
-  set key below
-  set grid
-
-  plot "write_random_times_g32.csv" using (\$1/1000):(\$2/1000000) title "Google LevelDB" lc rgb "red" lt 7 ps 0.3, \
-       "write_random_times_h32.csv" using (\$1/1000):(\$2/1000000) title "HyperDex LevelDB" lc rgb "green" lt 7 ps 0.3, \
-       "write_random_times_b32.csv" using (\$1/1000):(\$2/1000000) title "Basho LevelDB" lc rgb "blue" lt 7 ps 0.3, \
-       "write_random_times.csv" using (\$1/1000):(\$2/1000000) title "LMDB" lc rgb "black" lt 7 ps 0.3
-
-EOF
diff --git a/bench/write-random.js b/bench/write-random.js
deleted file mode 100644
index b68fb7e7..00000000
--- a/bench/write-random.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const leveldown = require('../')
-const crypto = require('crypto')
-const fs = require('fs')
-const du = require('du')
-const uuid = require('uuid')
-
-const entryCount = 10000000
-const concurrency = 10
-const timesFile = './write_random_times.csv'
-const dbDir = './write_random.db'
-const data = crypto.randomBytes(256) // buffer
-
-var db = leveldown(dbDir)
-var timesStream = fs.createWriteStream(timesFile, 'utf8')
-
-function report (ms) {
-  console.log('Wrote', entryCount, 'in', Math.floor(ms / 1000) + 's')
-  timesStream.end()
-  du(dbDir, function (err, size) {
-    if (err) throw err
-    console.log('Database size:', Math.floor(size / 1024 / 1024) + 'M')
-  })
-  console.log('Wrote times to ', timesFile)
-}
-
-db.open(function (err) {
-  if (err) throw err
-
-  var inProgress = 0
-  var totalWrites = 0
-  var startTime = Date.now()
-  var writeBuf = ''
-
-  function write () {
-    if (totalWrites % 100000 === 0) console.log(inProgress, totalWrites)
-
-    if (totalWrites % 1000 === 0) {
-      timesStream.write(writeBuf)
-      writeBuf = ''
-    }
-
-    if (totalWrites++ === entryCount) return report(Date.now() - startTime)
-    if (inProgress >= concurrency || totalWrites > entryCount) return
-
-    var time = process.hrtime()
-    inProgress++
-
-    db.put(uuid.v4(), data, function (err) {
-      if (err) throw err
-      writeBuf += (Date.now() - startTime) + ',' + process.hrtime(time)[1] + '\n'
-      inProgress--
-      process.nextTick(write)
-    })
-
-    process.nextTick(write)
-  }
-
-  write()
-})
diff --git a/bench/write-sorted-plot.sh b/bench/write-sorted-plot.sh
deleted file mode 100755
index 24ee173a..00000000
--- a/bench/write-sorted-plot.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-
-gnuplot <<EOF
-  reset
-  set terminal png size 1920, 1080
-  set output "write_sorted_times.png"
-  set datafile separator ','
-
-  #set yrange [0:0.6]
-
-  set xlabel "Seconds"
-  set ylabel "Milliseconds per write"
-
-  set title "1.3G / 10,000,000 writes"
-  set key below
-  set grid
-
-  plot "write_sorted_times_g.csv" using (\$1/1000):(\$2/1000000) title "Google LevelDB" lc rgb "red" lt 7 ps 0.3, \
-       "write_sorted_times_h.csv" using (\$1/1000):(\$2/1000000) title "HyperDex LevelDB" lc rgb "green" lt 7 ps 0.3, \
-       "write_sorted_times_b.csv" using (\$1/1000):(\$2/1000000) title "Basho LevelDB" lc rgb "blue" lt 7 ps 0.3
-
-EOF
\ No newline at end of file
diff --git a/bench/write-sorted.js b/bench/write-sorted.js
deleted file mode 100644
index 885c479e..00000000
--- a/bench/write-sorted.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const leveldown = require('../')
-const timestamp = require('monotonic-timestamp')
-const crypto = require('crypto')
-const fs = require('fs')
-const du = require('du')
-
-const entryCount = 10000000
-const concurrency = 10
-const timesFile = './write_sorted_times.csv'
-const dbDir = './write_sorted.db'
-const data = crypto.randomBytes(256) // buffer
-
-var db = leveldown(dbDir)
-var timesStream = fs.createWriteStream(timesFile, 'utf8')
-
-function report (ms) {
-  console.log('Wrote', entryCount, 'in', Math.floor(ms / 1000) + 's')
-  timesStream.end()
-  du(dbDir, function (err, size) {
-    if (err) throw err
-    console.log('Database size:', Math.floor(size / 1024 / 1024) + 'M')
-  })
-  console.log('Wrote times to ', timesFile)
-}
-
-db.open({ errorIfExists: true, createIfMissing: true }, function (err) {
-  if (err) throw err
-
-  var inProgress = 0
-  var totalWrites = 0
-  var startTime = Date.now()
-  var writeBuf = ''
-
-  function write () {
-    if (totalWrites % 100000 === 0) console.log(inProgress, totalWrites)
-
-    if (totalWrites % 1000 === 0) {
-      timesStream.write(writeBuf)
-      writeBuf = ''
-    }
-
-    if (totalWrites++ === entryCount) return report(Date.now() - startTime)
-    if (inProgress >= concurrency || totalWrites > entryCount) return
-
-    var time = process.hrtime()
-    inProgress++
-
-    db.put(timestamp(), data, function (err) {
-      if (err) throw err
-      writeBuf += (Date.now() - startTime) + ',' + process.hrtime(time)[1] + '\n'
-      inProgress--
-      process.nextTick(write)
-    })
-
-    process.nextTick(write)
-  }
-
-  write()
-})
diff --git a/chained-batch.js b/chained-batch.js
index fadc76d4..f0f14cfe 100644
--- a/chained-batch.js
+++ b/chained-batch.js
@@ -14,12 +14,12 @@ ChainedBatch.prototype._del = function (key) {
   this.binding.del(key)
 }
 
-ChainedBatch.prototype._clear = function (key) {
-  this.binding.clear(key)
+ChainedBatch.prototype._clear = function () {
+  this.binding.clear()
 }
 
 ChainedBatch.prototype._write = function (options, callback) {
-  this.binding.write(options, callback)
+  this.binding.write(callback)
 }
 
 util.inherits(ChainedBatch, AbstractChainedBatch)
diff --git a/iterator.js b/iterator.js
index 9d412e56..1e99a819 100644
--- a/iterator.js
+++ b/iterator.js
@@ -13,18 +13,9 @@ function Iterator (db, options) {
 
 util.inherits(Iterator, AbstractIterator)
 
-Iterator.prototype.seek = function (target) {
-  if (this._ended) {
-    throw new Error('cannot call seek() after end()')
-  }
-  if (this._nexting) {
-    throw new Error('cannot call seek() before next() has completed')
-  }
-  if (typeof target !== 'string' && !Buffer.isBuffer(target)) {
-    throw new Error('seek() requires a string or buffer key')
-  }
+Iterator.prototype._seek = function (target) {
   if (target.length === 0) {
-    throw new Error('cannot seek() to an empty key')
+    throw new Error('cannot seek() to an empty target')
   }
 
   this.cache = null
diff --git a/leveldown.js b/leveldown.js
index 9a19c5b1..5d4f0d77 100644
--- a/leveldown.js
+++ b/leveldown.js
@@ -10,7 +10,13 @@ function LevelDOWN (location) {
     return new LevelDOWN(location)
   }
 
-  AbstractLevelDOWN.call(this, location)
+  if (typeof location !== 'string') {
+    throw new Error('constructor requires a location string argument')
+  }
+
+  AbstractLevelDOWN.call(this)
+
+  this.location = location
   this.binding = binding(location)
 }
 
@@ -24,6 +30,14 @@ LevelDOWN.prototype._close = function (callback) {
   this.binding.close(callback)
 }
 
+LevelDOWN.prototype._serializeKey = function (key) {
+  return Buffer.isBuffer(key) ? key : String(key)
+}
+
+LevelDOWN.prototype._serializeValue = function (value) {
+  return Buffer.isBuffer(value) ? value : String(value)
+}
+
 LevelDOWN.prototype._put = function (key, value, options, callback) {
   this.binding.put(key, value, options, callback)
 }
@@ -49,7 +63,7 @@ LevelDOWN.prototype.approximateSize = function (start, end, callback) {
       end == null ||
       typeof start === 'function' ||
       typeof end === 'function') {
-    throw new Error('approximateSize() requires valid `start`, `end` and `callback` arguments')
+    throw new Error('approximateSize() requires valid `start` and `end` arguments')
   }
 
   if (typeof callback !== 'function') {
@@ -63,6 +77,20 @@ LevelDOWN.prototype.approximateSize = function (start, end, callback) {
 }
 
 LevelDOWN.prototype.compactRange = function (start, end, callback) {
+  if (start == null ||
+      end == null ||
+      typeof start === 'function' ||
+      typeof end === 'function') {
+    throw new Error('compactRange() requires valid `start` and `end` arguments')
+  }
+
+  if (typeof callback !== 'function') {
+    throw new Error('compactRange() requires a callback argument')
+  }
+
+  start = this._serializeKey(start)
+  end = this._serializeKey(end)
+
   this.binding.compactRange(start, end, callback)
 }
 
@@ -73,6 +101,11 @@ LevelDOWN.prototype.getProperty = function (property) {
 }
 
 LevelDOWN.prototype._iterator = function (options) {
+  if (this.status !== 'open') {
+    // Prevent segfault
+    throw new Error('cannot call iterator() before open()')
+  }
+
   return new Iterator(this, options)
 }
 
diff --git a/package.json b/package.json
index aaa1ad87..cdfde2e1 100644
--- a/package.json
+++ b/package.json
@@ -10,11 +10,11 @@
     "coverage": "nyc report --reporter=text-lcov | coveralls",
     "rebuild": "prebuild --compile",
     "hallmark": "hallmark --fix",
-    "dependency-check": "dependency-check . test/*.js bench/*.js",
+    "dependency-check": "dependency-check . test/*.js",
     "prepublishOnly": "npm run dependency-check"
   },
   "dependencies": {
-    "abstract-leveldown": "~5.0.0",
+    "abstract-leveldown": "~6.0.3",
     "bindings": "~1.5.0",
     "fast-future": "~1.0.2",
     "nan": "~2.13.2",
@@ -27,20 +27,18 @@
     "dependency-check": "^3.3.0",
     "du": "~0.1.0",
     "hallmark": "^0.1.0",
-    "iota-array": "^1.0.0",
+    "level-concat-iterator": "^2.0.0",
     "level-community": "^3.0.0",
-    "lexicographic-integer": "^1.1.0",
     "mkfiletree": "^1.0.1",
     "monotonic-timestamp": "~0.0.8",
     "nyc": "^14.0.0",
-    "optimist": "~0.6.1",
     "prebuild": "^8.0.0",
     "prebuild-ci": "^2.0.0",
     "readfiletree": "~0.0.1",
     "rimraf": "^2.6.1",
-    "slump": "^3.0.0",
     "standard": "^12.0.0",
     "tape": "^4.10.0",
+    "tempy": "^0.2.1",
     "uuid": "^3.2.1",
     "verify-travis-appveyor": "^3.0.0"
   },
diff --git a/src/batch.cc b/src/batch.cc
index 88572352..1472c3fc 100644
--- a/src/batch.cc
+++ b/src/batch.cc
@@ -130,19 +130,13 @@ NAN_METHOD(Batch::Clear) {
 NAN_METHOD(Batch::Write) {
   Batch* batch = ObjectWrap::Unwrap<Batch>(info.Holder());
 
-  if (batch->hasData) {
-    Nan::Callback *callback =
-        new Nan::Callback(v8::Local<v8::Function>::Cast(info[0]));
-    BatchWriteWorker* worker  = new BatchWriteWorker(batch, callback);
-    // persist to prevent accidental GC
-    v8::Local<v8::Object> _this = info.This();
-    worker->SaveToPersistent("batch", _this);
-    Nan::AsyncQueueWorker(worker);
-  } else {
-    LD_RUN_CALLBACK("rocksdb::batch.write",
-                    v8::Local<v8::Function>::Cast(info[0]),
-                    0, NULL);
-  }
+  Nan::Callback *callback =
+      new Nan::Callback(v8::Local<v8::Function>::Cast(info[0]));
+  BatchWriteWorker* worker = new BatchWriteWorker(batch, callback);
+  // persist to prevent accidental GC
+  v8::Local<v8::Object> _this = info.This();
+  worker->SaveToPersistent("batch", _this);
+  Nan::AsyncQueueWorker(worker);
 }
 
 } // namespace leveldown
diff --git a/src/batch.h b/src/batch.h
index 49d166c2..d6bcbc17 100644
--- a/src/batch.h
+++ b/src/batch.h
@@ -22,11 +22,12 @@ class Batch : public Nan::ObjectWrap {
   ~Batch ();
   rocksdb::Status Write ();
 
+  bool hasData; // keep track of whether we're writing data or not
+
 private:
   leveldown::Database* database;
   rocksdb::WriteOptions* options;
   rocksdb::WriteBatch* batch;
-  bool hasData; // keep track of whether we're writing data or not
 
   static NAN_METHOD(New);
   static NAN_METHOD(Put);
diff --git a/src/batch_async.cc b/src/batch_async.cc
index fbb56f4e..db3846f7 100644
--- a/src/batch_async.cc
+++ b/src/batch_async.cc
@@ -16,7 +16,9 @@ BatchWriteWorker::BatchWriteWorker (
 BatchWriteWorker::~BatchWriteWorker () {}
 
 void BatchWriteWorker::Execute () {
-  SetStatus(batch->Write());
+  if (batch->hasData) {
+    SetStatus(batch->Write());
+  }
 }
 
 } // namespace leveldown
diff --git a/test/abstract-leveldown-test.js b/test/abstract-leveldown-test.js
new file mode 100644
index 00000000..12888ea0
--- /dev/null
+++ b/test/abstract-leveldown-test.js
@@ -0,0 +1 @@
+require('abstract-leveldown/test')(require('./common'))
diff --git a/test/approximate-size-test.js b/test/approximate-size-test.js
index d400cfa6..06dced35 100644
--- a/test/approximate-size-test.js
+++ b/test/approximate-size-test.js
@@ -1,13 +1,12 @@
 const test = require('tape')
-const leveldown = require('..')
-const testCommon = require('abstract-leveldown/testCommon')
+const testCommon = require('./common')
 
 var db
 
 test('setUp common for approximate size', testCommon.setUp)
 
 test('setUp db', function (t) {
-  db = leveldown(testCommon.location())
+  db = testCommon.factory()
   db.open(t.end.bind(t))
 })
 
@@ -67,7 +66,7 @@ test('test 1-arg + callback approximateSize() throws', function (t) {
 
 test('test custom _serialize*', function (t) {
   t.plan(4)
-  var db = leveldown(testCommon.location())
+  var db = testCommon.factory()
   db._serializeKey = function (data) { return data }
   db.approximateSize = function (start, end, callback) {
     t.deepEqual(start, { foo: 'bar' })
@@ -106,11 +105,7 @@ test('test approximateSize()', function (t) {
           t.equal(typeof size, 'number')
           // account for snappy compression, original would be ~100000
           t.ok(size > 40000, 'size reports a reasonable amount (' + size + ')')
-
-          db.close(function (err) {
-            t.error(err)
-            t.end()
-          })
+          t.end()
         })
       })
     })
diff --git a/test/batch-test.js b/test/batch-test.js
deleted file mode 100644
index f56b353a..00000000
--- a/test/batch-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/batch-test')
-
-abstract.all(leveldown, test)
diff --git a/test/chained-batch-test.js b/test/chained-batch-test.js
deleted file mode 100644
index 505dbd43..00000000
--- a/test/chained-batch-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/chained-batch-test')
-
-abstract.all(leveldown, test)
diff --git a/test/cleanup-hanging-iterators-test.js b/test/cleanup-hanging-iterators-test.js
index 09d84b57..e1e54092 100644
--- a/test/cleanup-hanging-iterators-test.js
+++ b/test/cleanup-hanging-iterators-test.js
@@ -5,11 +5,11 @@ makeTest('test ended iterator', function (db, t, done) {
 
   var it = db.iterator({ keyAsBuffer: false, valueAsBuffer: false })
   it.next(function (err, key, value) {
-    t.notOk(err, 'no error from next()')
+    t.ifError(err, 'no error from next()')
     t.equal(key, 'one', 'correct key')
     t.equal(value, '1', 'correct value')
     it.end(function (err) {
-      t.notOk(err, 'no error from next()')
+      t.ifError(err, 'no error from end()')
       done()
     })
   })
@@ -19,7 +19,7 @@ makeTest('test non-ended iterator', function (db, t, done) {
   // no end() call on our iterator, cleanup should crash Node if not handled properly
   var it = db.iterator({ keyAsBuffer: false, valueAsBuffer: false })
   it.next(function (err, key, value) {
-    t.notOk(err, 'no error from next()')
+    t.ifError(err, 'no error from next()')
     t.equal(key, 'one', 'correct key')
     t.equal(value, '1', 'correct value')
     done()
diff --git a/test/close-test.js b/test/close-test.js
deleted file mode 100644
index 967745b0..00000000
--- a/test/close-test.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/close-test')
-
-module.exports.setUp = function () {
-  test('setUp', testCommon.setUp)
-}
-
-module.exports.close = abstract.close
-
-module.exports.tearDown = function () {
-  test('tearDown', testCommon.tearDown)
-}
-
-module.exports.all = function (leveldown) {
-  module.exports.setUp()
-  module.exports.close(leveldown, test)
-  module.exports.tearDown()
-}
-
-module.exports.all(leveldown)
diff --git a/test/common.js b/test/common.js
new file mode 100644
index 00000000..14a4a904
--- /dev/null
+++ b/test/common.js
@@ -0,0 +1,11 @@
+const test = require('tape')
+const tempy = require('tempy')
+const leveldown = require('..')
+const suite = require('abstract-leveldown/test')
+
+module.exports = suite.common({
+  test: test,
+  factory: function () {
+    return leveldown(tempy.directory())
+  }
+})
diff --git a/test/compact-range-test.js b/test/compact-range-test.js
index 850637d0..d83263f2 100644
--- a/test/compact-range-test.js
+++ b/test/compact-range-test.js
@@ -1,13 +1,12 @@
 const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
-const leveldown = require('../')
+const testCommon = require('./common')
 
 var db
 
 test('setUp common', testCommon.setUp)
 
 test('setUp db', function (t) {
-  db = leveldown(testCommon.location())
+  db = testCommon.factory()
   db.open(t.end.bind(t))
 })
 
@@ -16,20 +15,26 @@ test('test compactRange() frees disk space after key deletion', function (t) {
   var key2 = '000001'
   var val1 = Buffer.allocUnsafe(64).fill(1)
   var val2 = Buffer.allocUnsafe(64).fill(1)
-  db.put(key1, val1, function () {
-    db.put(key2, val2, function () {
-      db.compactRange(key1, key2, function () {
-        db.approximateSize('0', 'z', function (err, sizeAfterPuts) {
-          t.error(err, 'no error')
-          db.del(key1, function () {
-            db.del(key2, function () {
-              db.compactRange(key1, key2, function () {
-                db.approximateSize('0', 'z', function (err, sizeAfterCompact) {
-                  t.error(err, 'no error')
-                  t.ok(sizeAfterCompact < sizeAfterPuts)
-                  t.end()
-                })
-              })
+
+  db.batch().put(key1, val1).put(key2, val2).write(function (err) {
+    t.ifError(err, 'no batch put error')
+
+    db.compactRange(key1, key2, function (err) {
+      t.ifError(err, 'no compactRange1 error')
+
+      db.approximateSize('0', 'z', function (err, sizeAfterPuts) {
+        t.error(err, 'no approximateSize1 error')
+
+        db.batch().del(key1).del(key2).write(function (err) {
+          t.ifError(err, 'no batch del error')
+
+          db.compactRange(key1, key2, function (err) {
+            t.ifError(err, 'no compactRange2 error')
+
+            db.approximateSize('0', 'z', function (err, sizeAfterCompact) {
+              t.error(err, 'no approximateSize2 error')
+              t.ok(sizeAfterCompact < sizeAfterPuts)
+              t.end()
             })
           })
         })
@@ -38,6 +43,22 @@ test('test compactRange() frees disk space after key deletion', function (t) {
   })
 })
 
+test('test compactRange() serializes start and end', function (t) {
+  t.plan(3)
+
+  var clone = Object.create(db)
+  var count = 0
+
+  clone._serializeKey = function (key) {
+    t.is(key, count++)
+    return db._serializeKey(key)
+  }
+
+  clone.compactRange(0, 1, function (err) {
+    t.ifError(err, 'no compactRange error')
+  })
+})
+
 test('tearDown', function (t) {
   db.close(testCommon.tearDown.bind(null, t))
 })
diff --git a/test/compression-test.js b/test/compression-test.js
index cb63f37d..a3b831c2 100644
--- a/test/compression-test.js
+++ b/test/compression-test.js
@@ -1,14 +1,9 @@
-/* Copyright (c) 2012-2017 LevelUP contributors
- * See list at <https://github.com/level/leveldown#contributing>
- * MIT License <https://github.com/level/leveldown/blob/master/LICENSE.md>
- */
-
-var async = require('async')
-var du = require('du')
-var delayed = require('delayed')
-var common = require('abstract-leveldown/testCommon')
-var leveldown = require('../')
-var test = require('tape')
+const async = require('async')
+const du = require('du')
+const delayed = require('delayed')
+const testCommon = require('./common')
+const leveldown = require('..')
+const test = require('tape')
 
 var compressableData = Buffer.from(Array.apply(null, Array(1024 * 100)).map(function () { return 'aaaaaaaaaa' }).join(''))
 var multiples = 10
@@ -44,10 +39,10 @@ var cycle = function (db, compression, t, callback) {
 
 test('compression', function (t) {
   t.plan(4)
-  t.test('set up', common.setUp)
+  t.test('set up', testCommon.setUp)
 
   t.test('test data is compressed by default (db.put())', function (t) {
-    var db = leveldown(common.location())
+    var db = testCommon.factory()
     db.open(function (err) {
       t.error(err)
       async.forEach(Array.apply(null, Array(multiples)).map(function (e, i) {
@@ -59,7 +54,7 @@ test('compression', function (t) {
   })
 
   t.test('test data is not compressed with compression=false on open() (db.put())', function (t) {
-    var db = leveldown(common.location())
+    var db = testCommon.factory()
     db.open({ compression: false }, function (err) {
       t.error(err)
       async.forEach(Array.apply(null, Array(multiples)).map(function (e, i) {
@@ -71,7 +66,7 @@ test('compression', function (t) {
   })
 
   t.test('test data is compressed by default (db.batch())', function (t) {
-    var db = leveldown(common.location())
+    var db = testCommon.factory()
     db.open(function (err) {
       t.error(err)
       db.batch(Array.apply(null, Array(multiples)).map(function (e, i) {
diff --git a/test/del-test.js b/test/del-test.js
deleted file mode 100644
index c3d49c14..00000000
--- a/test/del-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/del-test')
-
-abstract.all(leveldown, test)
diff --git a/test/destroy-test.js b/test/destroy-test.js
index a80d33ea..96b88748 100644
--- a/test/destroy-test.js
+++ b/test/destroy-test.js
@@ -1,5 +1,5 @@
 const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
+const tempy = require('tempy')
 const fs = require('fs')
 const path = require('path')
 const mkfiletree = require('mkfiletree')
@@ -26,7 +26,7 @@ test('test callback-less, 1-arg, destroy() throws', function (t) {
 test('test destroy non-existent directory', function (t) {
   t.plan(4)
 
-  var location = testCommon.location()
+  var location = tempy.directory()
   var parent = path.dirname(location)
 
   // For symmetry with the opposite test below.
@@ -34,7 +34,7 @@ test('test destroy non-existent directory', function (t) {
 
   // Cleanup to avoid conflicts with other tests
   rimraf(location, { glob: false }, function (err) {
-    t.ifError(err, 'no rimraf error')
+    t.ifError(err, 'no error from rimraf()')
 
     leveldown.destroy(location, function () {
       t.is(arguments.length, 0, 'no arguments returned on callback')
@@ -69,17 +69,17 @@ test('test destroy non leveldb directory', function (t) {
   }
 
   mkfiletree.makeTemp('destroy-test', tree, function (err, dir) {
-    t.ifError(err, 'no close error')
+    t.ifError(err, 'no error from makeTemp()')
 
     leveldown.destroy(dir, function (err) {
-      t.ifError(err, 'no destroy error')
+      t.ifError(err, 'no error from destroy()')
 
       readfiletree(dir, function (err, actual) {
-        t.ifError(err, 'no read error')
+        t.ifError(err, 'no error from readfiletree()')
         t.deepEqual(actual, tree, 'directory remains untouched')
 
         mkfiletree.cleanUp(function (err) {
-          t.ifError(err, 'no cleanup error')
+          t.ifError(err, 'no error from cleanup()')
           t.end()
         })
       })
@@ -87,33 +87,35 @@ test('test destroy non leveldb directory', function (t) {
   })
 })
 
-makeTest('test destroy() cleans and removes leveldb-only dir', function (db, t, done, location) {
+makeTest('test destroy() cleans and removes leveldb-only dir', function (db, t, done) {
+  var location = db.location
   db.close(function (err) {
-    t.ifError(err, 'no close error')
+    t.ifError(err, 'no error from close()')
 
     leveldown.destroy(location, function (err) {
-      t.ifError(err, 'no destroy error')
+      t.ifError(err, 'no error from destroy()')
       t.notOk(fs.existsSync(location), 'directory completely removed')
 
-      done(false)
+      done(null, false)
     })
   })
 })
 
-makeTest('test destroy() cleans and removes only leveldb parts of a dir', function (db, t, done, location) {
+makeTest('test destroy() cleans and removes only leveldb parts of a dir', function (db, t, done) {
+  var location = db.location
   fs.writeFileSync(path.join(location, 'foo'), 'FOO')
 
   db.close(function (err) {
-    t.ifError(err, 'no close error')
+    t.ifError(err, 'no error from close()')
 
     leveldown.destroy(location, function (err) {
-      t.ifError(err, 'no destroy error')
+      t.ifError(err, 'no error from destroy()')
 
       readfiletree(location, function (err, tree) {
-        t.ifError(err, 'no read error')
+        t.ifError(err, 'no error from readfiletree()')
         t.deepEqual(tree, { 'foo': 'FOO' }, 'non-leveldb files left intact')
 
-        done(false)
+        done(null, false)
       })
     })
   })
diff --git a/test/get-test.js b/test/get-test.js
deleted file mode 100644
index 423268e7..00000000
--- a/test/get-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/get-test')
-
-abstract.all(leveldown, test)
diff --git a/test/getproperty-test.js b/test/getproperty-test.js
index 4fca9ee8..e9b8ce49 100644
--- a/test/getproperty-test.js
+++ b/test/getproperty-test.js
@@ -1,13 +1,12 @@
 const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
-const leveldown = require('../')
+const testCommon = require('./common')
 
 var db
 
 test('setUp common', testCommon.setUp)
 
 test('setUp db', function (t) {
-  db = leveldown(testCommon.location())
+  db = testCommon.factory()
   db.open(t.end.bind(t))
 })
 
diff --git a/test/iterator-gc-test.js b/test/iterator-gc-test.js
new file mode 100644
index 00000000..51a3dbd0
--- /dev/null
+++ b/test/iterator-gc-test.js
@@ -0,0 +1,68 @@
+'use strict'
+
+const test = require('tape')
+const collectEntries = require('level-concat-iterator')
+const testCommon = require('./common')
+const sourceData = []
+
+for (let i = 0; i < 1e3; i++) {
+  sourceData.push({
+    type: 'put',
+    key: i.toString(),
+    value: Math.random().toString()
+  })
+}
+
+test('setUp', testCommon.setUp)
+
+// When you have a database open with an active iterator, but no references to
+// the db, V8 will GC the database and you'll get a failed assert from LevelDB.
+test('db without ref does not get GCed while iterating', function (t) {
+  t.plan(6)
+
+  let db = testCommon.factory()
+
+  db.open(function (err) {
+    t.ifError(err, 'no open error')
+
+    // Insert test data
+    db.batch(sourceData.slice(), function (err) {
+      t.ifError(err, 'no batch error')
+
+      // Set highWaterMark to 0 so that we don't preemptively fetch.
+      const it = db.iterator({ highWaterMark: 0 })
+
+      // Remove reference
+      db = null
+
+      if (global.gc) {
+        // This is the reliable way to trigger GC (and the bug if it exists).
+        // Useful for manual testing with "node --expose-gc".
+        global.gc()
+        iterate(it)
+      } else {
+        // But a timeout usually also allows GC to kick in. If not, the time
+        // between iterator ticks might. That's when "highWaterMark: 0" helps.
+        setTimeout(iterate.bind(null, it), 1000)
+      }
+    })
+  })
+
+  function iterate (it) {
+    // No reference to db here, could be GCed. It shouldn't..
+    collectEntries(it, function (err, entries) {
+      t.ifError(err, 'no iterator error')
+      t.is(entries.length, sourceData.length, 'got data')
+
+      // Because we also have a reference on the iterator. That's the fix.
+      t.ok(it.db, 'abstract iterator has reference to db')
+
+      // Which as luck would have it, also allows us to properly end this test.
+      it.db.close(function (err) {
+        t.ifError(err, 'no close error')
+      })
+    })
+  }
+})
+
+test('tearDown', testCommon.tearDown)
diff --git a/test/iterator-range-test.js b/test/iterator-range-test.js
deleted file mode 100644
index fa828e9b..00000000
--- a/test/iterator-range-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('..')
-const abstract = require('abstract-leveldown/abstract/iterator-range-test')
-
-abstract.all(leveldown, test)
diff --git a/test/iterator-recursion-test.js b/test/iterator-recursion-test.js
index 36c69a4f..71bd9edb 100644
--- a/test/iterator-recursion-test.js
+++ b/test/iterator-recursion-test.js
@@ -1,6 +1,5 @@
 const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
-const leveldown = require('../')
+const testCommon = require('./common')
 const fork = require('child_process').fork
 const path = require('path')
 
@@ -42,7 +41,7 @@ test('try to create an iterator with a blown stack', function (t) {
 })
 
 test('setUp db', function (t) {
-  db = leveldown(testCommon.location())
+  db = testCommon.factory()
   db.open(function (err) {
     t.error(err)
     db.batch(sourceData, t.end.bind(t))
diff --git a/test/iterator-test.js b/test/iterator-test.js
index d17c0a7a..40f59bfe 100644
--- a/test/iterator-test.js
+++ b/test/iterator-test.js
@@ -1,102 +1,31 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/iterator-test')
 const make = require('./make')
-const iota = require('iota-array')
-const lexi = require('lexicographic-integer')
-const util = require('util')
 
-abstract.all(leveldown, test)
+// This test isn't included in abstract-leveldown because
+// the empty-check is currently performed by leveldown.
+make('iterator#seek throws if target is empty', function (db, t, done) {
+  var targets = ['', Buffer.alloc(0), []]
+  var pending = targets.length
 
-make('iterator throws if key is not a string or buffer', function (db, t, done) {
-  var keys = [null, undefined, 1, true, false]
-  var pending = keys.length
-
-  keys.forEach(function (key) {
-    var error
+  targets.forEach(function (target) {
     var ite = db.iterator()
+    var error
 
     try {
-      ite.seek(key)
-    } catch (e) {
-      error = e
+      ite.seek(target)
+    } catch (err) {
+      error = err.message
     }
 
-    t.ok(error, 'had error from seek()')
+    t.is(error, 'cannot seek() to an empty target', 'got error')
     ite.end(end)
   })
 
   function end (err) {
-    t.error(err, 'no error from end()')
+    t.ifError(err, 'no error from end()')
     if (!--pending) done()
   }
 })
 
-make('iterator is seekable', function (db, t, done) {
-  var ite = db.iterator()
-  ite.seek('two')
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error')
-    t.same(key.toString(), 'two', 'key matches')
-    t.same(value.toString(), '2', 'value matches')
-    ite.next(function (err, key, value) {
-      t.error(err, 'no error')
-      t.same(key, undefined, 'end of iterator')
-      t.same(value, undefined, 'end of iterator')
-      ite.end(done)
-    })
-  })
-})
-
-make('iterator is seekable with buffer', function (db, t, done) {
-  var ite = db.iterator()
-  ite.seek(Buffer.from('two'))
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error from next()')
-    t.equal(key.toString(), 'two', 'key matches')
-    t.equal(value.toString(), '2', 'value matches')
-    ite.next(function (err, key, value) {
-      t.error(err, 'no error from next()')
-      t.equal(key, undefined, 'end of iterator')
-      t.equal(value, undefined, 'end of iterator')
-      ite.end(done)
-    })
-  })
-})
-
-make('reverse seek in the middle', function (db, t, done) {
-  var ite = db.iterator({ reverse: true, limit: 1 })
-  ite.seek('three!')
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error')
-    t.same(key.toString(), 'three', 'key matches')
-    t.same(value.toString(), '3', 'value matches')
-    ite.end(done)
-  })
-})
-
-make('iterator invalid seek', function (db, t, done) {
-  var ite = db.iterator()
-  ite.seek('zzz')
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error')
-    t.same(key, undefined, 'end of iterator')
-    t.same(value, undefined, 'end of iterator')
-    ite.end(done)
-  })
-})
-
-make('reverse seek from invalid range', function (db, t, done) {
-  var ite = db.iterator({ reverse: true })
-  ite.seek('zzz')
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error')
-    t.same(key.toString(), 'two', 'end of iterator')
-    t.same(value.toString(), '2', 'end of iterator')
-    ite.end(done)
-  })
-})
-
 make('iterator optimized for seek', function (db, t, done) {
   var batch = db.batch()
   batch.put('a', 1)
@@ -108,23 +37,23 @@ make('iterator optimized for seek', function (db, t, done) {
   batch.put('g', 1)
   batch.write(function (err) {
     var ite = db.iterator()
-    t.error(err, 'no error from batch')
+    t.ifError(err, 'no error from batch()')
     ite.next(function (err, key, value) {
-      t.error(err, 'no error from next()')
+      t.ifError(err, 'no error from next()')
       t.equal(key.toString(), 'a', 'key matches')
       t.equal(ite.cache.length, 0, 'no cache')
       ite.next(function (err, key, value) {
-        t.error(err, 'no error from next()')
+        t.ifError(err, 'no error from next()')
         t.equal(key.toString(), 'b', 'key matches')
         t.ok(ite.cache.length > 0, 'has cached items')
         ite.seek('d')
         t.notOk(ite.cache, 'cache is removed')
         ite.next(function (err, key, value) {
-          t.error(err, 'no error from next()')
+          t.ifError(err, 'no error from next()')
           t.equal(key.toString(), 'd', 'key matches')
           t.equal(ite.cache.length, 0, 'no cache')
           ite.next(function (err, key, value) {
-            t.error(err, 'no error from next()')
+            t.ifError(err, 'no error from next()')
             t.equal(key.toString(), 'e', 'key matches')
             t.ok(ite.cache.length > 0, 'has cached items')
             ite.end(done)
@@ -135,151 +64,25 @@ make('iterator optimized for seek', function (db, t, done) {
   })
 })
 
-make('iterator seek before next has completed', function (db, t, done) {
-  var ite = db.iterator()
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error from next()')
-    ite.end(done)
-  })
-  var error
-  try {
-    ite.seek('two')
-  } catch (e) {
-    error = e
-  }
-  t.ok(error, 'had error from seek() before next() has completed')
-})
-
 make('close db with open iterator', function (db, t, done) {
   var ite = db.iterator()
   var cnt = 0
+  var hadError = false
+
   ite.next(function loop (err, key, value) {
     if (cnt++ === 0) {
-      t.error(err, 'no error from next()')
+      t.ifError(err, 'no error from next()')
     } else {
       t.equal(err.message, 'iterator has ended')
+      hadError = true
     }
     if (key !== undefined) { ite.next(loop) }
   })
 
   db.close(function (err) {
-    t.error(err, 'no error from close()')
-    done(false)
-  })
-})
+    t.ifError(err, 'no error from close()')
+    t.ok(hadError)
 
-make('iterator seek after end', function (db, t, done) {
-  var ite = db.iterator()
-  ite.next(function (err, key, value) {
-    t.error(err, 'no error from next()')
-    ite.end(function (err) {
-      t.error(err, 'no error from end()')
-      var error
-      try {
-        ite.seek('two')
-      } catch (e) {
-        error = e
-      }
-      t.ok(error, 'had error from seek() after end()')
-      done()
-    })
+    done(null, false)
   })
 })
-
-make('iterator seek respects range', function (db, t, done) {
-  db.batch(pairs(10), function (err) {
-    t.error(err, 'no error from batch()')
-
-    var pending = 0
-
-    expect({ gt: '5' }, '4', undefined)
-    expect({ gt: '5' }, '5', undefined)
-    expect({ gt: '5' }, '6', '6')
-
-    expect({ gte: '5' }, '4', undefined)
-    expect({ gte: '5' }, '5', '5')
-    expect({ gte: '5' }, '6', '6')
-
-    expect({ start: '5' }, '4', undefined)
-    expect({ start: '5' }, '5', '5')
-    expect({ start: '5' }, '6', '6')
-
-    expect({ lt: '5' }, '4', '4')
-    expect({ lt: '5' }, '5', undefined)
-    expect({ lt: '5' }, '6', undefined)
-
-    expect({ lte: '5' }, '4', '4')
-    expect({ lte: '5' }, '5', '5')
-    expect({ lte: '5' }, '6', undefined)
-
-    expect({ end: '5' }, '4', '4')
-    expect({ end: '5' }, '5', '5')
-    expect({ end: '5' }, '6', undefined)
-
-    expect({ lt: '5', reverse: true }, '4', '4')
-    expect({ lt: '5', reverse: true }, '5', undefined)
-    expect({ lt: '5', reverse: true }, '6', undefined)
-
-    expect({ lte: '5', reverse: true }, '4', '4')
-    expect({ lte: '5', reverse: true }, '5', '5')
-    expect({ lte: '5', reverse: true }, '6', undefined)
-
-    expect({ start: '5', reverse: true }, '4', '4')
-    expect({ start: '5', reverse: true }, '5', '5')
-    expect({ start: '5', reverse: true }, '6', undefined)
-
-    expect({ gt: '5', reverse: true }, '4', undefined)
-    expect({ gt: '5', reverse: true }, '5', undefined)
-    expect({ gt: '5', reverse: true }, '6', '6')
-
-    expect({ gte: '5', reverse: true }, '4', undefined)
-    expect({ gte: '5', reverse: true }, '5', '5')
-    expect({ gte: '5', reverse: true }, '6', '6')
-
-    expect({ end: '5', reverse: true }, '4', undefined)
-    expect({ end: '5', reverse: true }, '5', '5')
-    expect({ end: '5', reverse: true }, '6', '6')
-
-    expect({ gt: '7', lt: '8' }, '7', undefined)
-    expect({ gte: '7', lt: '8' }, '7', '7')
-    expect({ gte: '7', lt: '8' }, '8', undefined)
-    expect({ gt: '7', lte: '8' }, '8', '8')
-
-    function expect (range, target, expected) {
-      pending++
-      var ite = db.iterator(range)
-
-      ite.seek(target)
-      ite.next(function (err, key, value) {
-        t.error(err, 'no error from next()')
-
-        var tpl = 'seek(%s) on %s yields %s'
-        var msg = util.format(tpl, target, util.inspect(range), expected)
-
-        if (expected === undefined) {
-          t.equal(value, undefined, msg)
-        } else {
-          t.equal(value.toString(), expected, msg)
-        }
-
-        ite.end(function (err) {
-          t.error(err, 'no error from end()')
-          if (!--pending) done()
-        })
-      })
-    }
-  })
-})
-
-function pairs (length, opts) {
-  opts = opts || {}
-  return iota(length).filter(not(opts.not)).map(function (k) {
-    var key = opts.lex ? lexi.pack(k, 'hex') : '' + k
-    return { type: 'put', key: key, value: '' + k }
-  })
-}
-
-function not (n) {
-  if (typeof n === 'function') return function (k) { return !n(k) }
-  return function (k) { return k !== n }
-}
diff --git a/test/leak-tester-batch.js b/test/leak-tester-batch.js
index 1a90e88a..03315119 100644
--- a/test/leak-tester-batch.js
+++ b/test/leak-tester-batch.js
@@ -3,12 +3,13 @@
 const BUFFERS = false
 const CHAINED = false
 
-var leveldown = require('..')
-var crypto = require('crypto')
-var assert = require('assert')
-var writeCount = 0
-var rssBase
-var db
+const testCommon = require('./common')
+const crypto = require('crypto')
+const assert = require('assert')
+
+let writeCount = 0
+let rssBase
+let db
 
 function print () {
   if (writeCount % 100 === 0) {
@@ -73,10 +74,8 @@ var run = CHAINED
     print()
   }
 
-leveldown.destroy('./leakydb', function () {
-  db = leveldown('./leakydb')
-  db.open({ xcacheSize: 0, xmaxOpenFiles: 10 }, function () {
-    rssBase = process.memoryUsage().rss
-    run()
-  })
+db = testCommon.factory()
+db.open(function () {
+  rssBase = process.memoryUsage().rss
+  run()
 })
diff --git a/test/leak-tester.js b/test/leak-tester.js
index 44c8d5c8..c745d3b0 100644
--- a/test/leak-tester.js
+++ b/test/leak-tester.js
@@ -1,9 +1,9 @@
 /* global gc */
 
-const BUFFERS = false
+const testCommon = require('./common')
+const crypto = require('crypto')
 
-var leveldown = require('..')
-var crypto = require('crypto')
+var BUFFERS = false
 var putCount = 0
 var getCount = 0
 var rssBase
@@ -42,10 +42,8 @@ function run () {
   }
 }
 
-leveldown.destroy('./leakydb', function () {
-  db = leveldown('./leakydb')
-  db.open({ xcacheSize: 0, xmaxOpenFiles: 10 }, function () {
-    rssBase = process.memoryUsage().rss
-    run()
-  })
+db = testCommon.factory()
+db.open(function () {
+  rssBase = process.memoryUsage().rss
+  run()
 })
diff --git a/test/leveldown-test.js b/test/leveldown-test.js
index 7c2d0d83..bf002307 100644
--- a/test/leveldown-test.js
+++ b/test/leveldown-test.js
@@ -1,5 +1,11 @@
 const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/leveldown-test')
+const leveldown = require('..')
 
-abstract.args(leveldown, test)
+test('test database creation non-string location throws', function (t) {
+  t.throws(
+    leveldown.bind(null, {}),
+    /constructor requires a location string argument/,
+    'non-string location leveldown() throws'
+  )
+  t.end()
+})
diff --git a/test/make.js b/test/make.js
index 80e9035d..61fff6d3 100644
--- a/test/make.js
+++ b/test/make.js
@@ -1,40 +1,31 @@
 const test = require('tape')
-const testCommon = require('abstract-leveldown/testCommon')
-const cleanup = testCommon.cleanup
-const location = testCommon.location
-const leveldown = require('../')
+const testCommon = require('./common')
 
 function makeTest (name, testFn) {
   test(name, function (t) {
-    cleanup(function () {
-      var loc = location()
-      var db = leveldown(loc)
-      var done = function (close) {
-        if (close === false) {
-          cleanup(function (err) {
-            t.error(err, 'no error after cleanup')
-            t.end()
-          })
-          return
-        }
-        db.close(function (err) {
-          t.notOk(err, 'no error from close()')
-          cleanup(function (err) {
-            t.error(err, 'no error after cleanup')
-            t.end()
-          })
-        })
+    var db = testCommon.factory()
+    var done = function (err, close) {
+      t.ifError(err, 'no error from done()')
+
+      if (close === false) {
+        t.end()
+        return
       }
-      db.open(function (err) {
-        t.notOk(err, 'no error from open()')
-        db.batch([
-          { type: 'put', key: 'one', value: '1' },
-          { type: 'put', key: 'two', value: '2' },
-          { type: 'put', key: 'three', value: '3' }
-        ], function (err) {
-          t.notOk(err, 'no error from batch()')
-          testFn(db, t, done, loc)
-        })
+
+      db.close(function (err) {
+        t.ifError(err, 'no error from close()')
+        t.end()
+      })
+    }
+    db.open(function (err) {
+      t.ifError(err, 'no error from open()')
+      db.batch([
+        { type: 'put', key: 'one', value: '1' },
+        { type: 'put', key: 'two', value: '2' },
+        { type: 'put', key: 'three', value: '3' }
+      ], function (err) {
+        t.ifError(err, 'no error from batch()')
+        testFn(db, t, done)
       })
     })
   })
diff --git a/test/open-read-only-test.js b/test/open-read-only-test.js
index c05ed5e8..5c8961b1 100644
--- a/test/open-read-only-test.js
+++ b/test/open-read-only-test.js
@@ -1,39 +1,36 @@
 'use strict'
 
 const test = require('tape')
-const leveldown = require('../')
-const testCommon = require('abstract-leveldown/testCommon')
+const leveldown = require('..')
+const tempy = require('tempy')
 const fs = require('fs')
 const path = require('path')
 
-var location = testCommon.location()
+const location = tempy.directory()
 
 // This is used because it's not sufficient on windows to set a parent folder as readonly
-function chmodFilesSync (dir, mode) {
-  var files = fs.readdirSync(dir)
-  files.forEach(function (file) {
-    var fullPath = path.join(dir, file)
-    fs.chmodSync(fullPath, mode)
+function chmodRecursive (mode) {
+  fs.readdirSync(location).forEach(function (file) {
+    fs.chmodSync(path.join(location, file), mode)
   })
+  fs.chmodSync(location, mode)
 }
 
-test('setUp', function (t) {
-  // just in case we thew an error last time and don't have perms to remove db
-  if (fs.existsSync(location)) {
-    fs.chmodSync(location, 0o755)
-    chmodFilesSync(location, 0o755)
-  }
-  testCommon.setUp(t)
-})
+function factory (mode) {
+  if (mode != null) chmodRecursive(mode)
+  return leveldown(location)
+}
 
 test('test write to read/write database', function (t) {
-  var db = leveldown(location)
+  const db = factory()
+
   db.open(function (err) {
-    t.error(err)
+    t.ifError(err, 'no error from open()')
+
     db.put('my key', 'my value', function (err) {
-      t.error(err, 'no write error')
+      t.ifError(err, 'no error from put()')
       db.get('my key', function (err, value) {
-        t.error(err, 'no read error')
+        t.ifError(err, 'no error from get()')
         t.equal(value.toString(), 'my value', 'correct value')
         db.close(t.end.bind(t))
       })
@@ -42,9 +39,8 @@ test('test write to read/write database', function (t) {
 })
 
 test('test throw error reading read-only database', function (t) {
-  chmodFilesSync(location, 0o555)
-  fs.chmodSync(location, 0o555)
-  var db = leveldown(location)
+  const db = factory(0o555)
+
   db.open(function (err) {
     t.ok(err, 'should get error reading read only database')
     t.ok(/IO Error/i.test(err && err.message), 'should get io error')
@@ -53,13 +49,13 @@ test('test throw error reading read-only database', function (t) {
 })
 
 test('test read from a read-only database if readOnly is true', function (t) {
-  chmodFilesSync(location, 0o555)
-  fs.chmodSync(location, 0o555)
-  var db = leveldown(location)
+  const db = factory(0o555)
+
   db.open({ readOnly: true }, function (err) {
-    t.error(err)
+    t.ifError(err, 'no error from open()')
+
     db.get('my key', function (err, value) {
-      t.error(err, 'no read error')
+      t.ifError(err, 'no error from get()')
       t.equal(value.toString(), 'my value', 'correct value')
       db.close(t.end.bind(t))
     })
@@ -67,9 +63,8 @@ test('test read from a read-only database if readOnly is true', function (t) {
 })
 
 test('test throw error reading read-only database if readOnly is false', function (t) {
-  chmodFilesSync(location, 0o555)
-  fs.chmodSync(location, 0o555)
-  var db = leveldown(location)
+  const db = factory(0o555)
+
   db.open({ readOnly: false }, function (err) {
     t.ok(err, 'should get error reading read only database')
     t.ok(/IO Error/i.test(err && err.message), 'should get io error')
@@ -78,11 +73,11 @@ test('test throw error reading read-only database if readOnly is false', functio
 })
 
 test('test throw error putting data to read-only db if readOnly is true', function (t) {
-  chmodFilesSync(location, 0o555)
-  fs.chmodSync(location, 0o555)
-  var db = leveldown(location)
+  const db = factory(0o555)
+
   db.open({ readOnly: true }, function (err) {
-    t.error(err)
+    t.ifError(err, 'no error from open()')
+
     db.put('my key', 'my value', function (err) {
       t.ok(err, 'should get write error')
       t.ok(/Not supported operation in read only mode/i.test(err && err.message), 'should get io error')
@@ -92,11 +87,11 @@ test('test throw error putting data to read-only db if readOnly is true', functi
 })
 
 test('test throw error deleting data from read-only db if readOnly is true', function (t) {
-  chmodFilesSync(location, 0o555)
-  fs.chmodSync(location, 0o555)
-  var db = leveldown(location)
+  const db = factory(0o555)
+
   db.open({ readOnly: true }, function (err) {
-    t.error(err)
+    t.ifError(err, 'no error from open()')
+
     db.del('my key', function (err) {
       t.ok(err, 'should get write error')
       t.ok(/Not supported operation in read only mode/i.test(err && err.message), 'should get io error')
@@ -104,12 +99,3 @@ test('test throw error deleting data from read-only db if readOnly is true', fun
     })
   })
 })
-
-test('tearDown', function (t) {
-  // just in case we thew an error last time and don't have perms to remove db
-  if (fs.existsSync(location)) {
-    fs.chmodSync(location, 0o755)
-    chmodFilesSync(location, 0o755)
-  }
-  testCommon.tearDown(t)
-})
diff --git a/test/open-test.js b/test/open-test.js
deleted file mode 100644
index 606be1a9..00000000
--- a/test/open-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/open-test')
-
-abstract.all(leveldown, test)
diff --git a/test/put-get-del-test.js b/test/put-get-del-test.js
deleted file mode 100644
index 55dec6cb..00000000
--- a/test/put-get-del-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/put-get-del-test')
-
-abstract.all(leveldown, test)
diff --git a/test/put-test.js b/test/put-test.js
deleted file mode 100644
index 87e73439..00000000
--- a/test/put-test.js
+++ /dev/null
@@ -1,5 +0,0 @@
-const test = require('tape')
-const leveldown = require('../')
-const abstract = require('abstract-leveldown/abstract/put-test')
-
-abstract.all(leveldown, test)
diff --git a/test/repair-test.js b/test/repair-test.js
index 8814571d..6591b770 100644
--- a/test/repair-test.js
+++ b/test/repair-test.js
@@ -27,17 +27,24 @@ test('test repair non-existent directory returns error', function (t) {
 })
 
 // a proxy indicator that RepairDB is being called and doing its thing
-makeTest('test repair() compacts', function (db, t, done, location) {
+makeTest('test repair() compacts', function (db, t, done) {
+  var location = db.location
+
   db.close(function (err) {
-    t.notOk(err, 'no error')
+    t.ifError(err, 'no error from close()')
+
     var files = fs.readdirSync(location)
     t.ok(files.some(function (f) { return (/\.log$/).test(f) }), 'directory contains log file(s)')
     t.notOk(files.some(function (f) { return (/\.sst$/).test(f) }), 'directory does not contain sst file(s)')
-    leveldown.repair(location, function () {
+
+    leveldown.repair(location, function (err) {
+      t.ifError(err, 'no error from repair()')
+
       files = fs.readdirSync(location)
       t.notOk(files.some(function (f) { return (/\.log$/).test(f) }), 'directory does not contain log file(s)')
       t.ok(files.some(function (f) { return (/\.sst$/).test(f) }), 'directory contains sst file(s)')
-      done(false)
+
+      done(null, false)
     })
   })
 })
diff --git a/test/segfault-test.js b/test/segfault-test.js
new file mode 100644
index 00000000..e35210cc
--- /dev/null
+++ b/test/segfault-test.js
@@ -0,0 +1,43 @@
+const test = require('tape')
+const testCommon = require('./common')
+
+// See https://github.com/Level/leveldown/issues/157, not yet ported to rocksdb.
+test.skip('close() does not segfault if there is a pending write', function (t) {
+  t.plan(3)
+
+  const db = testCommon.factory()
+
+  db.open(function (err) {
+    t.ifError(err, 'no open error')
+
+    // The "sync" option seems to be a reliable way to trigger a segfault,
+    // but is not necessarily the cause of that segfault. More likely, it
+    // exposes a race condition that's already there.
+    db.put('foo', 'bar', { sync: true }, function (err) {
+      // We never get here, due to segfault.
+      t.ifError(err, 'no put error')
+    })
+
+    db.close(function (err) {
+      // We never get here, due to segfault.
+      t.ifError(err, 'no close error')
+    })
+  })
+})
+
+// See https://github.com/Level/leveldown/issues/134
+test('iterator() does not segfault if db is not open', function (t) {
+  t.plan(2)
+
+  const db = testCommon.factory()
+
+  try {
+    db.iterator()
+  } catch (err) {
+    t.is(err.message, 'cannot call iterator() before open()')
+  }
+
+  db.close(function (err) {
+    t.ifError(err, 'no close error')
+  })
+})
diff --git a/test/stack-blower.js b/test/stack-blower.js
index c6cc825b..9d464b99 100644
--- a/test/stack-blower.js
+++ b/test/stack-blower.js
@@ -5,26 +5,23 @@
   * iterator-recursion-test.js. To prevent tap from trying to run this test
   * directly, we check for a command-line argument.
   */
-const testCommon = require('abstract-leveldown/testCommon')
-const leveldown = require('../')
+const testCommon = require('./common')
 
 if (process.argv[2] === 'run') {
-  testCommon.cleanup(function () {
-    var db = leveldown(testCommon.location())
-    var depth = 0
+  var db = testCommon.factory()
+  var depth = 0
 
-    db.open(function () {
-      function recurse () {
-        db.iterator({ start: '0' })
-        depth++
-        recurse()
-      }
+  db.open(function () {
+    function recurse () {
+      db.iterator({ start: '0' })
+      depth++
+      recurse()
+    }
 
-      try {
-        recurse()
-      } catch (e) {
-        process.send('Catchable error at depth ' + depth)
-      }
-    })
+    try {
+      recurse()
+    } catch (e) {
+      process.send('Catchable error at depth ' + depth)
+    }
   })
 }