-
Notifications
You must be signed in to change notification settings - Fork 364
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Integrate TileFeature into PersistenceSpec
- Loading branch information
James McClain
committed
Apr 16, 2016
1 parent
4cc03ed
commit dd0c48b
Showing
13 changed files
with
718 additions
and
55 deletions.
There are no files selected for viewing
32 changes: 32 additions & 0 deletions
32
accumulo/src/test/scala/geotrellis/spark/io/accumulo/AccumuloTileFeatureSpaceTimeSpec.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
package geotrellis.spark.io.accumulo | ||
|
||
import geotrellis.raster.{Tile, TileFeature} | ||
import geotrellis.spark._ | ||
import geotrellis.spark.io._ | ||
import geotrellis.spark.io.index._ | ||
import geotrellis.spark.testfiles.TestTileFeatureFiles | ||
|
||
import com.github.nscala_time.time.Imports._ | ||
import org.joda.time.DateTime | ||
|
||
|
||
/** Persistence tests for TileFeature[Tile, Tile] layers over Accumulo with space-time keys.
  *
  * Renamed from `AccumuloSpaceTimeSpec`: that name belongs to the existing plain-Tile
  * spec in this package, so reusing it here would not compile; the new name also
  * matches the file name (AccumuloTileFeatureSpaceTimeSpec.scala).
  */
class AccumuloTileFeatureSpaceTimeSpec
  extends PersistenceSpec[SpaceTimeKey, TileFeature[Tile, Tile], TileLayerMetadata[SpaceTimeKey]]
    with SpaceTimeKeyIndexMethods
    with TestEnvironment
    with AccumuloTestEnvironment
    with TestTileFeatureFiles
    with CoordinateSpaceTimeTileFeatureTests // was CoordinateTileFeatureSpaceTimeTests; the trait this commit defines is CoordinateSpaceTimeTileFeatureTests
    with LayerUpdateSpaceTimeTileFeatureTests {

  // Mock Accumulo instance shared by every reader/writer below.
  implicit lazy val instance = MockAccumuloInstance()

  lazy val reader = AccumuloLayerReader(instance)
  lazy val writer = AccumuloLayerWriter(instance, "tiles", SocketWriteStrategy())
  lazy val deleter = AccumuloLayerDeleter(instance)
  lazy val reindexer = AccumuloLayerReindexer(instance, SocketWriteStrategy())
  lazy val updater = AccumuloLayerUpdater(instance, SocketWriteStrategy())
  lazy val tiles = AccumuloValueReader(instance)
  lazy val sample = CoordinateSpaceTime
  lazy val copier = AccumuloLayerCopier(instance, reader, writer)
  lazy val mover = AccumuloLayerMover(copier, deleter)
}
30 changes: 30 additions & 0 deletions
30
accumulo/src/test/scala/geotrellis/spark/io/accumulo/AccumuloTileFeatureSpatialSpec.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
package geotrellis.spark.io.accumulo | ||
|
||
import geotrellis.raster.{Tile, TileFeature} | ||
import geotrellis.spark._ | ||
import geotrellis.spark.io._ | ||
import geotrellis.spark.io.index._ | ||
import geotrellis.spark.testfiles.TestTileFeatureFiles | ||
|
||
|
||
/** Persistence tests for TileFeature[Tile, Tile] layers over Accumulo with spatial keys.
  *
  * Renamed from `AccumuloSpatialSpec`: that name belongs to the existing plain-Tile
  * spec in this package, so reusing it here would not compile; the new name also
  * matches the file name (AccumuloTileFeatureSpatialSpec.scala).
  */
class AccumuloTileFeatureSpatialSpec
  extends PersistenceSpec[SpatialKey, TileFeature[Tile, Tile], TileLayerMetadata[SpatialKey]]
    with SpatialKeyIndexMethods
    with TestEnvironment
    with AccumuloTestEnvironment
    with TestTileFeatureFiles
    with AllOnesTestTileFeatureTests {

  // Mock Accumulo instance shared by every reader/writer below.
  implicit lazy val instance = MockAccumuloInstance()

  lazy val reader = AccumuloLayerReader(instance)
  lazy val writer = AccumuloLayerWriter(instance, "tiles", SocketWriteStrategy())
  lazy val deleter = AccumuloLayerDeleter(instance)
  lazy val reindexer = AccumuloLayerReindexer(instance, SocketWriteStrategy())
  lazy val updater = AccumuloLayerUpdater(instance, SocketWriteStrategy())
  lazy val tiles = AccumuloValueReader(instance)
  lazy val sample = AllOnesTestFile

  lazy val copier = AccumuloLayerCopier(instance, reader, writer)
  lazy val mover = AccumuloLayerMover(copier, deleter)
}
55 changes: 55 additions & 0 deletions
55
s3/src/test/scala/geotrellis/spark/io/s3/S3TileFeatureSpaceTimeSpec.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
package geotrellis.spark.io.s3 | ||
|
||
import geotrellis.raster.{Tile, TileFeature} | ||
import geotrellis.spark.io._ | ||
import geotrellis.spark.io.index._ | ||
import geotrellis.spark.testfiles.TestTileFeatureFiles | ||
import geotrellis.spark._ | ||
|
||
import com.github.nscala_time.time.Imports._ | ||
import org.joda.time.DateTime | ||
import org.scalatest._ | ||
|
||
|
||
/** Persistence tests for TileFeature[Tile, Tile] layers over (mocked) S3 with space-time keys. */
class S3TileFeatureSpaceTimeSpec
  extends PersistenceSpec[SpaceTimeKey, TileFeature[Tile, Tile], TileLayerMetadata[SpaceTimeKey]]
    with SpaceTimeKeyIndexMethods
    with TestEnvironment
    with TestTileFeatureFiles
    with CoordinateSpaceTimeTileFeatureTests
    with LayerUpdateSpaceTimeTileFeatureTests
    with BeforeAndAfterAll {

  // Clear the shared mock-S3 state once all tests in this spec have run.
  registerAfterAll { () =>
    MockS3Client.reset()
  }

  lazy val bucket = "mock-bucket"
  lazy val prefix = "catalog"

  lazy val attributeStore = new S3AttributeStore(bucket, prefix) {
    override val s3Client = new MockS3Client()
  }

  lazy val rddReader =
    new S3RDDReader {
      def getS3Client = () => new MockS3Client()
    }

  lazy val rddWriter =
    new S3RDDWriter {
      def getS3Client = () => new MockS3Client()
    }

  lazy val reader = new MockS3LayerReader(attributeStore)
  lazy val writer = new MockS3LayerWriter(attributeStore, bucket, prefix)
  // BUG FIX: was `S3SpaceTimeSpec.this.rddWriter` — a copy-paste from the plain-Tile
  // spec that names a different enclosing class and does not resolve here.
  lazy val updater = new S3LayerUpdater(attributeStore, reader) { override def rddWriter = S3TileFeatureSpaceTimeSpec.this.rddWriter }
  lazy val deleter = new S3LayerDeleter(attributeStore) { override val getS3Client = () => new MockS3Client() }
  lazy val copier = new S3LayerCopier(attributeStore, bucket, prefix) { override val getS3Client = () => new MockS3Client() }
  lazy val reindexer = GenericLayerReindexer[S3LayerHeader](attributeStore, reader, writer, deleter, copier)
  lazy val mover = GenericLayerMover(copier, deleter)
  lazy val tiles = new S3ValueReader(attributeStore) {
    override val s3Client = new MockS3Client()
  }
  lazy val sample = CoordinateSpaceTime
}
49 changes: 49 additions & 0 deletions
49
s3/src/test/scala/geotrellis/spark/io/s3/S3TileFeatureSpatialSpec.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
package geotrellis.spark.io.s3 | ||
|
||
import geotrellis.raster.{Tile, TileFeature} | ||
import geotrellis.spark._ | ||
import geotrellis.spark.io._ | ||
import geotrellis.spark.io.index._ | ||
import geotrellis.spark.testfiles.TestTileFeatureFiles | ||
|
||
import org.scalatest._ | ||
|
||
|
||
/** Persistence tests for TileFeature[Tile, Tile] layers over (mocked) S3 with spatial keys.
  *
  * Renamed from `S3SpatialSpec`: that name belongs to the existing plain-Tile spec in
  * this package, so reusing it here would not compile; the new name also matches the
  * file name (S3TileFeatureSpatialSpec.scala). The `rddWriter` self-reference is
  * updated to the new class name accordingly.
  */
class S3TileFeatureSpatialSpec
  extends PersistenceSpec[SpatialKey, TileFeature[Tile, Tile], TileLayerMetadata[SpatialKey]]
    with SpatialKeyIndexMethods
    with TestEnvironment
    with TestTileFeatureFiles
    with AllOnesTestTileFeatureTests {

  lazy val bucket = "mock-bucket"
  lazy val prefix = "catalog"

  // Clear the shared mock-S3 state once all tests in this spec have run.
  registerAfterAll { () =>
    MockS3Client.reset()
  }

  lazy val attributeStore = new S3AttributeStore(bucket, prefix) {
    override val s3Client = new MockS3Client()
  }

  lazy val rddReader =
    new S3RDDReader {
      def getS3Client = () => new MockS3Client()
    }

  lazy val rddWriter =
    new S3RDDWriter {
      def getS3Client = () => new MockS3Client()
    }

  lazy val reader = new MockS3LayerReader(attributeStore)
  lazy val writer = new MockS3LayerWriter(attributeStore, bucket, prefix)
  lazy val updater = new S3LayerUpdater(attributeStore, reader) { override def rddWriter = S3TileFeatureSpatialSpec.this.rddWriter }
  lazy val deleter = new S3LayerDeleter(attributeStore) { override val getS3Client = () => new MockS3Client() }
  lazy val copier = new S3LayerCopier(attributeStore, bucket, prefix) { override val getS3Client = () => new MockS3Client() }
  lazy val reindexer = GenericLayerReindexer[S3LayerHeader](attributeStore, reader, writer, deleter, copier)
  lazy val mover = GenericLayerMover(copier, deleter)
  lazy val tiles = new S3ValueReader(attributeStore) { override val s3Client = new MockS3Client() }
  lazy val sample = AllOnesTestFile
}
62 changes: 62 additions & 0 deletions
62
spark/src/test/scala/geotrellis/spark/io/AllOnesTestTileFeatureTests.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
package geotrellis.spark.io | ||
|
||
import geotrellis.raster.{GridBounds, Tile, TileFeature} | ||
import geotrellis.spark._ | ||
import geotrellis.spark.io.avro.codecs._ | ||
import geotrellis.spark.io.json._ | ||
import geotrellis.vector.Extent | ||
|
||
|
||
/** Spatial query tests run against the all-ones TileFeature test layer, once per
  * key-index method registered by the enclosing [[PersistenceSpec]].
  */
trait AllOnesTestTileFeatureTests { self: PersistenceSpec[SpatialKey, TileFeature[Tile, Tile], TileLayerMetadata[SpatialKey]] =>

  // Two disjoint regions inside the layer used by the queries below.
  val bounds1 = GridBounds(1,1,3,3)
  val bounds2 = GridBounds(4,5,6,6)

  /** Report (via ScalaTest `info`) any keys missing from or unexpected in `actual`. */
  private def reportKeyDiffs(expected: Seq[SpatialKey], actual: Seq[SpatialKey]): Unit = {
    val missing = expected diff actual
    val unwanted = actual diff expected
    if (missing.nonEmpty)
      info(s"missing: ${missing.toList}")
    if (unwanted.nonEmpty)
      info(s"unwanted: ${unwanted.toList}")
  }

  specLayerIds.foreach { case PersistenceSpecDefinition(keyIndexMethodName, _, layerIds) =>
    val id = layerIds.layerId
    val spatialQuery = reader.query[SpatialKey, TileFeature[Tile, Tile], TileLayerMetadata[SpatialKey]](id)

    describe(s"AllOnes query tests for $keyIndexMethodName") {
      it("filters past layout bounds") {
        val found = spatialQuery.where(Intersects(GridBounds(6, 2, 7, 3))).result.keys.collect()
        found should contain theSameElementsAs Array(SpatialKey(6, 3), SpatialKey(6, 2))
      }

      it("query inside layer bounds") {
        val actual = spatialQuery.where(Intersects(bounds1)).result.keys.collect()
        val expected = bounds1.coords.map { case (x, y) => SpatialKey(x, y) }

        reportKeyDiffs(expected, actual)
        actual should contain theSameElementsAs expected
      }

      it("query outside of layer bounds") {
        spatialQuery.where(Intersects(GridBounds(10, 10, 15, 15))).result.collect() should be(empty)
      }

      it("disjoint query on space") {
        val actual = spatialQuery.where(Intersects(bounds1) or Intersects(bounds2)).result.keys.collect()
        val expected = (bounds1.coords ++ bounds2.coords).map { case (x, y) => SpatialKey(x, y) }

        reportKeyDiffs(expected, actual)
        actual should contain theSameElementsAs expected
      }

      it("should filter by extent") {
        val extent = Extent(-10, -10, 10, 10) // this should intersect the four central tiles in 8x8 layout
        val expected = GridBounds(3, 3, 4, 4).coords.map { case (col, row) => SpatialKey(col, row) }
        spatialQuery.where(Intersects(extent)).result.keys.collect() should
          contain theSameElementsAs expected
      }
    }
  }
}
90 changes: 90 additions & 0 deletions
90
spark/src/test/scala/geotrellis/spark/io/CoordinateSpaceTimeTileFeatureTests.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,90 @@ | ||
package geotrellis.spark.io | ||
|
||
import com.github.nscala_time.time.Imports._ | ||
import geotrellis.raster.{GridBounds, Tile, TileFeature} | ||
import geotrellis.spark._ | ||
import geotrellis.spark.io.avro.codecs._ | ||
import geotrellis.spark.io.json._ | ||
import org.joda.time.DateTime | ||
|
||
|
||
/** Space-time query tests run against the coordinate TileFeature test layer, once per
  * key-index method registered by the enclosing [[PersistenceSpec]].
  */
trait CoordinateSpaceTimeTileFeatureTests { self: PersistenceSpec[SpaceTimeKey, TileFeature[Tile, Tile], TileLayerMetadata[SpaceTimeKey]] =>

  // All the dates in the layer: one instant per year, midnight UTC.
  val dates = Vector(
    new DateTime(2010,1,1,0,0,0, DateTimeZone.UTC),
    new DateTime(2011,1,1,0,0,0, DateTimeZone.UTC),
    new DateTime(2012,1,1,0,0,0, DateTimeZone.UTC),
    new DateTime(2013,1,1,0,0,0, DateTimeZone.UTC),
    new DateTime(2014,1,1,0,0,0, DateTimeZone.UTC))

  // Two disjoint spatial regions inside the layer used by the queries below.
  val bounds1 = GridBounds(1,1,3,3)
  val bounds2 = GridBounds(4,5,6,6)

  /** Cross the given grid coordinates with the given instants into space-time keys. */
  private def crossKeys(coords: Seq[(Int, Int)], times: Seq[DateTime]): Seq[SpaceTimeKey] =
    for {
      (col, row) <- coords
      time <- times
    } yield SpaceTimeKey(col, row, time)

  /** Report (via ScalaTest `info`) any keys missing from or unexpected in `actual`. */
  private def reportKeyDiffs(expected: Seq[SpaceTimeKey], actual: Seq[SpaceTimeKey]): Unit = {
    val missing = expected diff actual
    val unwanted = actual diff expected
    if (missing.nonEmpty)
      info(s"missing: ${missing.toList}")
    if (unwanted.nonEmpty)
      info(s"unwanted: ${unwanted.toList}")
  }

  specLayerIds.foreach { case PersistenceSpecDefinition(keyIndexMethodName, _, layerIds) =>
    val id = layerIds.layerId
    val stQuery = reader.query[SpaceTimeKey, TileFeature[Tile, Tile], TileLayerMetadata[SpaceTimeKey]](id)

    describe(s"CoordinateSpaceTime query tests for $keyIndexMethodName") {
      it("query outside of layer bounds") {
        stQuery.where(Intersects(GridBounds(10, 10, 15, 15))).result.collect() should be(empty)
      }

      it("query disjunction on space") {
        val actual = stQuery.where(Intersects(bounds1) or Intersects(bounds2)).result.keys.collect()
        val expected = crossKeys(bounds1.coords ++ bounds2.coords, dates)

        reportKeyDiffs(expected, actual)
        actual should contain theSameElementsAs expected
      }

      it("query disjunction on space and time") {
        val actual = stQuery.where(Intersects(bounds1) or Intersects(bounds2))
          .where(Between(dates(0), dates(1)) or Between(dates(3), dates(4))).result.keys.collect()
        // Every date except 2012 falls inside one of the two time windows.
        val expected = crossKeys(bounds1.coords ++ bounds2.coords, dates diff Seq(dates(2)))

        reportKeyDiffs(expected, actual)
        actual should contain theSameElementsAs expected
      }

      it("query at particular times") {
        val actual = stQuery.where(Intersects(bounds1) or Intersects(bounds2))
          .where(At(dates(0)) or At(dates(4))).result.keys.collect()
        val expected = crossKeys(bounds1.coords ++ bounds2.coords, Seq(dates(0), dates(4)))

        reportKeyDiffs(expected, actual)
        actual should contain theSameElementsAs expected
      }
    }
  }
}
Oops, something went wrong.