-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathTestBug.scala
118 lines (91 loc) · 3.83 KB
/
TestBug.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
package test.bug
import com.sksamuel.scrimage.{Image, ImageMetadata, ScaleMethod}
import java.awt.image.BufferedImage
import java.io.File
import javax.imageio.ImageIO
import org.apache.mxnet.module.Module
import org.apache.mxnet.{Context, DataBatch, DataDesc, NDArray, Shape, Symbol, module, Model}
import scala.collection.mutable
object TestBug {
  // Network input geometry for SqueezeNet v1.1 (ImageNet-style 224x224 RGB).
  val WIDTH = 224
  val HEIGHT = 224
  val CHANNELS = 3
  val MAX_BATCH_SIZE = 32
  val CONTEXT = Context.gpu(0)

  /**
   * Loads an image from disk and rescales it to the network's input size.
   *
   * @param filename path of the image file to load
   * @return a WIDTH x HEIGHT `BufferedImage`, bicubically rescaled
   */
  def loadImageIntoImageBuffer(filename: String): BufferedImage = {
    val img = ImageIO.read(new File(filename))
    new Image(img, ImageMetadata.empty).scaleTo(WIDTH, HEIGHT, ScaleMethod.Bicubic).awt
  }

  /**
   * Converts an interleaved-byte image into a channel-planar (CHW) float array:
   * all of channel 0 first, then channel 1, etc.
   *
   * @param img the image to convert; assumed to carry CHANNELS bytes per pixel
   *            in its raster data — TODO confirm for the input format used
   * @return a float array of length numPixels * CHANNELS in planar order
   */
  def imageBufferToFloatArray(img: BufferedImage): Array[Float] = {
    val bytes = img.getData.getDataElements(0, 0, img.getWidth, img.getHeight, null).asInstanceOf[Array[Byte]]
    val numPixels = bytes.length / CHANNELS
    // Write straight into one flat array in planar order instead of building
    // per-channel arrays and concatenating with reduceLeft(_ ++ _), which
    // re-copies the accumulator on every step (O(n^2) in total).
    val res = new Array[Float](numPixels * CHANNELS)
    for {
      p <- 0 until numPixels
      c <- 0 until CHANNELS
    } res(c * numPixels + p) = (bytes(p * CHANNELS + c) & 0xFF).toFloat
    res
  }

  /**
   * Runs one forward pass and returns the argmax class id for each image.
   *
   * All NDArrays created here — and the caller-supplied `input` — are disposed
   * before returning so GPU memory is released deterministically.
   *
   * @param model     the bound, parameterized Module
   * @param input     batch NDArray of shape (batchSize, CHANNELS, HEIGHT, WIDTH);
   *                  disposed by this method
   * @param batchSize number of images packed into `input`
   * @return one predicted class id (as a Float) per input image
   */
  def predict(model: Module, input: NDArray, batchSize: Int): Array[Float] = {
    val batch = new DataBatch(
      IndexedSeq(input),
      IndexedSeq.empty[NDArray],
      IndexedSeq.fill(batchSize)(0L),
      pad = 0
    )
    val prediction = model.predict(batch)
    // SqueezeNet emits 1000 ImageNet class scores per image.
    assert(prediction.head.size == batchSize * 1000,
      s"Expected ${batchSize * 1000} class scores but got ${prediction.head.size}")
    val classesPerImage = prediction.head.size / batchSize
    val reshaped = prediction.head.reshape(Shape(batchSize, classesPerImage))
    val argmaxed = NDArray.argmax(reshaped, 1)
    val out = argmaxed.toArray
    // Copy results to the JVM first, then free every GPU-side NDArray.
    prediction.foreach(_.dispose())
    reshaped.dispose()
    argmaxed.dispose()
    input.dispose()
    out
  }

  def main(args: Array[String]): Unit = {
    // Fixed seed so the sequence of random batch sizes is reproducible.
    val r = new scala.util.Random(8675309)
    // Load the pretrained checkpoint and bind the module for inference only.
    val (symbol, argParams, auxParams) = Model.loadCheckpoint("../squeezenet-v1.1", 0)
    val model = new module.Module(symbolVar = symbol, labelNames = IndexedSeq.empty[String], contexts = CONTEXT)
    val dataShapes = IndexedSeq(DataDesc(
      name = "data",
      shape = Shape(MAX_BATCH_SIZE, CHANNELS, HEIGHT, WIDTH)
    ))
    model.bind(dataShapes = dataShapes, forTraining = false)
    model.setParams(argParams, auxParams, allowExtra = false, allowMissing = false)
    // Prepare a single test image on the GPU; it is tiled per iteration below.
    val file = "../tabby.tiff"
    val img = loadImageIntoImageBuffer(file)
    val imgFloat = imageBufferToFloatArray(img)
    val imgNDArray = NDArray.array(imgFloat, Shape(1, CHANNELS, HEIGHT, WIDTH), CONTEXT)
    println("Starting test")
    // Iterate over a Range directly — no need to materialize a 10k-element List.
    (1 to 10000).foreach { i =>
      val randomBatchSize = r.nextInt(MAX_BATCH_SIZE) + 1
      val t0 = System.currentTimeMillis()
      // tile copies the image batchSize times; predict() disposes this NDArray.
      val input = NDArray.tile(imgNDArray, Shape(randomBatchSize, 1, 1, 1))
      val results = predict(model, input, randomBatchSize)
      print(s"$i: Batch Size $randomBatchSize ")
      assert(results.size == randomBatchSize, "The correct number of feature vectors are returned.")
      results.foreach { result =>
        // ImageNet classes 281 (tabby cat) and 282 (tiger cat) are both acceptable.
        assert(result == 281 || result == 282, s"The image is classified correctly. $result returned")
      }
      val t1 = System.currentTimeMillis()
      println(s"pred time ${t1 - t0} ms")
    }
    // Was leaked in the original: release the source image's GPU memory too.
    imgNDArray.dispose()
    println("Pausing execution so you can view memory usage")
    scala.io.StdIn.readChar()
  }
}