/external/tensorflow/tensorflow/lite/java/ovic/src/main/java/org/tensorflow/ovic/
D | OvicValidator.java |
      60  ByteBuffer imgData = createByteBuffer(inputDims[1], inputDims[2]);  in main() local
      61  if (!detector.detectByteBuffer(imgData, /*imageId=*/ 0)) {  in main()
      67  ByteBuffer imgData = createByteBuffer(inputDims[1], inputDims[2]);  in main() local
      68  OvicClassificationResult testResult = classifier.classifyByteBuffer(imgData);  in main()
      81  ByteBuffer imgData = ByteBuffer.allocateDirect(imgHeight * imgWidth * 3);  in createByteBuffer() local
      82  imgData.order(ByteOrder.nativeOrder());  in createByteBuffer()
      87  imgData.put((byte) ((val >> 16) & 0xFF));  in createByteBuffer()
      88  imgData.put((byte) ((val >> 8) & 0xFF));  in createByteBuffer()
      89  imgData.put((byte) (val & 0xFF));  in createByteBuffer()
      92  return imgData;  in createByteBuffer()
|
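The OvicValidator matches above build the model input by hand: allocate a direct buffer, set native byte order, then pack one byte per RGB channel. A minimal standalone sketch of that createByteBuffer pattern, with an assumed mid-gray fill in place of the validator's real pixel source:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class InputBufferSketch {
      // Direct buffer of packed RGB bytes, one byte per channel, sized for a quantized input.
      static ByteBuffer createByteBuffer(int imgWidth, int imgHeight) {
        ByteBuffer imgData = ByteBuffer.allocateDirect(imgHeight * imgWidth * 3);
        imgData.order(ByteOrder.nativeOrder());        // fill in native byte order
        for (int i = 0; i < imgHeight * imgWidth; i++) {
          int val = 0xFF808080;                        // placeholder ARGB pixel (assumption)
          imgData.put((byte) ((val >> 16) & 0xFF));    // R
          imgData.put((byte) ((val >> 8) & 0xFF));     // G
          imgData.put((byte) (val & 0xFF));            // B
        }
        return imgData;
      }
    }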
D | OvicBenchmarker.java |
      54  protected ByteBuffer imgData = null;  field in OvicBenchmarker
     127  if (imgData == null || intValues == null) {  in loadsInputToByteBuffer()
     131  imgData.rewind();  in loadsInputToByteBuffer()
     138  imgData.put((byte) ((pixelValue >> 16) & 0xFF));  in loadsInputToByteBuffer()
     139  imgData.put((byte) ((pixelValue >> 8) & 0xFF));  in loadsInputToByteBuffer()
     140  imgData.put((byte) (pixelValue & 0xFF));  in loadsInputToByteBuffer()
|
D | OvicDetectorBenchmarker.java |
      68  imgData = ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE);  in getReadyToTest()
      69  imgData.order(ByteOrder.nativeOrder());  in getReadyToTest()
     119  if (!processBuffer(imgData, imageId)) {  in processBitmap()
|
D | OvicClassifierBenchmarker.java |
      61  imgData = ByteBuffer.allocateDirect(DIM_BATCH_SIZE * imgHeight * imgWidth * DIM_PIXEL_SIZE);  in getReadyToTest()
      62  imgData.order(ByteOrder.nativeOrder());  in getReadyToTest()
      86  iterResult = classifier.classifyByteBuffer(imgData);  in processBitmap()
|
D | OvicClassifier.java |
     109  public OvicClassificationResult classifyByteBuffer(ByteBuffer imgData) {  in classifyByteBuffer() argument
     117  tflite.run(imgData, labelProbArray);  in classifyByteBuffer()
     119  tflite.run(imgData, inferenceOutputArray);  in classifyByteBuffer()
|
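Lines 117 and 119 of OvicClassifier run the same input against two different output containers, which suggests the output array is chosen to match the model's output tensor type. A hedged reconstruction of that branch; numLabels, the dequantization step, and the return shape are assumptions rather than quoted code:

    import java.nio.ByteBuffer;
    import org.tensorflow.lite.DataType;
    import org.tensorflow.lite.Interpreter;

    final class ClassifySketch {
      static float[] classifyByteBuffer(Interpreter tflite, ByteBuffer imgData, int numLabels) {
        float[] probs = new float[numLabels];
        if (tflite.getOutputTensor(0).dataType() == DataType.UINT8) {
          byte[][] labelProbArray = new byte[1][numLabels];       // quantized output tensor
          tflite.run(imgData, labelProbArray);
          for (int i = 0; i < numLabels; i++) {
            probs[i] = (labelProbArray[0][i] & 0xFF) / 255.0f;    // dequantize to [0, 1]
          }
        } else {
          float[][] inferenceOutputArray = new float[1][numLabels];  // float output tensor
          tflite.run(imgData, inferenceOutputArray);
          probs = inferenceOutputArray[0];
        }
        return probs;
      }
    }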
D | OvicDetector.java |
     126  boolean detectByteBuffer(ByteBuffer imgData, int imageId) {  in detectByteBuffer() argument
     131  Object[] inputArray = {imgData};  in detectByteBuffer()
|
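OvicDetector wraps the single input buffer in an Object[] (line 131), the usual prelude to Interpreter.runForMultipleInputsOutputs for models with several output tensors. A sketch of that call; the four-output layout below is the common SSD postprocess shape and is assumed, not taken from the OvicDetector source:

    import java.nio.ByteBuffer;
    import java.util.HashMap;
    import java.util.Map;
    import org.tensorflow.lite.Interpreter;

    final class DetectSketch {
      static void detect(Interpreter tflite, ByteBuffer imgData, int maxDetections) {
        Object[] inputArray = {imgData};

        float[][][] outputLocations = new float[1][maxDetections][4];  // box coordinates
        float[][] outputClasses = new float[1][maxDetections];          // class indices
        float[][] outputScores = new float[1][maxDetections];           // confidences
        float[] numDetections = new float[1];

        // Each pre-allocated output array is keyed by its output tensor index.
        Map<Integer, Object> outputMap = new HashMap<>();
        outputMap.put(0, outputLocations);
        outputMap.put(1, outputClasses);
        outputMap.put(2, outputScores);
        outputMap.put(3, numDetections);

        tflite.runForMultipleInputsOutputs(inputArray, outputMap);
      }
    }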
/external/tensorflow/tensorflow/lite/examples/android/app/src/main/java/org/tensorflow/demo/ |
D | TFLiteObjectDetectionAPIModel.java |
      73  private ByteBuffer imgData;  field in TFLiteObjectDetectionAPIModel
     134  d.imgData = ByteBuffer.allocateDirect(1 * d.inputSize * d.inputSize * 3 * numBytesPerChannel);  in create()
     135  d.imgData.order(ByteOrder.nativeOrder());  in create()
     158  imgData.rewind();  in recognizeImage()
     164  imgData.put((byte) ((pixelValue >> 16) & 0xFF));  in recognizeImage()
     165  imgData.put((byte) ((pixelValue >> 8) & 0xFF));  in recognizeImage()
     166  imgData.put((byte) (pixelValue & 0xFF));  in recognizeImage()
     168  imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in recognizeImage()
     169  imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in recognizeImage()
     170  imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in recognizeImage()
     [all …]
|
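Line 134 of TFLiteObjectDetectionAPIModel sizes the input buffer as batch * height * width * channels * numBytesPerChannel. A short sketch of that arithmetic, assuming the usual 1-byte-per-channel quantized versus 4-byte-per-channel float convention behind numBytesPerChannel; the isModelQuantized parameter is an assumed stand-in, not quoted from the file:

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class BufferSizing {
      static ByteBuffer allocateInput(int inputSize, boolean isModelQuantized) {
        int numBytesPerChannel = isModelQuantized ? 1 : 4;   // uint8 vs float32 input tensor
        ByteBuffer imgData =
            ByteBuffer.allocateDirect(1 * inputSize * inputSize * 3 * numBytesPerChannel);
        imgData.order(ByteOrder.nativeOrder());              // the demos set native order before filling
        return imgData;
      }
    }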
D | TFLiteImageClassifier.java |
      61  private ByteBuffer imgData = null;  field in TFLiteImageClassifier
     104  c.imgData =  in create()
     108  c.imgData.order(ByteOrder.nativeOrder());  in create()
     128  if (imgData == null) {  in convertBitmapToByteBuffer()
     131  imgData.rewind();  in convertBitmapToByteBuffer()
     139  imgData.put((byte) ((val >> 16) & 0xFF));  in convertBitmapToByteBuffer()
     140  imgData.put((byte) ((val >> 8) & 0xFF));  in convertBitmapToByteBuffer()
     141  imgData.put((byte) (val & 0xFF));  in convertBitmapToByteBuffer()
     164  tfLite.run(imgData, labelProb);  in recognizeImage()
|
/external/tensorflow/tensorflow/lite/java/demo/app/src/main/java/com/example/android/tflitecamerademo/ |
D | ImageClassifierQuantizedMobileNet.java |
      73  imgData.put((byte) ((pixelValue >> 16) & 0xFF));  in addPixelValue()
      74  imgData.put((byte) ((pixelValue >> 8) & 0xFF));  in addPixelValue()
      75  imgData.put((byte) (pixelValue & 0xFF));  in addPixelValue()
      95  tflite.run(imgData, labelProbArray);  in runInference()
|
D | ImageClassifierFloatMobileNet.java |
      70  imgData.putFloat(((pixelValue >> 16) & 0xFF) / 255.f);  in addPixelValue()
      71  imgData.putFloat(((pixelValue >> 8) & 0xFF) / 255.f);  in addPixelValue()
      72  imgData.putFloat((pixelValue & 0xFF) / 255.f);  in addPixelValue()
      92  tflite.run(imgData, labelProbArray);  in runInference()
|
D | ImageClassifierFloatInception.java |
      80  imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in addPixelValue()
      81  imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in addPixelValue()
      82  imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);  in addPixelValue()
     103  tflite.run(imgData, labelProbArray);  in runInference()
|
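The two float classifiers above differ only in how a channel value is normalized before it is written with putFloat: MobileNet scales into [0, 1], Inception centers with a mean and scales by a standard deviation. A side-by-side sketch; the mean and std constants are assumptions, since each demo model defines its own:

    final class Normalization {
      private static final float IMAGE_MEAN = 128.0f;  // assumed value
      private static final float IMAGE_STD = 128.0f;   // assumed value

      // Float MobileNet convention: scale a channel into [0, 1].
      static float toUnitRange(int channel) {
        return channel / 255.f;
      }

      // Float Inception convention: center with the mean, scale by the std.
      static float toCentered(int channel) {
        return (channel - IMAGE_MEAN) / IMAGE_STD;
      }
    }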
D | ImageClassifier.java |
      79  protected ByteBuffer imgData = null;  field in ImageClassifier
     105  imgData =  in ImageClassifier()
     112  imgData.order(ByteOrder.nativeOrder());  in ImageClassifier()
     227  if (imgData == null) {  in convertBitmapToByteBuffer()
     230  imgData.rewind();  in convertBitmapToByteBuffer()
|
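ImageClassifier's matches at lines 227 and 230 show the guard and the rewind at the top of convertBitmapToByteBuffer. A sketch of the surrounding flow, assuming the usual Bitmap.getPixels scratch array and a per-pixel hook; both helpers are assumptions, not quoted from the file:

    import android.graphics.Bitmap;
    import java.nio.ByteBuffer;

    final class ConvertSketch {
      static void convertBitmapToByteBuffer(Bitmap bitmap, ByteBuffer imgData, int[] intValues) {
        if (imgData == null) {
          return;                                       // classifier not initialized yet
        }
        imgData.rewind();                               // reuse the same direct buffer every frame
        bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0,
            bitmap.getWidth(), bitmap.getHeight());
        int pixel = 0;
        for (int i = 0; i < bitmap.getHeight(); i++) {
          for (int j = 0; j < bitmap.getWidth(); j++) {
            addPixelValue(imgData, intValues[pixel++]); // model-specific packing, see variants above
          }
        }
      }

      static void addPixelValue(ByteBuffer imgData, int pixelValue) {
        imgData.put((byte) ((pixelValue >> 16) & 0xFF)); // placeholder quantized-style packing
        imgData.put((byte) ((pixelValue >> 8) & 0xFF));
        imgData.put((byte) (pixelValue & 0xFF));
      }
    }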
/external/tensorflow/tensorflow/lite/java/ovic/src/test/java/org/tensorflow/ovic/ |
D | OvicDetectorTest.java |
      90  ByteBuffer imgData = ByteBuffer.allocateDirect(image.getHeight() * image.getWidth() * 3);  in toByteBuffer() local
      91  imgData.order(ByteOrder.nativeOrder());  in toByteBuffer()
      95  imgData.put((byte) ((pixelValue >> 16) & 0xFF));  in toByteBuffer()
      96  imgData.put((byte) ((pixelValue >> 8) & 0xFF));  in toByteBuffer()
      97  imgData.put((byte) (pixelValue & 0xFF));  in toByteBuffer()
     100  return imgData;  in toByteBuffer()
|
D | OvicClassifierTest.java |
     136  ByteBuffer imgData = ByteBuffer.allocateDirect(  in toByteBuffer() local
     138  imgData.order(ByteOrder.nativeOrder());  in toByteBuffer()
     142  imgData.put((byte) ((val >> 16) & 0xFF));  in toByteBuffer()
     143  imgData.put((byte) ((val >> 8) & 0xFF));  in toByteBuffer()
     144  imgData.put((byte) (val & 0xFF));  in toByteBuffer()
     147  return imgData;  in toByteBuffer()
|
/external/tensorflow/tensorflow/lite/g3doc/models/image_classification/ |
D | android.md |
      98  c.imgData =
     101  c.imgData.order(ByteOrder.nativeOrder());
     106  single byte for each channel. `imgData` will contain an encoded `Color` for each
     112  imgData.rewind();
     119  imgData.put((byte) ((val >> 16) & 0xFF));
     120  imgData.put((byte) ((val >> 8) & 0xFF));
     121  imgData.put((byte) (val & 0xFF));
     163  tfLite.run(imgData, labelProb);
|
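android.md's last match, tfLite.run(imgData, labelProb), fills the output array but does not show what happens next. A hedged sketch of one plausible post-processing step: dequantize the uint8 scores and keep the top-k labels. None of this is quoted from the guide; it is only the conventional follow-up to that run call:

    import java.util.AbstractMap;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;
    import java.util.PriorityQueue;

    final class TopK {
      static List<Map.Entry<String, Float>> topK(byte[][] labelProb, List<String> labels, int k) {
        // Keep the k largest scores with a min-heap ordered by probability.
        PriorityQueue<Map.Entry<String, Float>> queue =
            new PriorityQueue<>(k, (a, b) -> Float.compare(a.getValue(), b.getValue()));
        for (int i = 0; i < labels.size(); i++) {
          float prob = (labelProb[0][i] & 0xFF) / 255.0f;          // dequantize the uint8 score
          queue.add(new AbstractMap.SimpleEntry<>(labels.get(i), prob));
          if (queue.size() > k) {
            queue.poll();                                           // drop the current minimum
          }
        }
        List<Map.Entry<String, Float>> results = new ArrayList<>(queue);
        results.sort((a, b) -> Float.compare(b.getValue(), a.getValue()));  // highest first
        return results;
      }
    }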
/external/skqp/experimental/canvaskit/canvaskit/ |
D | README.md |
      95  let imgData = skcanvas.toDataURL();
      96  // imgData is now a base64 encoded image.
|
/external/skia/modules/canvaskit/canvaskit/ |
D | README.md |
      95  let imgData = skcanvas.toDataURL();
      96  // imgData is now a base64 encoded image.
|
/external/skqp/experimental/canvaskit/ |
D | canvaskit_bindings.cpp |
     510  uint8_t* imgData = reinterpret_cast<uint8_t*>(iptr);  in EMSCRIPTEN_BINDINGS() local
     511  sk_sp<SkData> bytes = SkData::MakeWithoutCopy(imgData, length);  in EMSCRIPTEN_BINDINGS()
|
/external/skia/modules/canvaskit/ |
D | canvaskit_bindings.cpp |
     586  uint8_t* imgData = reinterpret_cast<uint8_t*>(iptr);  in EMSCRIPTEN_BINDINGS() local
     587  sk_sp<SkData> bytes = SkData::MakeFromMalloc(imgData, length);  in EMSCRIPTEN_BINDINGS()
|