/*
* Copyright 2017 The Android Things Samples Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.example.androidthings.imageclassifier.env;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.media.Image;
import android.os.Environment;

import java.io.File;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;

/**
* Utility class for manipulating images.
 */
public class ImageUtils {
    private static final Logger LOGGER = new Logger();
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
// are normalized to eight bits.
static final int kMaxChannelValue = 262143;
/**
* Utility method to compute the allocated size in bytes of a YUV420SP image
* of the given dimensions.
*/
public static int getYUVByteSize(final int width, final int height) {
// The luminance plane requires 1 byte per pixel.
final int ySize = width * height;
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
// Each 2x2 block takes 2 bytes to encode, one each for U and V.
final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
return ySize + uvSize;
}
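
    /**
     * Example only, not used by the sample itself: a minimal sketch of how
     * {@link #getYUVByteSize} is typically used to size a reusable frame buffer. The 640x480
     * preview resolution is an assumption chosen purely for illustration.
     */
    @SuppressWarnings("unused")
    private static byte[] exampleAllocateFrameBuffer() {
        // For 640x480: 640 * 480 luminance bytes + 320 * 240 * 2 chroma bytes = 460,800 bytes.
        return new byte[getYUVByteSize(640, 480)];
    }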
    /**
     * Saves a Bitmap object to disk for analysis. The image is written as "preview.png" in a
     * "tensorflow" directory at the root of external storage, which typically requires the
     * WRITE_EXTERNAL_STORAGE permission.
     *
     * @param bitmap The bitmap to save.
     */
public static void saveBitmap(final Bitmap bitmap) {
final String root =
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
final File myDir = new File(root);
        // mkdirs() returns false when the directory already exists, so only report a real failure.
        if (!myDir.mkdirs() && !myDir.isDirectory()) {
            LOGGER.i("Directory %s could not be created.", root);
        }
final String fname = "preview.png";
final File file = new File(myDir, fname);
if (file.exists()) {
file.delete();
}
        // The quality parameter is ignored for PNG, which is lossless; try-with-resources
        // guarantees the stream is closed even if compression fails.
        try (FileOutputStream out = new FileOutputStream(file)) {
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
            out.flush();
        } catch (final Exception e) {
            LOGGER.e(e, "Failed to save bitmap");
        }
}
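
    /**
     * Example only, not used by the sample itself: a minimal sketch of saving a debug bitmap
     * for offline inspection. The 320x240 size is an arbitrary illustration; external storage
     * must be mounted and writable for the call to succeed.
     */
    @SuppressWarnings("unused")
    private static void exampleSaveDebugBitmap() {
        final Bitmap debugBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888);
        saveBitmap(debugBitmap);
    }

    /**
     * Converts a YUV_420_888 {@link Image} into ARGB_8888 pixel values.
     *
     * @param image the camera image to convert.
     * @param output pre-allocated array of width * height ints that receives the ARGB pixels.
     * @param cachedYuvBytes reusable plane buffers; pass the same array on every call so the
     *     per-frame allocations can be skipped once the buffers have the right size.
     * @return the {@code output} array, for convenience.
     */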
public static int[] convertImageToBitmap(Image image, int[] output, byte[][] cachedYuvBytes) {
if (cachedYuvBytes == null || cachedYuvBytes.length != 3) {
cachedYuvBytes = new byte[3][];
}
Image.Plane[] planes = image.getPlanes();
fillBytes(planes, cachedYuvBytes);
final int yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
convertYUV420ToARGB8888(cachedYuvBytes[0], cachedYuvBytes[1], cachedYuvBytes[2],
image.getWidth(), image.getHeight(), yRowStride, uvRowStride, uvPixelStride, output);
return output;
}
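
    /**
     * Example only, not used by the sample itself: a minimal sketch of turning one camera
     * {@link Image} into a {@link Bitmap}. It assumes the caller reuses {@code rgbPixels}
     * (at least width * height ints) and {@code yuvCache} across frames to avoid per-frame
     * allocations.
     */
    @SuppressWarnings("unused")
    private static Bitmap exampleImageToBitmap(Image image, int[] rgbPixels, byte[][] yuvCache) {
        final int width = image.getWidth();
        final int height = image.getHeight();
        convertImageToBitmap(image, rgbPixels, yuvCache);
        final Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        // setPixels expects one packed ARGB int per pixel, which is what the converter emits.
        bitmap.setPixels(rgbPixels, 0, width, 0, 0, width, height);
        return bitmap;
    }

    /**
     * Converts YUV_420_888 plane data into packed ARGB_8888 values, row by row, sharing each
     * chroma sample across the 2x2 block of luma pixels it covers.
     */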
private static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData, int width, int height,
int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
        int i = 0;
        for (int y = 0; y < height; y++) {
            final int pY = yRowStride * y;
            // U and V are subsampled by two in each dimension, so one UV row covers two Y rows
            // and each UV sample is shared by a 2x2 block of Y samples.
            final int uvRowStart = uvRowStride * (y >> 1);
            for (int x = 0; x < width; x++) {
                final int uvOffset = uvRowStart + (x >> 1) * uvPixelStride;
                out[i++] = YUV2RGB(
                        convertByteToInt(yData, pY + x),
                        convertByteToInt(uData, uvOffset),
                        convertByteToInt(vData, uvOffset));
            }
        }
}
}
    /** Reads a byte as an unsigned value; the {@code & 0xFF} mask undoes Java's sign extension. */
    private static int convertByteToInt(byte[] arr, int pos) {
        return arr[pos] & 0xFF;
    }
private static int YUV2RGB(int nY, int nU, int nV) {
nY -= 16;
nU -= 128;
nV -= 128;
if (nY < 0) nY = 0;
        // This is the floating point equivalent (ITU-R BT.601 YUV to RGB conversion). The
        // integer version below uses the same coefficients scaled by 1024 (2^10), because
        // some Android devices do not have floating point in hardware; the scale factor is
        // removed by the >> 10 shifts after clamping.
        // nR = (int) (1.164 * nY + 1.596 * nV);
        // nG = (int) (1.164 * nY - 0.813 * nV - 0.391 * nU);
        // nB = (int) (1.164 * nY + 2.018 * nU);
        int nR = 1192 * nY + 1634 * nV;
        int nG = 1192 * nY - 833 * nV - 400 * nU;
        int nB = 1192 * nY + 2066 * nU;
nR = Math.min(kMaxChannelValue, Math.max(0, nR));
nG = Math.min(kMaxChannelValue, Math.max(0, nG));
nB = Math.min(kMaxChannelValue, Math.max(0, nB));
nR = (nR >> 10) & 0xff;
nG = (nG >> 10) & 0xff;
nB = (nB >> 10) & 0xff;
return 0xff000000 | (nR << 16) | (nG << 8) | nB;
}
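
    /**
     * Example only: a worked data point for the fixed-point conversion above. A mid-gray YUV
     * pixel (Y = 128, U = V = 128) maps to roughly (130, 130, 130), i.e. 1.164 * (128 - 16)
     * truncated after the final shift.
     */
    @SuppressWarnings("unused")
    private static int exampleMidGrayPixel() {
        return YUV2RGB(128, 128, 128); // 0xFF828282
    }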
private static void fillBytes(final Image.Plane[] planes, final byte[][] yuvBytes) {
// Because of the variable row stride it's not possible to know in
// advance the actual necessary dimensions of the yuv planes.
for (int i = 0; i < planes.length; ++i) {
final ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null || yuvBytes[i].length != buffer.capacity()) {
yuvBytes[i] = new byte[buffer.capacity()];
}
buffer.get(yuvBytes[i]);
}
}
public static void cropAndRescaleBitmap(final Bitmap src, final Bitmap dst, int sensorOrientation) {
        if (dst.getWidth() != dst.getHeight()) {
            throw new IllegalArgumentException("Destination bitmap must be square.");
        }
final float minDim = Math.min(src.getWidth(), src.getHeight());
final Matrix matrix = new Matrix();
// We only want the center square out of the original rectangle.
final float translateX = -Math.max(0, (src.getWidth() - minDim) / 2);
final float translateY = -Math.max(0, (src.getHeight() - minDim) / 2);
matrix.preTranslate(translateX, translateY);
final float scaleFactor = dst.getHeight() / minDim;
matrix.postScale(scaleFactor, scaleFactor);
// Rotate around the center if necessary.
if (sensorOrientation != 0) {
matrix.postTranslate(-dst.getWidth() / 2.0f, -dst.getHeight() / 2.0f);
matrix.postRotate(sensorOrientation);
matrix.postTranslate(dst.getWidth() / 2.0f, dst.getHeight() / 2.0f);
}
final Canvas canvas = new Canvas(dst);
canvas.drawBitmap(src, matrix, null);
}
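
    /**
     * Example only, not used by the sample itself: a minimal sketch of preparing a frame for a
     * classifier. The 224x224 input size and the 90-degree sensor rotation are assumptions for
     * illustration; use the values your model and camera actually report.
     */
    @SuppressWarnings("unused")
    private static Bitmap exampleCropForClassifier(final Bitmap cameraFrame) {
        final Bitmap modelInput = Bitmap.createBitmap(224, 224, Bitmap.Config.ARGB_8888);
        cropAndRescaleBitmap(cameraFrame, modelInput, 90);
        return modelInput;
    }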
}