Run clang-format (#7)

Authored by w on 2022-11-19 08:57:30 -08:00; committed by GitHub
parent 7879b4527e
commit 16eda64574
25 changed files with 337 additions and 313 deletions

.clang-format Normal file

@ -0,0 +1,2 @@
DerivePointerAlignment: false
PointerAlignment: Left
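These two options pin the pointer style rather than inferring it: DerivePointerAlignment: false makes clang-format ignore whatever alignment a file already uses, and PointerAlignment: Left attaches * and & to the type, which is why signatures below change from "uint8_t *pixels" to "uint8_t* pixels". A minimal C++ sketch of the resulting style follows (hypothetical names, not part of this repository); the tree itself would then be reformatted in place with something like clang-format -i on each source file.

#include <cstdint>

// Under PointerAlignment: Left the star binds to the type:
uint8_t* firstPixel(uint8_t* pixels) { return pixels; }

int main() {
  uint8_t data[1] = {42};
  uint8_t* p = firstPixel(data);  // declarations read "type* name", not "type *name"
  return *p == 42 ? 0 : 1;
}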

borders.cpp

@ -5,27 +5,30 @@
#include "borders.h"
#include <cmath>
bool inline isBlackPixel(const uint8_t* pixels, uint32_t width, uint32_t x, uint32_t y) {
const uint8_t pixel = *((uint8_t *)pixels + (y * width + x));
bool inline isBlackPixel(const uint8_t* pixels, uint32_t width, uint32_t x,
uint32_t y) {
const uint8_t pixel = *((uint8_t*)pixels + (y * width + x));
return pixel < thresholdForBlack;
}
bool inline isWhitePixel(const uint8_t* pixels, uint32_t width, uint32_t x, uint32_t y) {
const uint8_t pixel = *((uint8_t *)pixels + (y * width + x));
bool inline isWhitePixel(const uint8_t* pixels, uint32_t width, uint32_t x,
uint32_t y) {
const uint8_t pixel = *((uint8_t*)pixels + (y * width + x));
return pixel > thresholdForWhite;
}
/** Return the first x position where there is a substantial amount of fill,
* starting the search from the left. */
uint32_t findBorderLeft(uint8_t* pixels, uint32_t width, uint32_t height, uint32_t top, uint32_t bottom) {
uint32_t findBorderLeft(uint8_t* pixels, uint32_t width, uint32_t height,
uint32_t top, uint32_t bottom) {
int x, y;
const auto filledLimit = (uint32_t) round(height * filledRatioLimit / 2);
const auto filledLimit = (uint32_t)round(height * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
for (y = top; y < bottom; y+=2) {
for (y = top; y < bottom; y += 2) {
if (isBlackPixel(pixels, width, 0, y)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, 0, y)) {
@ -45,7 +48,7 @@ uint32_t findBorderLeft(uint8_t* pixels, uint32_t width, uint32_t height, uint32
for (x = 1; x < width; x++) {
uint32_t filledCount = 0;
for (y = top; y < bottom; y+=2) {
for (y = top; y < bottom; y += 2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
@ -63,16 +66,17 @@ uint32_t findBorderLeft(uint8_t* pixels, uint32_t width, uint32_t height, uint32
/** Return the first x position where there is a substantial amount of fill,
* starting the search from the right. */
uint32_t findBorderRight(uint8_t* pixels, uint32_t width, uint32_t height, uint32_t top, uint32_t bottom) {
uint32_t findBorderRight(uint8_t* pixels, uint32_t width, uint32_t height,
uint32_t top, uint32_t bottom) {
int x, y;
const auto filledLimit = (uint32_t) round(height * filledRatioLimit / 2);
const auto filledLimit = (uint32_t)round(height * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
uint32_t lastX = width - 1;
for (y = top; y < bottom; y+=2) {
for (y = top; y < bottom; y += 2) {
if (isBlackPixel(pixels, width, lastX, y)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, lastX, y)) {
@ -92,7 +96,7 @@ uint32_t findBorderRight(uint8_t* pixels, uint32_t width, uint32_t height, uint3
for (x = width - 2; x > 0; x--) {
uint32_t filledCount = 0;
for (y = top; y < bottom; y+=2) {
for (y = top; y < bottom; y += 2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
@ -112,13 +116,13 @@ uint32_t findBorderRight(uint8_t* pixels, uint32_t width, uint32_t height, uint3
* starting the search from the top. */
uint32_t findBorderTop(uint8_t* pixels, uint32_t width, uint32_t height) {
int x, y;
const auto filledLimit = (uint32_t) round(width * filledRatioLimit / 2);
const auto filledLimit = (uint32_t)round(width * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
for (x = 0; x < width; x+=2) {
for (x = 0; x < width; x += 2) {
if (isBlackPixel(pixels, width, x, 0)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, x, 0)) {
@ -138,7 +142,7 @@ uint32_t findBorderTop(uint8_t* pixels, uint32_t width, uint32_t height) {
for (y = 1; y < height; y++) {
uint32_t filledCount = 0;
for (x = 0; x < width; x+=2) {
for (x = 0; x < width; x += 2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
@ -158,14 +162,14 @@ uint32_t findBorderTop(uint8_t* pixels, uint32_t width, uint32_t height) {
* starting the search from the bottom. */
uint32_t findBorderBottom(uint8_t* pixels, uint32_t width, uint32_t height) {
int x, y;
const auto filledLimit = (uint32_t) round(width * filledRatioLimit / 2);
const auto filledLimit = (uint32_t)round(width * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
uint32_t lastY = height - 1;
for (x = 0; x < width; x+=2) {
for (x = 0; x < width; x += 2) {
if (isBlackPixel(pixels, width, x, lastY)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, x, lastY)) {
@ -185,7 +189,7 @@ uint32_t findBorderBottom(uint8_t* pixels, uint32_t width, uint32_t height) {
for (y = height - 2; y > 0; y--) {
uint32_t filledCount = 0;
for (x = 0; x < width; x+=2) {
for (x = 0; x < width; x += 2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
@ -201,16 +205,11 @@ uint32_t findBorderBottom(uint8_t* pixels, uint32_t width, uint32_t height) {
return height;
}
Rect findBorders(uint8_t *pixels, uint32_t width, uint32_t height) {
Rect findBorders(uint8_t* pixels, uint32_t width, uint32_t height) {
uint32_t top = findBorderTop(pixels, width, height);
uint32_t bottom = findBorderBottom(pixels, width, height);
uint32_t left = findBorderLeft(pixels, width, height, top, bottom);
uint32_t right = findBorderRight(pixels, width, height, top, bottom);
return {
.x = left,
.y = top,
.width = right - left,
.height = bottom - top
};
return {.x = left, .y = top, .width = right - left, .height = bottom - top};
}

borders.h

@ -17,7 +17,8 @@ const uint8_t thresholdForBlack = (uint8_t)(255.0 * THRESHOLD);
const uint8_t thresholdForWhite = (uint8_t)(255.0 - 255.0 * THRESHOLD);
/** Finds the borders of the image. This only works on bitmaps of a single component (grayscale) **/
Rect findBorders(uint8_t *pixels, uint32_t width, uint32_t height);
/** Finds the borders of the image. This only works on bitmaps of a single
* component (grayscale) **/
Rect findBorders(uint8_t* pixels, uint32_t width, uint32_t height);
#endif //IMAGEDECODER_BORDERS_H
#endif // IMAGEDECODER_BORDERS_H

decoder_base.h

@ -5,8 +5,8 @@
#ifndef IMAGEDECODER_DECODER_BASE_H
#define IMAGEDECODER_DECODER_BASE_H
#include "java_stream.h"
#include "borders.h"
#include "java_stream.h"
struct ImageInfo {
uint32_t imageWidth;
@ -21,9 +21,10 @@ public:
this->stream = std::move(stream);
this->cropBorders = cropBorders;
}
virtual ~BaseDecoder() {};
virtual ~BaseDecoder(){};
virtual void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) = 0;
virtual void decode(uint8_t* outPixels, Rect outRect, Rect inRect,
bool rgb565, uint32_t sampleSize) = 0;
protected:
std::shared_ptr<Stream> stream;
@ -33,4 +34,4 @@ public:
ImageInfo info;
};
#endif //IMAGEDECODER_DECODER_BASE_H
#endif // IMAGEDECODER_DECODER_BASE_H

decoder_headers.h

@ -24,9 +24,9 @@ bool is_gif(const uint8_t* data) {
}
enum ftyp_image_type {
ftyp_image_type_no,
ftyp_image_type_heif,
ftyp_image_type_avif
ftyp_image_type_no,
ftyp_image_type_heif,
ftyp_image_type_avif
};
ftyp_image_type get_ftyp_image_type(const uint8_t* data, uint32_t size) {
@ -39,7 +39,8 @@ ftyp_image_type get_ftyp_image_type(const uint8_t* data, uint32_t size) {
uint32_t offset = 8;
while (offset <= maxOffset) {
auto brand = data + offset;
if (brand[0] == 'h' && brand[1] == 'e' && (brand[2] == 'i' || brand[2] == 'v')) {
if (brand[0] == 'h' && brand[1] == 'e' &&
(brand[2] == 'i' || brand[2] == 'v')) {
return ftyp_image_type_heif;
} else if (brand[0] == 'a' && brand[1] == 'v' && brand[2] == 'i') {
return ftyp_image_type_avif;
@ -58,4 +59,4 @@ bool is_jxl(const uint8_t* data) {
(data[0] == 0xff && data[1] == 0x0a); // codestream
}
#endif //IMAGEDECODER_DECODER_HEADERS_H
#endif // IMAGEDECODER_DECODER_HEADERS_H

decoder_heif.cpp

@ -3,8 +3,8 @@
//
#include "decoder_heif.h"
#include <libheif/heif_cxx.h>
#include "row_convert.h"
#include <libheif/heif_cxx.h>
bool is_libheif_compatible(const uint8_t* bytes, uint32_t size) {
return heif_check_filetype(bytes, size) != heif_filetype_no;
@ -16,10 +16,8 @@ auto init_heif_context(Stream* stream) {
return ctx;
}
HeifDecoder::HeifDecoder(
std::shared_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
HeifDecoder::HeifDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders)
: BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
@ -29,35 +27,40 @@ ImageInfo HeifDecoder::parseInfo() {
uint32_t imageWidth = handle.get_width();
uint32_t imageHeight = handle.get_height();
Rect bounds = { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
Rect bounds = {.x = 0, .y = 0, .width = imageWidth, .height = imageHeight};
if (cropBorders) {
try {
auto img = handle.decode_image(heif_colorspace_YCbCr, heif_chroma_undefined);
auto img =
handle.decode_image(heif_colorspace_YCbCr, heif_chroma_undefined);
auto pixels = img.get_plane(heif_channel_Y, nullptr);
bounds = findBorders(pixels, imageWidth, imageHeight);
} catch (std::exception &ex) {
LOGW("Couldn't crop borders on a HEIF/AVIF image of size %dx%d", imageWidth, imageHeight);
} catch (std::exception& ex) {
LOGW("Couldn't crop borders on a HEIF/AVIF image of size %dx%d",
imageWidth, imageHeight);
} catch (heif::Error& error) {
throw std::runtime_error(error.get_message());
}
}
return ImageInfo {
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds
return ImageInfo{
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds,
};
}
void HeifDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
// Decode full image (regions, subsamples or row by row are not supported sadly)
void HeifDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect,
bool rgb565, uint32_t sampleSize) {
// Decode full image (regions, subsamples or row by row are not supported
// sadly)
heif::Image img;
try {
auto ctx = init_heif_context(stream.get());
auto handle = ctx.get_primary_image_handle();
auto chroma = rgb565 ? heif_chroma_interleaved_RGB : heif_chroma_interleaved_RGBA;
auto chroma =
rgb565 ? heif_chroma_interleaved_RGB : heif_chroma_interleaved_RGBA;
img = handle.decode_image(heif_colorspace_RGB, chroma);
} catch (heif::Error& error) {
throw std::runtime_error(error.get_message());
@ -69,7 +72,7 @@ void HeifDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb
// Calculate stride of the decoded image with the requested region
uint32_t inStride = stride;
uint32_t inStrideOffset = inRect.x * (stride / info.imageWidth);
auto inPixelsPos = (uint8_t*) inPixels + inStride * inRect.y;
auto inPixelsPos = (uint8_t*)inPixels + inStride * inRect.y;
// Calculate output stride
uint32_t outStride = outRect.width * (rgb565 ? 2 : 4);
@ -81,7 +84,8 @@ void HeifDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb
if (sampleSize == 1) {
for (uint32_t i = 0; i < outRect.height; i++) {
// Apply row conversion function to the following row
rowFn(outPixelsPos, inPixelsPos + inStrideOffset, nullptr, outRect.width, 1);
rowFn(outPixelsPos, inPixelsPos + inStrideOffset, nullptr, outRect.width,
1);
// Shift row to read and write
inPixelsPos += inStride;
@ -97,9 +101,11 @@ void HeifDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb
inPixelsPos += inStride * skipStart;
// Apply row conversion function to the following two rows
rowFn(outPixelsPos, inPixelsPos + inStrideOffset, inPixelsPos + inStride + inStrideOffset, outRect.width, sampleSize);
rowFn(outPixelsPos, inPixelsPos + inStrideOffset,
inPixelsPos + inStride + inStrideOffset, outRect.width, sampleSize);
// Shift row to read to the next 2 rows (the ones we've just read) + the skipped end rows
// Shift row to read to the next 2 rows (the ones we've just read) + the
// skipped end rows
inPixelsPos += inStride * (2 + skipEnd);
// Shift row to write

decoder_heif.h

@ -9,14 +9,15 @@
bool is_libheif_compatible(const uint8_t* bytes, uint32_t size);
class HeifDecoder: public BaseDecoder {
class HeifDecoder : public BaseDecoder {
public:
HeifDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders);
HeifDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565,
uint32_t sampleSize);
private:
ImageInfo parseInfo();
ImageInfo parseInfo();
};
#endif //IMAGEDECODER_DECODER_HEIF_H
#endif // IMAGEDECODER_DECODER_HEIF_H

decoder_jpeg.cpp

@ -3,34 +3,31 @@
//
#include "decoder_jpeg.h"
#include <algorithm>
#include "row_convert.h"
#include <algorithm>
JpegDecoder::JpegDecoder(
std::shared_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
JpegDecoder::JpegDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders)
: BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
JpegDecodeSession::JpegDecodeSession() : jinfo(jpeg_decompress_struct{}), jerr(jpeg_error_mgr{}) {
JpegDecodeSession::JpegDecodeSession()
: jinfo(jpeg_decompress_struct{}), jerr(jpeg_error_mgr{}) {
jinfo.err = jpeg_std_error(&jerr);
jerr.error_exit = [](j_common_ptr info){
jerr.error_exit = [](j_common_ptr info) {
char jpegLastErrorMsg[JMSG_LENGTH_MAX];
(*(info->err->format_message))(info, jpegLastErrorMsg);
throw std::runtime_error(jpegLastErrorMsg);
};
}
void JpegDecodeSession::init(Stream *stream) {
void JpegDecodeSession::init(Stream* stream) {
jpeg_create_decompress(&jinfo);
jpeg_mem_src(&jinfo, stream->bytes, stream->size);
jpeg_read_header(&jinfo, true);
}
JpegDecodeSession::~JpegDecodeSession() {
jpeg_destroy_decompress(&jinfo);
}
JpegDecodeSession::~JpegDecodeSession() { jpeg_destroy_decompress(&jinfo); }
std::unique_ptr<JpegDecodeSession> JpegDecoder::initDecodeSession() {
auto session = std::make_unique<JpegDecodeSession>();
@ -45,7 +42,7 @@ ImageInfo JpegDecoder::parseInfo() {
uint32_t imageWidth = jinfo.image_width;
uint32_t imageHeight = jinfo.image_height;
Rect bounds = { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
Rect bounds = {.x = 0, .y = 0, .width = imageWidth, .height = imageHeight};
if (cropBorders) {
try {
auto pixels = std::make_unique<uint8_t[]>(imageWidth * imageHeight);
@ -53,27 +50,29 @@ ImageInfo JpegDecoder::parseInfo() {
jinfo.out_color_space = JCS_GRAYSCALE;
jpeg_start_decompress(&jinfo);
uint8_t *pixelsPtr = pixels.get();
uint8_t* pixelsPtr = pixels.get();
while (jinfo.output_scanline < jinfo.output_height) {
uint8_t *offset = pixelsPtr + jinfo.output_scanline * imageWidth;
uint8_t* offset = pixelsPtr + jinfo.output_scanline * imageWidth;
jpeg_read_scanlines(&jinfo, &offset, 1);
}
jpeg_finish_decompress(&jinfo);
bounds = findBorders(pixels.get(), imageWidth, imageHeight);
} catch (std::exception &ex) {
LOGW("Couldn't crop borders on a JPEG image of size %dx%d", imageWidth, imageHeight);
} catch (std::exception& ex) {
LOGW("Couldn't crop borders on a JPEG image of size %dx%d", imageWidth,
imageHeight);
}
}
return ImageInfo {
.imageWidth = jinfo.image_width,
.imageHeight = jinfo.image_height,
.isAnimated = false,
.bounds = bounds
return ImageInfo{
.imageWidth = jinfo.image_width,
.imageHeight = jinfo.image_height,
.isAnimated = false,
.bounds = bounds,
};
}
void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect,
bool rgb565, uint32_t sampleSize) {
auto session = initDecodeSession();
auto* jinfo = &session->jinfo;
@ -83,13 +82,14 @@ void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb
jinfo->dither_mode = JDITHER_NONE;
}
// 8 is the maximum supported scale by libjpeg, so we'll use custom logic for further samples.
// 8 is the maximum supported scale by libjpeg, so we'll use custom logic for
// further samples.
jinfo->scale_denom = std::min(sampleSize, 8u);
uint32_t customSample = sampleSize <= 8 ? 0 : sampleSize / 8;
Rect decRect = customSample == 0 ? outRect : outRect.upsample(customSample);
// libjpeg X axis need to be a multiple of the defined DCT. We need to know these values
// in order to crop the unwanted region of the final image.
// libjpeg X axis need to be a multiple of the defined DCT. We need to know
// these values in order to crop the unwanted region of the final image.
uint32_t decX = decRect.x;
uint32_t decWidth = decRect.width;
@ -103,7 +103,8 @@ void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb
uint32_t inStride = decWidth * pixelSize;
uint32_t inStartStride = (decRect.x - decX) * pixelSize;
// Allocate row and get pointers to the row and the row aligned for output image.
// Allocate row and get pointers to the row and the row aligned for output
// image.
auto inRow = std::make_unique<uint8_t[]>(inStride);
uint8_t* inRowPtr = inRow.get();
uint8_t* inRowPtrAligned = inRowPtr + inStartStride;
@ -121,7 +122,8 @@ void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb
}
} else {
// Custom sampler needed, we need to decode two rows and downsample them.
// Allocate second row and get pointers to the row and the row aligned for output image.
// Allocate second row and get pointers to the row and the row aligned for
// output image.
auto inRow2 = std::make_unique<uint8_t[]>(inStride);
uint8_t* inRow2Ptr = inRow2.get();
uint8_t* inRow2PtrAligned = inRow2Ptr + inStartStride;
@ -138,7 +140,8 @@ void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb
jpeg_read_scanlines(jinfo, &inRowPtr, 1);
jpeg_read_scanlines(jinfo, &inRow2Ptr, 1);
rowFn(outPixelsPos, inRowPtrAligned, inRow2PtrAligned, outRect.width, customSample);
rowFn(outPixelsPos, inRowPtrAligned, inRow2PtrAligned, outRect.width,
customSample);
outPixelsPos += outStride;
jpeg_skip_scanlines(jinfo, skipEnd);

decoder_jpeg.h

@ -5,11 +5,11 @@
#ifndef IMAGEDECODER_DECODER_JPEG_H
#define IMAGEDECODER_DECODER_JPEG_H
#include <stdio.h>
#include <memory>
#include "decoder_base.h"
#include "jpeglib.h"
#include "log.h"
#include <memory>
#include <stdio.h>
// Wrap the JPEG C API in this class to automatically manage memory
class JpegDecodeSession {
@ -23,11 +23,11 @@ public:
void init(Stream* stream);
};
class JpegDecoder: public BaseDecoder {
class JpegDecoder : public BaseDecoder {
public:
JpegDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders);
void decode(uint8_t *outPixels, Rect outRect, Rect srcRegion, bool rgb565,
void decode(uint8_t* outPixels, Rect outRect, Rect srcRegion, bool rgb565,
uint32_t sampleSize);
private:
@ -35,4 +35,4 @@ private:
std::unique_ptr<JpegDecodeSession> initDecodeSession();
};
#endif //IMAGEDECODER_DECODER_JPEG_H
#endif // IMAGEDECODER_DECODER_JPEG_H

decoder_jxl.cpp

@ -142,10 +142,12 @@ ImageInfo JpegxlDecoder::parseInfo() {
.x = 0, .y = 0, .width = jxl_info.xsize, .height = jxl_info.ysize};
}
return ImageInfo{.imageWidth = jxl_info.xsize,
.imageHeight = jxl_info.ysize,
.isAnimated = false, // (bool)jxl_info.have_animation,
.bounds = bounds};
return ImageInfo{
.imageWidth = jxl_info.xsize,
.imageHeight = jxl_info.ysize,
.isAnimated = false, // (bool)jxl_info.have_animation,
.bounds = bounds,
};
}
void JpegxlDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect,

decoder_png.cpp

@ -12,16 +12,14 @@ static void png_skip_rows(png_structrp png_ptr, png_uint_32 num_rows) {
}
}
PngDecoder::PngDecoder(
std::shared_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
PngDecoder::PngDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders)
: BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
PngDecodeSession::PngDecodeSession(Stream* stream) : png(nullptr), pinfo(nullptr),
reader({ .bytes = stream->bytes, .read = 0, .remain = stream->size }) {
}
PngDecodeSession::PngDecodeSession(Stream* stream)
: png(nullptr), pinfo(nullptr),
reader({.bytes = stream->bytes, .read = 0, .remain = stream->size}) {}
#pragma clang diagnostic push
#pragma ide diagnostic ignored "EndlessLoop"
@ -29,9 +27,7 @@ void PngDecodeSession::init() {
auto errorFn = [](png_struct*, png_const_charp msg) {
throw std::runtime_error(msg);
};
auto warnFn = [](png_struct*, png_const_charp msg) {
LOGW("%s", msg);
};
auto warnFn = [](png_struct*, png_const_charp msg) { LOGW("%s", msg); };
png = png_create_read_struct(PNG_LIBPNG_VER_STRING, nullptr, errorFn, warnFn);
if (!png) {
throw std::runtime_error("Failed to create png read struct");
@ -42,8 +38,8 @@ void PngDecodeSession::init() {
}
auto readFn = [](png_struct* p, png_byte* data, png_size_t length) {
auto* r = (PngReader*) png_get_io_ptr(p);
uint32_t next = std::min(r->remain, (uint32_t) length);
auto* r = (PngReader*)png_get_io_ptr(p);
uint32_t next = std::min(r->remain, (uint32_t)length);
if (next > 0) {
memcpy(data, r->bytes + r->read, next);
r->read += next;
@ -73,7 +69,7 @@ ImageInfo PngDecoder::parseInfo() {
uint32_t imageWidth = png_get_image_width(png, pinfo);
uint32_t imageHeight = png_get_image_height(png, pinfo);
Rect bounds = { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
Rect bounds = {.x = 0, .y = 0, .width = imageWidth, .height = imageHeight};
if (cropBorders) {
try {
auto pixels = std::make_unique<uint8_t[]>(imageWidth * imageHeight);
@ -85,10 +81,10 @@ ImageInfo PngDecoder::parseInfo() {
if (bitDepth == 16) {
png_set_scale_16(png);
}
if (colorType & (uint8_t) PNG_COLOR_MASK_COLOR) {
if (colorType & (uint8_t)PNG_COLOR_MASK_COLOR) {
png_set_rgb_to_gray(png, 1, -1, -1);
png_set_strip_alpha(png);
} else if (colorType & (uint8_t) PNG_COLOR_MASK_ALPHA) {
} else if (colorType & (uint8_t)PNG_COLOR_MASK_ALPHA) {
png_set_strip_alpha(png);
}
@ -103,20 +99,22 @@ ImageInfo PngDecoder::parseInfo() {
}
}
bounds = findBorders(pixels.get(), imageWidth, imageHeight);
} catch (std::bad_alloc &ex) {
LOGW("Couldn't crop borders on a PNG image of size %dx%d", imageWidth, imageHeight);
} catch (std::bad_alloc& ex) {
LOGW("Couldn't crop borders on a PNG image of size %dx%d", imageWidth,
imageHeight);
}
}
return ImageInfo {
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds
return ImageInfo{
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds,
};
}
void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect,
bool rgb565, uint32_t sampleSize) {
auto session = initDecodeSession();
auto png = session->png;
auto pinfo = session->pinfo;
@ -128,10 +126,11 @@ void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb5
if (bitDepth == 16) {
png_set_scale_16(png);
}
if (colorType == PNG_COLOR_TYPE_GRAY || colorType == PNG_COLOR_TYPE_GRAY_ALPHA) {
if (colorType == PNG_COLOR_TYPE_GRAY ||
colorType == PNG_COLOR_TYPE_GRAY_ALPHA) {
png_set_gray_to_rgb(png);
}
if (!(colorType & (uint8_t) PNG_COLOR_MASK_ALPHA)) {
if (!(colorType & (uint8_t)PNG_COLOR_MASK_ALPHA)) {
png_set_add_alpha(png, 0xff, PNG_FILLER_AFTER);
}
@ -156,7 +155,8 @@ void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb5
png_skip_rows(png, inRect.y);
for (uint32_t i = 0; i < inRect.height; ++i) {
png_read_row(png, inRowPtr, nullptr);
rowFn(outPixelsPos, inRowPtr + inStrideOffset, nullptr, outRect.width, 1);
rowFn(outPixelsPos, inRowPtr + inStrideOffset, nullptr, outRect.width,
1);
outPixelsPos += outStride;
}
png_skip_rows(png, inRemainY);
@ -174,7 +174,8 @@ void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb5
inPixelsPos = inPixels.get();
}
for (uint32_t i = 0; i < inRect.height; ++i) {
rowFn(outPixelsPos, inPixelsPos + inStrideOffset, nullptr, outRect.width, sampleSize);
rowFn(outPixelsPos, inPixelsPos + inStrideOffset, nullptr,
outRect.width, sampleSize);
inPixelsPos += inStride;
outPixelsPos += outStride;
}
@ -195,12 +196,14 @@ void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb5
png_skip_rows(png, skipStart);
png_read_row(png, inRow1Ptr, nullptr);
png_read_row(png, inRow2Ptr, nullptr);
rowFn(outPixelsPos, inRow1Ptr + inStrideOffset, inRow2Ptr + inStrideOffset, outRect.width, sampleSize);
rowFn(outPixelsPos, inRow1Ptr + inStrideOffset,
inRow2Ptr + inStrideOffset, outRect.width, sampleSize);
png_skip_rows(png, skipEnd);
outPixelsPos += outStride;
}
} else {
auto tmpPixels = std::make_unique<uint8_t[]>(inStride * outRect.height * 2);
auto tmpPixels =
std::make_unique<uint8_t[]>(inStride * outRect.height * 2);
auto* tmpPixelsPos = tmpPixels.get();
uint32_t inHeightRounded = outRect.height * sampleSize;
@ -221,8 +224,9 @@ void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb5
}
for (uint32_t i = 0; i < outRect.height; ++i) {
rowFn(outPixelsPos, tmpPixelsPos + inStrideOffset, tmpPixelsPos + inStride + inStrideOffset,
outRect.width, sampleSize);
rowFn(outPixelsPos, tmpPixelsPos + inStrideOffset,
tmpPixelsPos + inStride + inStrideOffset, outRect.width,
sampleSize);
outPixelsPos += outStride;
tmpPixelsPos += inStride * 2;
}

decoder_png.h

@ -27,15 +27,16 @@ public:
void init();
};
class PngDecoder: public BaseDecoder {
class PngDecoder : public BaseDecoder {
public:
PngDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565,
uint32_t sampleSize);
private:
ImageInfo parseInfo();
std::unique_ptr<PngDecodeSession> initDecodeSession();
};
#endif //IMAGEDECODER_DECODER_PNG_H
#endif // IMAGEDECODER_DECODER_PNG_H

decoder_webp.cpp

@ -4,10 +4,8 @@
#include "decoder_webp.h"
WebpDecoder::WebpDecoder(
std::shared_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
WebpDecoder::WebpDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders)
: BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
@ -21,35 +19,39 @@ ImageInfo WebpDecoder::parseInfo() {
uint32_t imageHeight = features.height;
bool isAnimated = features.has_animation;
Rect bounds = { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
Rect bounds = {.x = 0, .y = 0, .width = imageWidth, .height = imageHeight};
if (!isAnimated && cropBorders) {
int iw = features.width;
int ih = features.height;
uint8_t *u, *v;
int stride, uvStride;
auto* luma = WebPDecodeYUV(stream->bytes, stream->size, &iw, &ih, &u, &v, &stride, &uvStride);
auto* luma = WebPDecodeYUV(stream->bytes, stream->size, &iw, &ih, &u, &v,
&stride, &uvStride);
if (luma != nullptr) {
bounds = findBorders(luma, imageWidth, imageHeight);
WebPFree(luma);
} else {
LOGW("Couldn't crop borders on a WebP image of size %dx%d", imageWidth, imageHeight);
LOGW("Couldn't crop borders on a WebP image of size %dx%d", imageWidth,
imageHeight);
}
}
return ImageInfo {
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = isAnimated,
.bounds = bounds
return ImageInfo{
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = isAnimated,
.bounds = bounds,
};
}
void WebpDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
void WebpDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect,
bool rgb565, uint32_t sampleSize) {
WebPDecoderConfig config;
WebPInitDecoderConfig(&config);
// Set decode region
config.options.use_cropping = inRect.width != info.imageWidth || inRect.height != info.imageHeight;
config.options.use_cropping =
inRect.width != info.imageWidth || inRect.height != info.imageHeight;
config.options.crop_left = inRect.x;
config.options.crop_top = inRect.y;
config.options.crop_width = inRect.width;
@ -73,10 +75,8 @@ void WebpDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb
code = WebPDecode(stream->bytes, stream->size, &config);
} else {
WebPData data = {.bytes = stream->bytes, .size = stream->size};
auto demuxer = std::unique_ptr<WebPDemuxer, decltype(&WebPDemuxDelete)> {
WebPDemux(&data),
WebPDemuxDelete
};
auto demuxer = std::unique_ptr<WebPDemuxer, decltype(&WebPDemuxDelete)>{
WebPDemux(&data), WebPDemuxDelete};
WebPIterator iterator;
if (!WebPDemuxGetFrame(demuxer.get(), 1, &iterator)) {

decoder_webp.h

@ -9,14 +9,15 @@
#include <src/webp/decode.h>
#include <src/webp/demux.h>
class WebpDecoder: public BaseDecoder {
class WebpDecoder : public BaseDecoder {
public:
WebpDecoder(std::shared_ptr<Stream>&& stream, bool cropBorders);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565,
uint32_t sampleSize);
private:
ImageInfo parseInfo();
};
#endif //IMAGEDECODER_DECODER_WEBP_H
#endif // IMAGEDECODER_DECODER_WEBP_H

decoders.h

@ -22,4 +22,4 @@
#include "decoder_jxl.h"
#endif
#endif //IMAGEDECODER_DECODERS_H
#endif // IMAGEDECODER_DECODERS_H

java_objects.cpp

@ -14,24 +14,28 @@ void init_java_objects(JNIEnv* env) {
jclass tmpCls;
tmpCls = env->FindClass("tachiyomi/decoder/ImageDecoder");
imageDecoderCls = (jclass) env->NewGlobalRef(tmpCls);
imageDecoderCls = (jclass)env->NewGlobalRef(tmpCls);
imageDecoderCtor = env->GetMethodID(imageDecoderCls, "<init>", "(JII)V");
tmpCls = env->FindClass("tachiyomi/decoder/ImageType");
imageTypeCls = (jclass) env->NewGlobalRef(tmpCls);
imageTypeCls = (jclass)env->NewGlobalRef(tmpCls);
imageTypeCtor = env->GetMethodID(imageTypeCls, "<init>", "(IZ)V");
createBitmapMethod = env->GetStaticMethodID(imageDecoderCls, "createBitmap", "(IIZ)Landroid/graphics/Bitmap;");
createBitmapMethod = env->GetStaticMethodID(imageDecoderCls, "createBitmap",
"(IIZ)Landroid/graphics/Bitmap;");
env->DeleteLocalRef(tmpCls);
}
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width, jint height) {
return env->NewObject(imageDecoderCls, imageDecoderCtor, decoderPtr, width, height);
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width,
jint height) {
return env->NewObject(imageDecoderCls, imageDecoderCtor, decoderPtr, width,
height);
}
jobject create_bitmap(JNIEnv* env, jint width, jint height, jboolean rgb565) {
return env->CallStaticObjectMethod(imageDecoderCls, createBitmapMethod, width, height, rgb565);
return env->CallStaticObjectMethod(imageDecoderCls, createBitmapMethod, width,
height, rgb565);
}
jobject create_image_type(JNIEnv* env, jint format, jboolean isAnimated) {

java_objects.h

@ -9,10 +9,11 @@
void init_java_objects(JNIEnv* env);
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width, jint height);
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width,
jint height);
jobject create_bitmap(JNIEnv* env, jint width, jint height, jboolean rgb565);
jobject create_image_type(JNIEnv* env, jint format, jboolean isAnimated);
#endif //IMAGEDECODER_JAVA_OBJECTS_H
#endif // IMAGEDECODER_JAVA_OBJECTS_H

java_stream.cpp

@ -22,7 +22,8 @@ std::shared_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream) {
uint8_t* stream = nullptr;
int available = env->CallIntMethod(jstream, availableMethod);
uint32_t streamReservedSize = available > BUFFER_SIZE ? available : CONTAINER_DEFAULT_SIZE;
uint32_t streamReservedSize =
available > BUFFER_SIZE ? available : CONTAINER_DEFAULT_SIZE;
uint32_t streamOffset = 0;
// Make sure the stream didn't throw an exception before env calls
@ -36,8 +37,9 @@ std::shared_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream) {
goto fail;
}
// Use malloc to make it compatible with realloc and C++ unique_ptr with custom deleter
stream = (uint8_t*) malloc(streamReservedSize);
// Use malloc to make it compatible with realloc and C++ unique_ptr with
// custom deleter
stream = (uint8_t*)malloc(streamReservedSize);
if (!stream) {
goto fail;
}
@ -53,8 +55,8 @@ std::shared_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream) {
break;
}
while (streamReservedSize < streamOffset + read) {
streamReservedSize = (int) (streamReservedSize * 1.5);
auto* tmp = (uint8_t*) realloc(stream, streamReservedSize);
streamReservedSize = (int)(streamReservedSize * 1.5);
auto* tmp = (uint8_t*)realloc(stream, streamReservedSize);
if (!tmp) {
goto fail;
}
@ -72,8 +74,10 @@ std::shared_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream) {
}
return std::shared_ptr<Stream>(new Stream(stream, streamOffset),
[](Stream* stream) { free(stream->bytes); delete stream; }
);
[](Stream* stream) {
free(stream->bytes);
delete stream;
});
fail:
free(stream);

java_stream.h

@ -5,14 +5,14 @@
#ifndef IMAGEDECODER_JAVA_STREAM_H
#define IMAGEDECODER_JAVA_STREAM_H
#include <stdlib.h>
#include <memory>
#include <jni.h>
#include "log.h"
#include "stream.h"
#include <jni.h>
#include <memory>
#include <stdlib.h>
void init_java_stream(JNIEnv* env);
std::shared_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream);
#endif //IMAGEDECODER_JAVA_STREAM_H
#endif // IMAGEDECODER_JAVA_STREAM_H


@ -2,16 +2,16 @@
// Created by len on 23/12/20.
//
#include <jni.h>
#include <android/bitmap.h>
#include "java_stream.h"
#include "java_objects.h"
#include "decoders.h"
#include "borders.h"
#include "decoders.h"
#include "java_objects.h"
#include "java_stream.h"
#include <android/bitmap.h>
#include <jni.h>
jint JNI_OnLoad(JavaVM* vm, void*) {
JNIEnv* env;
if (vm->GetEnv((void **) &env, JNI_VERSION_1_6) == JNI_OK) {
if (vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK) {
init_java_stream(env);
init_java_objects(env);
@ -21,11 +21,10 @@ jint JNI_OnLoad(JavaVM* vm, void*) {
return JNI_VERSION_1_6;
}
extern "C"
JNIEXPORT jobject JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeNewInstance(
JNIEnv* env, jclass, jobject jstream, jboolean cropBorders
) {
extern "C" JNIEXPORT jobject JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeNewInstance(JNIEnv* env, jclass,
jobject jstream,
jboolean cropBorders) {
auto stream = read_all_java_stream(env, jstream);
if (!stream) {
return nullptr;
@ -33,7 +32,8 @@ Java_tachiyomi_decoder_ImageDecoder_nativeNewInstance(
BaseDecoder* decoder;
try {
if (false) {} // This should be optimized out by the compiler.
if (false) {
} // This should be optimized out by the compiler.
#ifdef HAVE_LIBJPEG
else if (is_jpeg(stream->bytes)) {
decoder = new JpegDecoder(std::move(stream), cropBorders);
@ -63,33 +63,27 @@ Java_tachiyomi_decoder_ImageDecoder_nativeNewInstance(
LOGE("No decoder found to handle this stream");
return nullptr;
}
} catch (std::exception &ex) {
} catch (std::exception& ex) {
LOGE("%s", ex.what());
return nullptr;
}
Rect bounds = decoder->info.bounds;
return create_image_decoder(env, (jlong) decoder, bounds.width, bounds.height);
return create_image_decoder(env, (jlong)decoder, bounds.width, bounds.height);
}
extern "C"
JNIEXPORT jobject JNICALL
extern "C" JNIEXPORT jobject JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeDecode(
JNIEnv* env, jobject, jlong decoderPtr, jboolean rgb565, jint sampleSize,
jint x, jint y, jint width, jint height
) {
auto* decoder = (BaseDecoder*) decoderPtr;
JNIEnv* env, jobject, jlong decoderPtr, jboolean rgb565, jint sampleSize,
jint x, jint y, jint width, jint height) {
auto* decoder = (BaseDecoder*)decoderPtr;
// Bounds of the image when crop borders is enabled, otherwise it matches the entire image.
// Bounds of the image when crop borders is enabled, otherwise it matches the
// entire image.
Rect bounds = decoder->info.bounds;
// Translated requested bounds to the original image.
Rect inRect = {
x + bounds.x,
y + bounds.y,
(uint32_t) width,
(uint32_t) height
};
Rect inRect = {x + bounds.x, y + bounds.y, (uint32_t)width, (uint32_t)height};
// Sampled requested bounds according to sampleSize.
// It matches the translated bounds when the value is 1
@ -101,12 +95,13 @@ Java_tachiyomi_decoder_ImageDecoder_nativeDecode(
auto* bitmap = create_bitmap(env, outRect.width, outRect.height, rgb565);
if (!bitmap) {
LOGE("Failed to create a bitmap of size %dx%dx%d", outRect.width, outRect.height, rgb565 ? 2 : 4);
LOGE("Failed to create a bitmap of size %dx%dx%d", outRect.width,
outRect.height, rgb565 ? 2 : 4);
return nullptr;
}
uint8_t* pixels;
AndroidBitmap_lockPixels(env, bitmap, (void**) &pixels);
AndroidBitmap_lockPixels(env, bitmap, (void**)&pixels);
if (!pixels) {
LOGE("Failed to lock pixels");
return nullptr;
@ -114,7 +109,7 @@ Java_tachiyomi_decoder_ImageDecoder_nativeDecode(
try {
decoder->decode(pixels, outRect, inRect, rgb565, sampleSize);
} catch (std::exception &ex) {
} catch (std::exception& ex) {
LOGE("%s", ex.what());
AndroidBitmap_unlockPixels(env, bitmap);
return nullptr;
@ -124,18 +119,16 @@ Java_tachiyomi_decoder_ImageDecoder_nativeDecode(
return bitmap;
}
extern "C"
JNIEXPORT void JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeRecycle(JNIEnv*, jobject, jlong decoderPtr) {
auto* decoder = (BaseDecoder*) decoderPtr;
extern "C" JNIEXPORT void JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeRecycle(JNIEnv*, jobject,
jlong decoderPtr) {
auto* decoder = (BaseDecoder*)decoderPtr;
delete decoder;
}
extern "C"
JNIEXPORT jobject JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeFindType(
JNIEnv* env, jclass, jbyteArray array
) {
extern "C" JNIEXPORT jobject JNICALL
Java_tachiyomi_decoder_ImageDecoder_nativeFindType(JNIEnv* env, jclass,
jbyteArray array) {
uint32_t toRead = 32;
uint32_t size = env->GetArrayLength(array);
@ -146,7 +139,7 @@ Java_tachiyomi_decoder_ImageDecoder_nativeFindType(
auto _bytes = std::make_unique<uint8_t[]>(toRead);
auto bytes = _bytes.get();
env->GetByteArrayRegion(array, 0, toRead, (jbyte*) bytes);
env->GetByteArrayRegion(array, 0, toRead, (jbyte*)bytes);
if (is_jpeg(bytes)) {
return create_image_type(env, 0, false);
@ -155,12 +148,13 @@ Java_tachiyomi_decoder_ImageDecoder_nativeFindType(
} else if (is_webp(bytes)) {
try {
#ifdef HAVE_LIBWEBP
auto decoder = std::make_unique<WebpDecoder>(std::make_shared<Stream>(bytes, size), false);
auto decoder = std::make_unique<WebpDecoder>(
std::make_shared<Stream>(bytes, size), false);
return create_image_type(env, 2, decoder->info.isAnimated);
#else
throw std::runtime_error("WebP decoder not available");
#endif
} catch (std::exception &ex) {
} catch (std::exception& ex) {
LOGW("Failed to parse WebP header. Falling back to non animated WebP");
return create_image_type(env, 2, false);
}
@ -171,9 +165,12 @@ Java_tachiyomi_decoder_ImageDecoder_nativeFindType(
}
switch (get_ftyp_image_type(bytes, toRead)) {
case ftyp_image_type_heif: return create_image_type(env, 4, false);
case ftyp_image_type_avif: return create_image_type(env, 5, false);
case ftyp_image_type_no: break;
case ftyp_image_type_heif:
return create_image_type(env, 4, false);
case ftyp_image_type_avif:
return create_image_type(env, 5, false);
case ftyp_image_type_no:
break;
}
LOGW("Failed to find image type");

log.h

@ -4,11 +4,11 @@
#include "android/log.h"
#define TAG "ImageDecoder"
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG ,__VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG ,__VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG ,__VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG ,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG ,__VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, TAG ,__VA_ARGS__)
#define LOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#define LOGF(...) __android_log_print(ANDROID_LOG_FATAL, TAG, __VA_ARGS__)
#endif //LOG_H
#endif // LOG_H

rect.h

@ -18,10 +18,10 @@ struct Rect {
return *this;
}
return {
.x = x / scale,
.y = y / scale,
.width = width / scale,
.height = height / scale,
.x = x / scale,
.y = y / scale,
.width = width / scale,
.height = height / scale,
};
}
@ -30,12 +30,12 @@ struct Rect {
return *this;
}
return {
.x = x * scale,
.y = y * scale,
.width = width * scale,
.height = height * scale,
.x = x * scale,
.y = y * scale,
.width = width * scale,
.height = height * scale,
};
}
};
#endif //IMAGEDECODER_RECT_H
#endif // IMAGEDECODER_RECT_H


@ -19,14 +19,13 @@
#include "row_convert.h"