Initial commit

inorichi 2021-01-04 18:53:50 +01:00
commit 3621050eb2
38 changed files with 1854 additions and 0 deletions

10
.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
*.iml
.gradle
/local.properties
/.idea
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties

26
build.gradle Normal file
View File

@@ -0,0 +1,26 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
ext {
kotlin_version = '1.4.21'
}
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.1.1"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:1.4.21"
}
}
allprojects {
repositories {
google()
jcenter()
maven { url 'https://jitpack.io' }
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

19
gradle.properties Normal file
View File

@@ -0,0 +1,19 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

6
gradle/wrapper/gradle-wrapper.properties vendored Normal file
View File

@@ -0,0 +1,6 @@
#Sun Dec 20 14:18:42 CET 2020
distributionBase=GRADLE_USER_HOME
distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-bin.zip
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME

172
gradlew vendored Executable file
View File

@@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

84
gradlew.bat vendored Normal file
View File

@@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

1
library/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/build

45
library/build.gradle Normal file
View File

@@ -0,0 +1,45 @@
plugins {
id 'com.android.library'
id 'kotlin-android'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.3"
defaultConfig {
minSdkVersion 21
targetSdkVersion 30
versionCode 1
versionName "1.0"
consumerProguardFiles "consumer-rules.pro"
externalNativeBuild {
cmake {
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
externalNativeBuild {
cmake {
path file('src/main/cpp/CMakeLists.txt')
version '3.18.1'
}
}
}
dependencies {
}

3
library/consumer-rules.pro vendored Normal file
View File

@@ -0,0 +1,3 @@
-keep class tachiyomi.decoder.ImageDecoder {
*;
}

21
library/proguard-rules.pro vendored Normal file
View File

@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

5
library/src/main/AndroidManifest.xml Normal file
View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="tachiyomi.decoder">
</manifest>

21
library/src/main/cpp/CMakeLists.txt Normal file
View File

@@ -0,0 +1,21 @@
cmake_minimum_required(VERSION 3.14)
project(imagedecoder C CXX ASM)
set(CMAKE_BUILD_TYPE Release)
add_library(imagedecoder SHARED
java_stream.cpp
java_wrapper.cpp
java_objects.cpp
decoder_jpeg.cpp
decoder_png.cpp
decoder_webp.cpp
borders.cpp
row_convert.cpp
)
include(cmake/libjpeg-turbo.cmake)
include(cmake/libpng.cmake)
include(cmake/libwebp.cmake)
target_link_libraries(imagedecoder android jnigraphics log)

216
library/src/main/cpp/borders.cpp Normal file
View File

@@ -0,0 +1,216 @@
//
// Created by len on 25/12/20.
//
#include "borders.h"
#include <math.h>
bool inline isBlackPixel(const uint8_t* pixels, uint32_t width, uint32_t x, uint32_t y) {
const uint8_t pixel = *((uint8_t *)pixels + (y * width + x));
return pixel < thresholdForBlack;
}
bool inline isWhitePixel(const uint8_t* pixels, uint32_t width, uint32_t x, uint32_t y) {
const uint8_t pixel = *((uint8_t *)pixels + (y * width + x));
return pixel > thresholdForWhite;
}
/** Return the first x position where there is a substantial amount of fill,
* starting the search from the left. */
uint32_t findBorderLeft(uint8_t* pixels, uint32_t width, uint32_t height, uint32_t top, uint32_t bottom) {
uint32_t x, y;
const auto filledLimit = (uint32_t) round(height * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
for (y = top; y < bottom; y+=2) {
if (isBlackPixel(pixels, width, 0, y)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, 0, y)) {
whitePixels++;
}
}
auto detectFunc = isBlackPixel;
if (whitePixels > filledLimit && blackPixels > filledLimit) {
// Mixed fill found... don't crop anything
return 0;
} else if (blackPixels > filledLimit) {
detectFunc = isWhitePixel;
}
// Scan vertical lines in search of filled lines
for (x = 1; x < width; x++) {
uint32_t filledCount = 0;
for (y = top; y < bottom; y+=2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
}
if (filledCount > filledLimit) {
// This line contains enough fill
return x;
}
}
// No fill found... don't crop anything
return 0;
}
/** Return the first x position where there is a substantial amount of fill,
* starting the search from the right. */
uint32_t findBorderRight(uint8_t* pixels, uint32_t width, uint32_t height, uint32_t top, uint32_t bottom) {
uint32_t x, y;
const auto filledLimit = (uint32_t) round(height * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
uint32_t lastX = width - 1;
for (y = top; y < bottom; y+=2) {
if (isBlackPixel(pixels, width, lastX, y)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, lastX, y)) {
whitePixels++;
}
}
auto detectFunc = isBlackPixel;
if (whitePixels > filledLimit && blackPixels > filledLimit) {
// Mixed fill found... don't crop anything
return width;
} else if (blackPixels > filledLimit) {
detectFunc = isWhitePixel;
}
// Scan vertical lines in search of filled lines. x is unsigned, so use a
// post-decrement test instead of x >= 0, which would always be true and wrap past zero.
for (x = width - 1; x-- > 0; ) {
uint32_t filledCount = 0;
for (y = top; y < bottom; y+=2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
}
if (filledCount > filledLimit) {
// This line contains enough fill
return x + 1;
}
}
// No fill found... don't crop anything
return width;
}
/** Return the first y position where there is a substantial amount of fill,
* starting the search from the top. */
uint32_t findBorderTop(uint8_t* pixels, uint32_t width, uint32_t height) {
uint32_t x, y;
const auto filledLimit = (uint32_t) round(width * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
for (x = 0; x < width; x+=2) {
if (isBlackPixel(pixels, width, x, 0)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, x, 0)) {
whitePixels++;
}
}
auto detectFunc = isBlackPixel;
if (whitePixels > filledLimit && blackPixels > filledLimit) {
// Mixed fill found... don't crop anything
return 0;
} else if (blackPixels > filledLimit) {
detectFunc = isWhitePixel;
}
// Scan horizontal lines in search of filled lines
for (y = 1; y < height; y++) {
uint32_t filledCount = 0;
for (x = 0; x < width; x+=2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
}
if (filledCount > filledLimit) {
// This line contains enough fill
return y;
}
}
// No fill found... don't crop anything
return 0;
}
/** Return the first y position where there is a substantial amount of fill,
* starting the search from the bottom. */
uint32_t findBorderBottom(uint8_t* pixels, uint32_t width, uint32_t height) {
uint32_t x, y;
const auto filledLimit = (uint32_t) round(width * filledRatioLimit / 2);
// Scan first line to detect dominant color
uint32_t whitePixels = 0;
uint32_t blackPixels = 0;
uint32_t lastY = height - 1;
for (x = 0; x < width; x+=2) {
if (isBlackPixel(pixels, width, x, lastY)) {
blackPixels++;
} else if (isWhitePixel(pixels, width, x, lastY)) {
whitePixels++;
}
}
auto detectFunc = isBlackPixel;
if (whitePixels > filledLimit && blackPixels > filledLimit) {
// Mixed fill found... don't crop anything
return height;
} else if (blackPixels > filledLimit) {
detectFunc = isWhitePixel;
}
// Scan horizontal lines in search of filled lines. y is unsigned, so use a
// post-decrement test instead of y >= 0, which would always be true and wrap past zero.
for (y = height - 1; y-- > 0; ) {
uint32_t filledCount = 0;
for (x = 0; x < width; x+=2) {
if (detectFunc(pixels, width, x, y)) {
filledCount++;
}
}
if (filledCount > filledLimit) {
// This line contains enough fill
return y + 1;
}
}
// No fill found... don't crop anything
return height;
}
Rect findBorders(uint8_t *pixels, uint32_t width, uint32_t height) {
uint32_t top = findBorderTop(pixels, width, height);
uint32_t bottom = findBorderBottom(pixels, width, height);
uint32_t left = findBorderLeft(pixels, width, height, top, bottom);
uint32_t right = findBorderRight(pixels, width, height, top, bottom);
return {
.x = left,
.y = top,
.width = right - left,
.height = bottom - top
};
}
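
The crop search above operates on a single-channel grayscale buffer. Below is a minimal, hypothetical harness — not part of this commit — showing how findBorders could be exercised in isolation; it assumes it is compiled together with borders.cpp and the Rect type from rect.h (which is not shown in this excerpt).

// crop_check.cpp — hypothetical harness, not part of this commit.
// Builds a white 100x100 grayscale page with a black block covering
// rows/columns 20..79 and prints the crop rectangle findBorders returns.
#include <cstdint>
#include <cstdio>
#include <vector>
#include "borders.h"

int main() {
    const uint32_t width = 100, height = 100;
    std::vector<uint8_t> pixels(width * height, 255); // all white
    for (uint32_t y = 20; y < 80; y++) {
        for (uint32_t x = 20; x < 80; x++) {
            pixels[y * width + x] = 0; // black content block
        }
    }
    Rect r = findBorders(pixels.data(), width, height);
    // With the 0.25% fill threshold this should report x=20 y=20 w=60 h=60
    printf("x=%u y=%u w=%u h=%u\n", r.x, r.y, r.width, r.height);
    return 0;
}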

23
library/src/main/cpp/borders.h Normal file
View File

@@ -0,0 +1,23 @@
//
// Created by len on 25/12/20.
//
#ifndef IMAGEDECODER_BORDERS_H
#define IMAGEDECODER_BORDERS_H
#include "rect.h"
/** A line will be considered as having content if 0.25% of it is filled. */
const float filledRatioLimit = 0.0025;
/** When the threshold is closer to 1, less content will be cropped. **/
#define THRESHOLD 0.75
const uint8_t thresholdForBlack = (uint8_t)(255.0 * THRESHOLD);
const uint8_t thresholdForWhite = (uint8_t)(255.0 - 255.0 * THRESHOLD);
/** Finds the borders of the image. This only works on bitmaps of a single component (grayscale) **/
Rect findBorders(uint8_t *pixels, uint32_t width, uint32_t height);
#endif //IMAGEDECODER_BORDERS_H
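
Numerically, THRESHOLD = 0.75 gives thresholdForBlack = (uint8_t)(255.0 * 0.75) = 191 and thresholdForWhite = (uint8_t)(255.0 - 191.25) = 63: a grayscale value below 191 counts as black and a value above 63 counts as white (values from 64 to 190 match both comparisons, but the scans in borders.cpp test the black predicate first). filledRatioLimit = 0.0025 means a scanned line, sampled at every second pixel, stops the crop search once more than 0.25% of those samples carry content.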

19
library/src/main/cpp/cmake/libjpeg-turbo.cmake Normal file
View File

@@ -0,0 +1,19 @@
cmake_minimum_required(VERSION 3.14)
include(FetchContent)
set(CMAKE_BUILD_TYPE Release)
FetchContent_Declare(libjpeg-turbo
GIT_REPOSITORY https://github.com/libjpeg-turbo/libjpeg-turbo
GIT_TAG 2.0.90
)
option(WITH_JPEG8 "" 1)
option(WITH_TURBOJPEG "" 0)
option(ENABLE_SHARED "" 0)
option(REQUIRE_SIMD "" 1)
FetchContent_MakeAvailable(libjpeg-turbo)
include_directories(${libjpeg-turbo_BINARY_DIR} ${libjpeg-turbo_SOURCE_DIR})
target_link_libraries(imagedecoder jpeg-static)

18
library/src/main/cpp/cmake/libpng.cmake Normal file
View File

@@ -0,0 +1,18 @@
cmake_minimum_required(VERSION 3.14)
include(FetchContent)
set(CMAKE_BUILD_TYPE Release)
FetchContent_Declare(libpng
GIT_REPOSITORY https://github.com/glennrp/libpng
GIT_TAG v1.6.37
)
option(PNG_SHARED "" OFF)
option(PNG_EXECUTABLES "" OFF)
option(PNG_TESTS "" OFF)
FetchContent_MakeAvailable(libpng)
include_directories(${libpng_BINARY_DIR} ${libpng_SOURCE_DIR})
target_link_libraries(imagedecoder png_static z)

26
library/src/main/cpp/cmake/libwebp.cmake Normal file
View File

@@ -0,0 +1,26 @@
cmake_minimum_required(VERSION 3.14)
include(FetchContent)
set(CMAKE_BUILD_TYPE Release)
FetchContent_Declare(libwebp
GIT_REPOSITORY https://chromium.googlesource.com/webm/libwebp
GIT_TAG v1.1.0
)
option(WEBP_BUILD_ANIM_UTILS "" OFF)
option(WEBP_BUILD_CWEBP "" OFF)
option(WEBP_BUILD_DWEBP "" OFF)
option(WEBP_BUILD_GIF2WEBP "" OFF)
option(WEBP_BUILD_IMG2WEBP "" OFF)
option(WEBP_BUILD_VWEBP "" OFF)
option(WEBP_BUILD_WEBPINFO "" OFF)
option(WEBP_BUILD_WEBPMUX "" OFF)
option(WEBP_BUILD_EXTRAS "" OFF)
option(WEBP_BUILD_WEBP_JS "" OFF)
option(WEBP_ENABLE_SWAP_16BIT_CSP "" ON)
FetchContent_MakeAvailable(libwebp)
include_directories(${libwebp_BINARY_DIR} ${libwebp_SOURCE_DIR})
target_link_libraries(imagedecoder webpdecoder)

36
library/src/main/cpp/decoder_base.h Normal file
View File

@@ -0,0 +1,36 @@
//
// Created by len on 23/12/20.
//
#ifndef IMAGEDECODER_DECODER_BASE_H
#define IMAGEDECODER_DECODER_BASE_H
#include "java_stream.h"
#include "borders.h"
struct ImageInfo {
uint32_t imageWidth;
uint32_t imageHeight;
bool isAnimated;
Rect bounds;
};
class BaseDecoder {
public:
BaseDecoder(std::unique_ptr<Stream>&& stream, bool cropBorders) {
this->stream = std::move(stream);
this->cropBorders = cropBorders;
}
virtual ~BaseDecoder() {};
virtual void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) = 0;
protected:
std::unique_ptr<Stream> stream;
public:
bool cropBorders;
ImageInfo info;
};
#endif //IMAGEDECODER_DECODER_BASE_H
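
Each concrete decoder exposes a static handles() check on the leading magic bytes. The JNI wrapper that performs the actual selection is truncated in this excerpt, so the following is only a sketch of such a dispatch, under the assumption that it mirrors the decoder headers shown later in this commit.

// make_decoder.cpp — hypothetical dispatch helper, not part of this commit.
#include <cstdint>
#include <memory>
#include <stdexcept>
#include "decoder_jpeg.h"
#include "decoder_png.h"
#include "decoder_webp.h"

std::unique_ptr<BaseDecoder> make_decoder(std::unique_ptr<Stream>&& stream, bool cropBorders) {
    const uint8_t* magic = stream->bytes; // each handles() only inspects the first few bytes
    if (JpegDecoder::handles(magic)) {
        return std::make_unique<JpegDecoder>(std::move(stream), cropBorders);
    }
    if (PngDecoder::handles(magic)) {
        return std::make_unique<PngDecoder>(std::move(stream), cropBorders);
    }
    if (WebpDecoder::handles(magic)) {
        return std::make_unique<WebpDecoder>(std::move(stream), cropBorders);
    }
    throw std::runtime_error("Unsupported image format");
}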

114
library/src/main/cpp/decoder_jpeg.cpp Normal file
View File

@@ -0,0 +1,114 @@
//
// Created by len on 23/12/20.
//
#include "decoder_jpeg.h"
bool JpegDecoder::handles(const uint8_t* stream) {
return stream[0] == 0xFF && stream[1] == 0xD8 && stream[2] == 0xFF;
}
JpegDecoder::JpegDecoder(
std::unique_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
JpegDecodeSession::JpegDecodeSession() : jinfo(jpeg_decompress_struct{}), jerr(jpeg_error_mgr{}) {
}
void JpegDecodeSession::init(Stream *stream) {
jinfo.err = jpeg_std_error(&jerr);
jerr.error_exit = [](j_common_ptr info){
char jpegLastErrorMsg[JMSG_LENGTH_MAX];
(*(info->err->format_message))(info, jpegLastErrorMsg);
throw std::runtime_error(jpegLastErrorMsg);
};
jpeg_create_decompress(&jinfo);
jpeg_mem_src(&jinfo, stream->bytes, stream->size);
jpeg_read_header(&jinfo, true);
}
JpegDecodeSession::~JpegDecodeSession() {
jpeg_destroy_decompress(&jinfo);
}
std::unique_ptr<JpegDecodeSession> JpegDecoder::initDecodeSession() {
auto session = std::make_unique<JpegDecodeSession>();
session->init(stream.get());
return session;
}
ImageInfo JpegDecoder::parseInfo() {
auto session = initDecodeSession();
auto jinfo = session->jinfo;
Rect bounds{};
if (cropBorders) {
jinfo.out_color_space = JCS_GRAYSCALE;
jpeg_start_decompress(&jinfo);
auto pixels = std::make_unique<uint8_t[]>(jinfo.image_width * jinfo.image_height);
uint8_t* pixelsPtr = pixels.get();
while (jinfo.output_scanline < jinfo.output_height) {
uint8_t* offset = pixelsPtr + jinfo.output_scanline * jinfo.image_width;
jpeg_read_scanlines(&jinfo, &offset, 1);
}
bounds = findBorders(pixels.get(), jinfo.image_width, jinfo.image_height);
jpeg_finish_decompress(&jinfo);
} else {
bounds = Rect { .x = 0, .y = 0, .width = jinfo.image_width, .height = jinfo.image_height };
}
ImageInfo info {
.imageWidth = jinfo.image_width,
.imageHeight = jinfo.image_height,
.isAnimated = false,
.bounds = bounds
};
return info;
}
void JpegDecoder::decode(uint8_t* outPixels, Rect outRect, Rect, bool rgb565, uint32_t sampleSize) {
auto session = initDecodeSession();
auto jinfo = session->jinfo;
jinfo.scale_denom = sampleSize;
jinfo.out_color_space = rgb565 ? JCS_RGB565 : JCS_EXT_RGBA;
if (rgb565) {
jinfo.dither_mode = JDITHER_NONE;
}
jpeg_start_decompress(&jinfo);
uint32_t pixelSize = rgb565 ? 2 : 4;
uint32_t inX = outRect.x;
uint32_t inWidth = outRect.width;
uint32_t outStride = outRect.width * pixelSize;
jpeg_crop_scanline(&jinfo, &inX, &inWidth);
jpeg_skip_scanlines(&jinfo, outRect.y);
// This has to be called after jpeg_crop_scanline as inWidth might change
uint32_t inStride = inWidth * pixelSize;
auto inPixels = std::make_unique<uint8_t[]>(inStride);
uint8_t* inPixelsPos = inPixels.get();
// libjpeg doesn't always provide the exact requested region because the crop has to be
// aligned to DCT block boundaries, so we have to account for the shift.
uint8_t* inPixelsPosAligned = inPixelsPos + (outRect.x - inX) * pixelSize;
uint8_t* outPixelsPos = outPixels;
for (uint32_t i = 0; i < outRect.height; i++) {
jpeg_read_scanlines(&jinfo, &inPixelsPos, 1);
memcpy(outPixelsPos, inPixelsPosAligned, outStride);
outPixelsPos += outStride;
}
jpeg_skip_scanlines(&jinfo, outRect.height - outRect.y);
jpeg_finish_decompress(&jinfo);
}
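
For the shift handled at the end of decode(): jpeg_crop_scanline snaps the left edge down to an iMCU boundary (8 or 16 pixels depending on chroma subsampling), so a request such as outRect.x = 10 can come back as inX = 8 with inWidth widened accordingly; the memcpy then starts (outRect.x - inX) * pixelSize bytes into the decoded row, i.e. 2 * 4 = 8 bytes in RGBA mode, so only the requested pixels land in outPixels.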

39
library/src/main/cpp/decoder_jpeg.h Normal file
View File

@@ -0,0 +1,39 @@
//
// Created by len on 23/12/20.
//
#ifndef IMAGEDECODER_DECODER_JPEG_H
#define IMAGEDECODER_DECODER_JPEG_H
#include <stdio.h>
#include <memory>
#include "decoder_base.h"
#include "jpeglib.h"
#include "log.h"
// Wrap the JPEG C API in this class to automatically manage memory
class JpegDecodeSession {
public:
JpegDecodeSession();
~JpegDecodeSession();
jpeg_decompress_struct jinfo;
jpeg_error_mgr jerr;
void init(Stream* stream);
};
class JpegDecoder: public BaseDecoder {
public:
JpegDecoder(std::unique_ptr<Stream>&& stream, bool cropBorders);
static bool handles(const uint8_t* stream);
void decode(uint8_t *outPixels, Rect outRect, Rect srcRegion, bool rgb565,
uint32_t sampleSize);
private:
ImageInfo parseInfo();
std::unique_ptr<JpegDecodeSession> initDecodeSession();
};
#endif //IMAGEDECODER_DECODER_JPEG_H

212
library/src/main/cpp/decoder_png.cpp Normal file
View File

@@ -0,0 +1,212 @@
//
// Created by len on 24/12/20.
//
#include "decoder_png.h"
#include "row_convert.h"
#include <algorithm>
static void png_skip_rows(png_structrp png_ptr, png_uint_32 num_rows) {
for (png_uint_32 i = 0; i < num_rows; ++i) {
png_read_row(png_ptr, nullptr, nullptr);
}
}
bool PngDecoder::handles(const uint8_t* stream) {
return stream[0] == 0x89 && stream[1] == 0x50 && stream[2] == 0x4E && stream[3] == 0x47;
}
PngDecoder::PngDecoder(
std::unique_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
PngDecodeSession::PngDecodeSession(Stream* stream) : png(nullptr), pinfo(nullptr),
reader({ .bytes = stream->bytes, .read = 0, .remain = stream->size }) {
}
void PngDecodeSession::init() {
auto errorFn = [](png_struct*, png_const_charp msg) {
throw std::runtime_error(msg);
};
auto warnFn = [](png_struct*, png_const_charp msg) {
LOGW("%s", msg);
};
png = png_create_read_struct(PNG_LIBPNG_VER_STRING, nullptr, errorFn, warnFn);
if (!png) {
throw std::runtime_error("Failed to create png read struct");
}
pinfo = png_create_info_struct(png);
if (!pinfo) {
throw std::runtime_error("Failed to create png info struct");
}
auto readFn = [](png_struct* p, png_byte* data, png_size_t length) {
auto* r = (PngReader*) png_get_io_ptr(p);
uint32_t next = std::min(r->remain, (uint32_t) length);
if (next > 0) {
memcpy(data, r->bytes + r->read, next);
r->read += next;
r->remain -= next;
}
};
png_set_read_fn(png, &reader, readFn);
png_read_info(png, pinfo);
}
PngDecodeSession::~PngDecodeSession() {
png_destroy_read_struct(&png, &pinfo, nullptr);
}
std::unique_ptr<PngDecodeSession> PngDecoder::initDecodeSession() {
auto session = std::make_unique<PngDecodeSession>(stream.get());
session->init();
return session;
}
ImageInfo PngDecoder::parseInfo() {
auto session = initDecodeSession();
auto png = session->png;
auto pinfo = session->pinfo;
uint32_t imageWidth = png_get_image_width(png, pinfo);
uint32_t imageHeight = png_get_image_height(png, pinfo);
Rect bounds{};
if (cropBorders) {
uint8_t colorType = png_get_color_type(png, pinfo);
uint8_t bitDepth = png_get_bit_depth(png, pinfo);
png_set_expand(png);
if (bitDepth == 16) {
png_set_scale_16(png);
}
if (!(colorType == PNG_COLOR_TYPE_GRAY || colorType == PNG_COLOR_TYPE_GRAY_ALPHA)) {
png_set_rgb_to_gray(png, 1, -1, -1);
}
int32_t passes = png_set_interlace_handling(png);
auto pixels = std::make_unique<uint8_t[]>(imageWidth * imageHeight);
uint8_t* pixelsPos;
while (--passes >= 0) {
pixelsPos = pixels.get();
for (uint32_t i = 0; i < imageHeight; ++i) {
png_read_row(png, pixelsPos, nullptr);
pixelsPos += imageWidth;
}
}
bounds = findBorders(pixels.get(), imageWidth, imageHeight);
} else {
bounds = Rect { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
}
ImageInfo info {
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds
};
return info;
}
void PngDecoder::decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
auto session = initDecodeSession();
auto png = session->png;
auto pinfo = session->pinfo;
uint8_t colorType = png_get_color_type(png, pinfo);
uint8_t bitDepth = png_get_bit_depth(png, pinfo);
png_set_expand(png);
if (bitDepth == 16) {
png_set_scale_16(png);
}
if (colorType == PNG_COLOR_TYPE_GRAY || colorType == PNG_COLOR_TYPE_GRAY_ALPHA) {
png_set_gray_to_rgb(png);
}
if (!(colorType & (uint8_t) PNG_COLOR_MASK_ALPHA)) {
png_set_add_alpha(png, 0xff, PNG_FILLER_AFTER);
}
int32_t passes = png_set_interlace_handling(png);
uint32_t inComponents = 4; // RGB565 is not supported by libpng
uint32_t inStride = info.imageWidth * inComponents;
uint32_t inStrideOffset = inRect.x * inComponents;
uint32_t outStride = outRect.width * (rgb565 ? 2 : 4);
auto rowFunc = rgb565 ? &RGBA8888_to_RGB565_row : &RGBA8888_to_RGBA8888_row;
if (sampleSize == 1) {
auto inRow = std::make_unique<uint8_t[]>(inStride);
auto* inRowPtr = inRow.get();
uint8_t* outPixelsPos = nullptr;
uint32_t inRemainY = info.imageHeight - inRect.height - inRect.y;
while (--passes >= 0) {
outPixelsPos = outPixels;
png_skip_rows(png, inRect.y);
for (uint32_t i = 0; i < inRect.height; ++i) {
png_read_row(png, inRowPtr, nullptr);
rowFunc(outPixelsPos, inRowPtr + inStrideOffset, nullptr, outRect.width, 1);
outPixelsPos += outStride;
}
png_skip_rows(png, inRemainY);
}
} else {
uint32_t skipStart = (sampleSize - 2) / 2;
uint32_t skipEnd = sampleSize - 2 - skipStart;
uint32_t inWidthRounded = outRect.width * sampleSize;
uint32_t inHeightRounded = outRect.height * sampleSize;
uint32_t inRemainY = info.imageHeight - inHeightRounded - inRect.y;
if (passes == 1) {
// Keep the row buffers alive for the whole loop; calling .get() on a
// temporary unique_ptr would leave these pointers dangling.
auto inRow1Buf = std::make_unique<uint8_t[]>(inStride);
auto inRow2Buf = std::make_unique<uint8_t[]>(inStride);
uint8_t* inRow1 = inRow1Buf.get();
uint8_t* inRow2 = inRow2Buf.get();
uint8_t* outPixelsPos = outPixels;
png_skip_rows(png, inRect.y);
for (uint32_t i = 0; i < outRect.height; ++i) {
png_skip_rows(png, skipStart);
png_read_row(png, inRow1, nullptr);
png_read_row(png, inRow2, nullptr);
rowFunc(outPixelsPos, inRow1 + inStrideOffset, inRow2 + inStrideOffset, outRect.width, sampleSize);
outPixelsPos += outStride;
png_skip_rows(png, skipEnd);
}
} else {
auto tmpPixels = std::make_unique<uint8_t[]>(inStride * outRect.height * 2);
uint8_t* tmpPixelsPos = nullptr;
while (--passes >= 0) {
png_skip_rows(png, inRect.y);
tmpPixelsPos = tmpPixels.get();
for (uint32_t i = 0; i < outRect.height; ++i) {
png_skip_rows(png, skipStart);
png_read_row(png, tmpPixelsPos, nullptr);
tmpPixelsPos += inStride;
png_read_row(png, tmpPixelsPos, nullptr);
tmpPixelsPos += inStride;
png_skip_rows(png, skipEnd);
}
png_skip_rows(png, inRemainY);
}
uint8_t* outPixelsPos = outPixels;
tmpPixelsPos = tmpPixels.get();
for (uint32_t i = 0; i < outRect.height; ++i) {
rowFunc(outPixelsPos, tmpPixelsPos + inStrideOffset, tmpPixelsPos + inStride + inStrideOffset,
outRect.width, sampleSize);
outPixelsPos += outStride;
tmpPixelsPos += inStride * 2;
}
}
}
}
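
As a worked example of the subsampled path above: with sampleSize = 4, skipStart = (4 - 2) / 2 = 1 and skipEnd = 4 - 2 - 1 = 1, so each output row skips one source row, reads two adjacent rows, and skips one more, consuming exactly four source rows; inWidthRounded = outRect.width * 4 and inHeightRounded = outRect.height * 4 are the portion of the image actually consumed, and inRemainY is whatever remains below it. The two rows are then merged by rowFunc from row_convert.cpp, which is not shown in this excerpt.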

42
library/src/main/cpp/decoder_png.h Normal file
View File

@@ -0,0 +1,42 @@
//
// Created by len on 24/12/20.
//
#ifndef IMAGEDECODER_DECODER_PNG_H
#define IMAGEDECODER_DECODER_PNG_H
#include "decoder_base.h"
#include "png.h"
struct PngReader {
uint8_t* bytes;
uint32_t read;
uint32_t remain;
};
// Wrap the PNG C API in this class to automatically manage memory
class PngDecodeSession {
public:
PngDecodeSession(Stream* stream);
~PngDecodeSession();
png_struct* png;
png_info* pinfo;
PngReader reader;
void init();
};
class PngDecoder: public BaseDecoder {
public:
PngDecoder(std::unique_ptr<Stream>&& stream, bool cropBorders);
static bool handles(const uint8_t* stream);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize);
private:
ImageInfo parseInfo();
std::unique_ptr<PngDecodeSession> initDecodeSession();
};
#endif //IMAGEDECODER_DECODER_PNG_H

77
library/src/main/cpp/decoder_webp.cpp Normal file
View File

@@ -0,0 +1,77 @@
//
// Created by len on 30/12/20.
//
#include "decoder_webp.h"
bool WebpDecoder::handles(const uint8_t* stream) {
return stream[0] == 0x52 && stream[1] == 0x49 && stream[2] == 0x46 && stream[3] == 0x46;
}
WebpDecoder::WebpDecoder(
std::unique_ptr<Stream>&& stream,
bool cropBorders
) : BaseDecoder(std::move(stream), cropBorders) {
this->info = parseInfo();
}
ImageInfo WebpDecoder::parseInfo() {
WebPBitstreamFeatures features;
if (WebPGetFeatures(stream->bytes, 32, &features) != VP8_STATUS_OK) {
throw std::runtime_error("Failed to parse webp");
}
uint32_t imageWidth = features.width;
uint32_t imageHeight = features.height;
Rect bounds{};
if (cropBorders) {
int iw = features.width;
int ih = features.height;
uint8_t *u, *v;
int stride, uvStride;
auto* luma = WebPDecodeYUV(stream->bytes, stream->size, &iw, &ih, &u, &v, &stride, &uvStride);
bounds = findBorders(luma, imageWidth, imageHeight);
WebPFree(luma);
} else {
bounds = { .x = 0, .y = 0, .width = imageWidth, .height = imageHeight };
}
return ImageInfo {
.imageWidth = imageWidth,
.imageHeight = imageHeight,
.isAnimated = false,
.bounds = bounds
};
}
void WebpDecoder::decode(uint8_t *outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize) {
WebPDecoderConfig config;
WebPInitDecoderConfig(&config);
// Set decode region
config.options.use_cropping = inRect.width != info.imageWidth || inRect.height != info.imageHeight;
config.options.crop_left = inRect.x;
config.options.crop_top = inRect.y;
config.options.crop_width = inRect.width;
config.options.crop_height = inRect.height;
// Set sample size
config.options.use_scaling = sampleSize > 1;
config.options.scaled_width = outRect.width;
config.options.scaled_height = outRect.height;
// Set colorspace and stride params
uint32_t outStride = outRect.width * (rgb565 ? 2 : 4);
config.output.colorspace = rgb565 ? MODE_RGB_565 : MODE_RGBA;
config.output.u.RGBA.rgba = outPixels;
config.output.u.RGBA.size = outStride * outRect.height;
config.output.u.RGBA.stride = outStride;
config.output.is_external_memory = 1;
VP8StatusCode code = WebPDecode(stream->bytes, stream->size, &config);
if (code != VP8_STATUS_OK) {
throw std::runtime_error("Failed to decode image");
}
}
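
In concrete terms, and assuming the (not shown) caller derives outRect by dividing inRect by sampleSize: decoding a 1000 x 1500 region with sampleSize = 2 into RGBA gives outRect = 500 x 750, enables use_scaling, and sets outStride = 500 * 4 = 2000 bytes, so config.output.u.RGBA.size = 2000 * 750 = 1,500,000 bytes must match the size of the externally supplied outPixels buffer.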

23
library/src/main/cpp/decoder_webp.h Normal file
View File

@@ -0,0 +1,23 @@
//
// Created by len on 30/12/20.
//
#ifndef IMAGEDECODER_DECODER_WEBP_H
#define IMAGEDECODER_DECODER_WEBP_H
#include "decoder_base.h"
#include "stream.h"
#include <src/webp/decode.h>
class WebpDecoder: public BaseDecoder {
public:
WebpDecoder(std::unique_ptr<Stream>&& stream, bool cropBorders);
static bool handles(const uint8_t* stream);
void decode(uint8_t* outPixels, Rect outRect, Rect inRect, bool rgb565, uint32_t sampleSize);
private:
ImageInfo parseInfo();
};
#endif //IMAGEDECODER_DECODER_WEBP_H

27
library/src/main/cpp/java_objects.cpp Normal file
View File

@@ -0,0 +1,27 @@
//
// Created by len on 23/12/20.
//
#include "java_objects.h"
static jclass imageDecoderCls;
static jmethodID imageDecoderCtor;
static jmethodID createBitmapMethod;
void init_java_objects(JNIEnv* env) {
jclass tmp;
tmp = env->FindClass("tachiyomi/decoder/ImageDecoder");
imageDecoderCls = (jclass) env->NewGlobalRef(tmp);
imageDecoderCtor = env->GetMethodID(imageDecoderCls, "<init>", "(JII)V");
createBitmapMethod = env->GetStaticMethodID(imageDecoderCls, "createBitmap", "(IIZ)Landroid/graphics/Bitmap;");
env->DeleteLocalRef(tmp);
}
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width, jint height) {
return env->NewObject(imageDecoderCls, imageDecoderCtor, decoderPtr, width, height);
}
jobject create_bitmap(JNIEnv* env, jint width, jint height, jboolean rgb565) {
return env->CallStaticObjectMethod(imageDecoderCls, createBitmapMethod, width, height, rgb565);
}

16
library/src/main/cpp/java_objects.h Normal file
View File

@@ -0,0 +1,16 @@
//
// Created by len on 23/12/20.
//
#ifndef IMAGEDECODER_JAVA_OBJECTS_H
#define IMAGEDECODER_JAVA_OBJECTS_H
#include <jni.h>
void init_java_objects(JNIEnv* env);
jobject create_image_decoder(JNIEnv* env, jlong decoderPtr, jint width, jint height);
jobject create_bitmap(JNIEnv* env, jint width, jint height, jboolean rgb565);
#endif //IMAGEDECODER_JAVA_OBJECTS_H

84
library/src/main/cpp/java_stream.cpp Normal file
View File

@@ -0,0 +1,84 @@
//
// Created by len on 23/12/20.
//
#include "java_stream.h"
#define BUFFER_SIZE 8192
#define CONTAINER_DEFAULT_SIZE (BUFFER_SIZE * 50)
static jmethodID readMethod;
static jmethodID closeMethod;
static jmethodID availableMethod;
void init_java_stream(JNIEnv* env) {
jclass streamCls = env->FindClass("java/io/InputStream");
readMethod = env->GetMethodID(streamCls, "read", "([BII)I");
closeMethod = env->GetMethodID(streamCls, "close", "()V");
availableMethod = env->GetMethodID(streamCls, "available", "()I");
env->DeleteLocalRef(streamCls);
}
void close_java_stream(JNIEnv* env, jobject stream) {
env->CallVoidMethod(stream, closeMethod);
if (env->ExceptionCheck()) {
env->ExceptionClear();
}
}
std::unique_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream) {
jbyteArray buffer;
uint8_t* stream = nullptr;
int available = env->CallIntMethod(jstream, availableMethod);
uint32_t streamReservedSize = available > 0 ? available : CONTAINER_DEFAULT_SIZE;
uint32_t streamOffset = 0;
buffer = env->NewByteArray(BUFFER_SIZE);
if (!buffer) {
goto fail;
}
// Use malloc to make it compatible with realloc and C++ unique_ptr with custom deleter
stream = (uint8_t*) malloc(streamReservedSize);
if (!stream) {
goto fail;
}
int read;
while (true) {
read = env->CallIntMethod(jstream, readMethod, buffer, 0, BUFFER_SIZE);
if (env->ExceptionCheck()) {
env->ExceptionClear();
goto fail;
}
if (read < 0) {
break;
}
if (streamReservedSize < streamOffset + read) {
// Grow by 1.5x, but never below what the pending chunk needs
streamReservedSize = (uint32_t) (streamReservedSize * 1.5);
if (streamReservedSize < streamOffset + read) {
streamReservedSize = streamOffset + read;
}
auto* tmp = (uint8_t*) realloc(stream, streamReservedSize);
if (!tmp) {
goto fail;
}
stream = tmp;
}
auto* dest = reinterpret_cast<jbyte*>(stream + streamOffset);
env->GetByteArrayRegion(buffer, 0, read, dest);
streamOffset += read;
}
if (streamOffset == 0) {
goto fail;
}
close_java_stream(env, jstream);
return std::make_unique<Stream>(stream, streamOffset);
fail:
free(stream);
close_java_stream(env, jstream);
return nullptr;
}
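
Concretely: when InputStream.available() reports nothing, the native buffer starts at CONTAINER_DEFAULT_SIZE = 8192 * 50 = 409,600 bytes; each read() call pulls at most BUFFER_SIZE = 8,192 bytes through the reused jbyteArray, and whenever a chunk would not fit, the reservation is grown by a factor of 1.5 with realloc before GetByteArrayRegion copies the chunk into native memory.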

18
library/src/main/cpp/java_stream.h Normal file
View File

@@ -0,0 +1,18 @@
//
// Created by len on 23/12/20.
//
#ifndef IMAGEDECODER_JAVA_STREAM_H
#define IMAGEDECODER_JAVA_STREAM_H
#include <stdlib.h>
#include <memory>
#include <jni.h>
#include "log.h"
#include "stream.h"
void init_java_stream(JNIEnv* env);
std::unique_ptr<Stream> read_all_java_stream(JNIEnv* env, jobject jstream);
#endif //IMAGEDECODER_JAVA_STREAM_H

114
library/src/main/cpp/java_wrapper.cpp Normal file
View File

@@ -0,0 +1,114 @@
//
// Created by len on 23/12/20.
//
#include <jni.h>