android_debug: reduce debug app size by 139 MB
All checks were successful
firka/firka/pipeline/head This commit looks good
This one is a really big hack, but essentially we extract app-debug.apk after it has finished building, compress every libflutter.so inside it with brotli (level 6 on debug builds), and then decompress the right one for the device's ABI and load it at app launch.
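The app-side half of the hack (picking, decompressing and loading the right libflutter.so at startup) is not part of this diff. Below is a minimal sketch of what that launch-time step could look like, assuming a hypothetical helper invoked from AppMain before the Flutter engine initializes; the class and file names are illustrative, and only the flutter-br.json / flutter-br-<abi> asset layout and the bundled org.brotli.dec decoder come from this commit.

// Hypothetical sketch only: decompress the brotli-compressed libflutter.so for the
// current ABI into the app's files dir and load it before Flutter's own loader runs.
import android.content.Context
import android.os.Build
import org.brotli.dec.BrotliInputStream
import org.json.JSONObject
import java.io.File
import java.security.MessageDigest

object FlutterBrLoader { // illustrative name, not from this commit
    fun loadCompressedFlutter(context: Context) {
        val abi = Build.SUPPORTED_ABIS[0]
        // sha256 recorded per ABI by the Gradle task in flutter-br.json
        val expected = JSONObject(
            context.assets.open("flutter-br.json").readBytes().decodeToString()
        ).optString("$abi/libflutter.so")

        val out = File(context.filesDir, "flutter-br/$abi/libflutter.so")
        if (!out.exists() || sha256(out) != expected) {
            out.parentFile?.mkdirs()
            context.assets.open("flutter-br-$abi/libflutter.so.br").use { input ->
                BrotliInputStream(input).use { br ->
                    out.outputStream().use { br.copyTo(it) }
                }
            }
        }
        // Load the decompressed engine; how this is wired into Flutter's own
        // library loading is app-specific and not shown in this diff.
        System.load(out.absolutePath)
    }

    private fun sha256(f: File): String =
        MessageDigest.getInstance("SHA-256").digest(f.readBytes())
            .joinToString("") { "%02x".format(it) }
}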
This commit is contained in:
parent
b5a59c510c
commit
3787c697e5
@@ -1,5 +1,11 @@
import org.apache.commons.io.FileUtils
import java.io.FileInputStream
import java.security.MessageDigest
import java.util.Properties
import java.util.zip.ZipEntry
import java.util.zip.ZipOutputStream
import java.util.zip.ZipOutputStream.STORED
import java.util.zip.ZipOutputStream.DEFLATED

plugins {
    id("com.android.application")
@@ -84,6 +90,8 @@ android {
                logger.error("[WARNING] DO NOT STORE ANY SENSITIVE DATA INSIDE THE APP")
                logger.error("[WARNING] Because an attacker could steal it, if you sideload their malicious app.")
            }

            proguardFiles(getDefaultProguardFile("proguard-android-optimize.txt"), "proguard-rules.pro")
        }
    }
}
@@ -94,3 +102,346 @@ dependencies {
flutter {
    source = "../.."
}

tasks.register("transformAndResignDebugApk") {
    group = "build"
    description = "Transform and resign debug APK with debug key"

    dependsOn("assembleDebug")

    doLast {
        transformApks(true)
    }
}

tasks.register("transformAndResignReleaseApk") {
    group = "build"
    description = "Transform and resign release APK with release key"

    dependsOn("assembleRelease")

    doLast {
        transformApks(false)
    }
}

afterEvaluate {
    tasks.findByName("assembleDebug")?.finalizedBy("transformAndResignDebugApk")
    tasks.findByName("assembleRelease")?.finalizedBy("transformAndResignReleaseApk")
}

fun transformApks(debug: Boolean) {
    val buildDir = project.buildDir
    val apkDir = File(buildDir, "outputs/flutter-apk")
    val apks = apkDir.listFiles()!!
    val flavor = if (debug) { "debug" } else { "release" }

    println("Starting APK transformation process...")

    var c = 0
    apks
        .filter { apk -> apk.name.startsWith("app-") && apk.name.endsWith("-$flavor.apk") }
        .forEach { c++; transformAndSignApk(apkDir, it.nameWithoutExtension, debug) }

    println("Transformed: $c apks")
}

fun transformAndSignApk(apkDir: File, name: String, debug: Boolean) {
    val originalApk = File(apkDir, "$name.apk")
    val transformedApk = File(apkDir, "$name-transformed.apk")
    val finalApk = File(apkDir, "$name-resigned.apk")
    val finalIdsig = File(apkDir, "$name-resigned.apk.idsig")

    if (!originalApk.exists()) {
        throw GradleException("Original APK not found at: ${originalApk.absolutePath}")
    }

    if (transformedApk.exists()) transformedApk.delete()
    if (finalApk.exists()) finalApk.delete()

    println("Original APK: ${originalApk.absolutePath}")

    try {
        println("Transforming APK...")
        transformApk(originalApk, transformedApk, if (debug) { "6" } else { "Z" })

        if (debug) {
            println("Signing with debug key...")
            signWithDebugKey(transformedApk, finalApk)
        } else {
            println("Signing with release key...")
            signWithReleaseKey(transformedApk, finalApk)
        }

        if (finalApk.exists()) {
            originalApk.delete()
            finalIdsig.delete()
            finalApk.renameTo(originalApk)
            println("APK successfully transformed")
            println("Final APK: ${originalApk.absolutePath}")
        }

        transformedApk.delete()
    } catch (e: Exception) {
        throw GradleException("Failed to transform and resign APK: ${e.message}", e)
    }
}

fun transformApk(input: File, output: File, compressionLevel: String = "Z") {
    val tempDir = File(project.buildDir, "tmp/apk-transform")
    tempDir.deleteRecursively()
    tempDir.mkdirs()

    val brotli = findToolInPath("brotli")
        ?: throw Exception("Brotli not found in path")

    // Unpack the APK into a temp directory.
    copy {
        from(zipTree(input))
        into(tempDir)
    }

    val assetsDir = File(tempDir, "assets")

    // Strip the old signature files; the APK gets re-signed afterwards.
    val metaInf = File(tempDir, "META-INF")
    val metaInfFiles = metaInf.listFiles()
    for (file in metaInfFiles!!) {
        if (file.name.endsWith("MF") || file.name.endsWith("SF")
            || file.name.endsWith("RSA")) {
            file.delete()
        }
    }

    // Compress each ABI's libflutter.so into assets/ and record its SHA-256,
    // so the app can pick and verify the right one at launch.
    val arches = File(tempDir, "lib").listFiles()
    val compressedLibs = mutableMapOf<String, String>()
    for (arch in arches!!) {
        val libFlutter = File(arch, "libflutter.so")
        val libApp = File(arch, "libapp.so")

        if (!libFlutter.exists()) continue

        val compressedDir = File(assetsDir, "flutter-br-${arch.name}")
        val compressedFlutter = File(compressedDir, "libflutter.so.br")

        if (!compressedDir.exists()) compressedDir.mkdirs()

        compressedLibs["${arch.name}/libflutter.so"] = libFlutter.sha256()

        println("Compressing ${arch.name}/libflutter.so with brotli")
        exec {
            commandLine(
                brotli,
                "-$compressionLevel",
                libFlutter.absolutePath,
                "-o", compressedFlutter.absolutePath
            )
        }
        libFlutter.delete()
    }

    val json = groovy.json.JsonBuilder(compressedLibs)
    File(assetsDir, "flutter-br.json").writeText(json.toString())

    // Repack the APK; native libs and resources.arsc are stored uncompressed (STORED).
    val topDirL = tempDir.absolutePath.length + 1
    val zos = ZipOutputStream(output.outputStream())
    tempDir.walkTopDown().forEach { f ->
        if (f.absolutePath == tempDir.absolutePath) return@forEach

        var relName = f.absolutePath.substring(topDirL).replace("\\", "/")
        if (f.isDirectory && !relName.endsWith("/")) relName += "/"

        val compress = !relName.endsWith(".so") && !relName.endsWith(".arsc")
        zos.setMethod(if (compress) { DEFLATED } else { STORED })
        val entry = ZipEntry(relName)
        if (!compress) {
            entry.size = f.length()
            entry.crc = FileUtils.checksumCRC32(f)
        }
        zos.putNextEntry(entry)
        if (f.isFile) {
            zos.write(f.readBytes())
        }
        zos.closeEntry()
    }
    zos.close()

    ant.invokeMethod("zip", mapOf(
        "destfile" to output.absolutePath,
        "basedir" to tempDir.absolutePath,
        "level" to 0
    ))

    tempDir.deleteRecursively()
    println("APK transformed successfully")
}

fun File.sha256(): String {
    val md = MessageDigest.getInstance("SHA-256")
    val digest = md.digest(this.readBytes())
    return digest.fold("") { str, it -> str + "%02x".format(it) }
}

fun getDebugKeystorePath(): String {
    val userHome = System.getProperty("user.home")
    val debugKeystore = File(userHome, ".android/debug.keystore")

    if (!debugKeystore.exists()) {
        throw GradleException("Debug keystore not found at: ${debugKeystore.absolutePath}")
    }

    return debugKeystore.absolutePath
}

fun getDefaultAndroidSdkPath(): String? {
    val os = System.getProperty("os.name").lowercase()
    val userHome = System.getProperty("user.home")

    return when {
        os.contains("win") ->
            "$userHome\\AppData\\Local\\Android\\Sdk"
        os.contains("mac") ->
            "$userHome/Library/Android/sdk"
        os.contains("linux") ->
            "$userHome/Android/Sdk"
        else -> null
    }
}

fun findToolInPath(toolName: String): String? {
    val pathEnvironment = System.getenv("PATH")
    val pathDirs = pathEnvironment.split(File.pathSeparator)

    val executableNames = when {
        System.getProperty("os.name").lowercase().contains("win") ->
            listOf("$toolName.exe", toolName)
        else ->
            listOf(toolName)
    }

    for (pathDir in pathDirs) {
        for (execName in executableNames) {
            val possibleTool = File(pathDir, execName)
            if (possibleTool.exists() && possibleTool.canExecute()) {
                return possibleTool.absolutePath
            }
        }
    }

    return null
}

fun findToolInSdkPath(toolName: String): String? {
    var androidHome: String? = System.getenv("ANDROID_HOME")
        ?: System.getenv("ANDROID_SDK_ROOT")

    if (androidHome == null) androidHome = getDefaultAndroidSdkPath()

    if (androidHome != null) {
        val buildTools = File(androidHome, "build-tools")
        if (buildTools.exists()) {
            val latestVersion = buildTools.listFiles()
                ?.filter { it.isDirectory }
                ?.filter { it.name != "debian" }
                ?.maxByOrNull { it.name }

            if (latestVersion != null) {
                val toolExec = File(latestVersion, toolName)
                if (toolExec.exists()) {
                    return toolExec.absolutePath
                }
            }
        }
    }

    if (!toolName.contains(".exe")) {
        val exeTool = findToolInSdkPath("$toolName.exe")
        if (exeTool != null) return exeTool
    }
    if (!toolName.contains(".sh")) {
        val shTool = findToolInSdkPath("$toolName.sh")
        if (shTool != null) return shTool
    }
    if (!toolName.contains(".bat")) {
        val batTool = findToolInSdkPath("$toolName.bat")
        if (batTool != null) return batTool
    }

    return null
}

fun signWithDebugKey(input: File, output: File) {
    val debugKeystore = getDebugKeystorePath()
    val debugKeystorePassword = "android"
    val debugKeyAlias = "androiddebugkey"
    val debugKeyPassword = "android"

    val zipAlign: String = findToolInSdkPath("zipalign")
        ?: throw Exception("Could not find zipalign in the Android SDK")
    val apksigner: String = findToolInSdkPath("apksigner")
        ?: throw Exception("Could not find apksigner in the Android SDK")

    exec {
        commandLine(
            zipAlign,
            "-v", "4",
            input.absolutePath,
            output.absolutePath
        )
    }

    exec {
        commandLine(
            apksigner, "sign",
            "--ks", debugKeystore,
            "--ks-pass", "pass:$debugKeystorePassword",
            "--ks-key-alias", debugKeyAlias,
            "--key-pass", "pass:$debugKeyPassword",
            output.absolutePath
        )
    }

    println("APK signed and aligned successfully")
}

fun signWithReleaseKey(input: File, output: File) {
    val secretsDir = File(projectDir.absolutePath, "../../../secrets/")
    val propsFile = File(secretsDir, "keystore.properties")

    if (!propsFile.exists()) {
        throw Exception("Release keystore not found!")
    }

    val props = loadProperties(propsFile)

    val releaseKeystore = File(secretsDir, props["storeFile"].toString())
    val releaseKeystorePassword = props["storePassword"] as String
    val releaseKeyAlias = props["keyAlias"] as String
    val releaseKeyPassword = props["keyPassword"] as String

    val zipAlign: String = findToolInSdkPath("zipalign")
        ?: throw Exception("Could not find zipalign in the Android SDK")
    val apksigner: String = findToolInSdkPath("apksigner")
        ?: throw Exception("Could not find apksigner in the Android SDK")

    exec {
        commandLine(
            zipAlign,
            "-v", "4",
            input.absolutePath,
            output.absolutePath
        )
    }

    exec {
        commandLine(
            apksigner, "sign",
            "--ks", releaseKeystore,
            "--ks-pass", "pass:$releaseKeystorePassword",
            "--ks-key-alias", releaseKeyAlias,
            "--key-pass", "pass:$releaseKeyPassword",
            output.absolutePath
        )
    }

    println("APK signed and aligned successfully")
}

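Since the task deletes the original APK and renames the re-signed copy over it, it can be worth spot-checking the result by hand from the outputs/flutter-apk directory; for example, with the standard build-tools commands (not part of this commit):

zipalign -c -v 4 app-debug.apk                 # confirm 4-byte alignment survived the repack
apksigner verify --print-certs app-debug.apk   # confirm the new signature is valid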
1
firka/android/app/proguard-rules.pro
vendored
Normal file
@@ -0,0 +1 @@
-keep class org.brotli.** { *; }
@@ -8,7 +8,7 @@
     <uses-permission android:name="android.permission.VIBRATE" />
 
     <application
-        android:name="${applicationName}"
+        android:name=".AppMain"
         android:icon="@mipmap/launcher_icon">
         <activity
             android:name=".MainActivity"
@@ -0,0 +1,15 @@
/* Copyright 2018 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
package org.brotli.common;

/** POJO enum that mirrors C BrotliSharedDictionaryType. */
public class SharedDictionaryType {
  // Disallow instantiation.
  private SharedDictionaryType() {}

  public static final int RAW = 0;
  public static final int SERIALIZED = 1;
}
289
firka/android/app/src/main/java/org/brotli/dec/BitReader.java
Normal file
@@ -0,0 +1,289 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
/**
|
||||
* Bit reading helpers.
|
||||
*/
|
||||
final class BitReader {
|
||||
|
||||
// Possible values: {5, 6}. 5 corresponds to 32-bit build, 6 to 64-bit. This value is used for
|
||||
// JIT conditional compilation.
|
||||
private static final int LOG_BITNESS = Utils.getLogBintness();
|
||||
|
||||
// Not only Java compiler prunes "if (const false)" code, but JVM as well.
|
||||
// Code under "if (DEBUG != 0)" have zero performance impact (outside unit tests).
|
||||
private static final int DEBUG = Utils.isDebugMode();
|
||||
|
||||
static final int BITNESS = 1 << LOG_BITNESS;
|
||||
|
||||
private static final int BYTENESS = BITNESS / 8;
|
||||
private static final int CAPACITY = 4096;
|
||||
// After encountering the end of the input stream, this amount of zero bytes will be appended.
|
||||
private static final int SLACK = 64;
|
||||
private static final int BUFFER_SIZE = CAPACITY + SLACK;
|
||||
// Don't bother to replenish the buffer while this number of bytes is available.
|
||||
private static final int SAFEGUARD = 36;
|
||||
private static final int WATERLINE = CAPACITY - SAFEGUARD;
|
||||
|
||||
// "Half" refers to "half of native integer type", i.e. on 64-bit machines it is 32-bit type,
|
||||
// on 32-bit machines it is 16-bit.
|
||||
private static final int HALF_BITNESS = BITNESS / 2;
|
||||
private static final int HALF_SIZE = BYTENESS / 2;
|
||||
private static final int HALVES_CAPACITY = CAPACITY / HALF_SIZE;
|
||||
private static final int HALF_BUFFER_SIZE = BUFFER_SIZE / HALF_SIZE;
|
||||
private static final int HALF_WATERLINE = WATERLINE / HALF_SIZE;
|
||||
|
||||
private static final int LOG_HALF_SIZE = LOG_BITNESS - 4;
|
||||
|
||||
/**
|
||||
* Fills up the input buffer.
|
||||
*
|
||||
* <p> No-op if there are at least 36 bytes present after current position.
|
||||
*
|
||||
* <p> After encountering the end of the input stream, 64 additional zero bytes are copied to the
|
||||
* buffer.
|
||||
*/
|
||||
static void readMoreInput(State s) {
|
||||
if (s.halfOffset > HALF_WATERLINE) {
|
||||
doReadMoreInput(s);
|
||||
}
|
||||
}
|
||||
|
||||
static void doReadMoreInput(State s) {
|
||||
if (s.endOfStreamReached != 0) {
|
||||
if (halfAvailable(s) >= -2) {
|
||||
return;
|
||||
}
|
||||
throw new BrotliRuntimeException("No more input");
|
||||
}
|
||||
final int readOffset = s.halfOffset << LOG_HALF_SIZE;
|
||||
int bytesInBuffer = CAPACITY - readOffset;
|
||||
// Move unused bytes to the head of the buffer.
|
||||
Utils.copyBytesWithin(s.byteBuffer, 0, readOffset, CAPACITY);
|
||||
s.halfOffset = 0;
|
||||
while (bytesInBuffer < CAPACITY) {
|
||||
final int spaceLeft = CAPACITY - bytesInBuffer;
|
||||
final int len = Utils.readInput(s.input, s.byteBuffer, bytesInBuffer, spaceLeft);
|
||||
// EOF is -1 in Java, but 0 in C#.
|
||||
if (len <= 0) {
|
||||
s.endOfStreamReached = 1;
|
||||
s.tailBytes = bytesInBuffer;
|
||||
bytesInBuffer += HALF_SIZE - 1;
|
||||
break;
|
||||
}
|
||||
bytesInBuffer += len;
|
||||
}
|
||||
bytesToNibbles(s, bytesInBuffer);
|
||||
}
|
||||
|
||||
static void checkHealth(State s, int endOfStream) {
|
||||
if (s.endOfStreamReached == 0) {
|
||||
return;
|
||||
}
|
||||
final int byteOffset = (s.halfOffset << LOG_HALF_SIZE) + ((s.bitOffset + 7) >> 3) - BYTENESS;
|
||||
if (byteOffset > s.tailBytes) {
|
||||
throw new BrotliRuntimeException("Read after end");
|
||||
}
|
||||
if ((endOfStream != 0) && (byteOffset != s.tailBytes)) {
|
||||
throw new BrotliRuntimeException("Unused bytes after end");
|
||||
}
|
||||
}
|
||||
|
||||
static void assertAccumulatorHealthy(State s) {
|
||||
if (s.bitOffset > BITNESS) {
|
||||
throw new IllegalStateException("Accumulator underloaded: " + s.bitOffset);
|
||||
}
|
||||
}
|
||||
|
||||
static void fillBitWindow(State s) {
|
||||
if (DEBUG != 0) {
|
||||
assertAccumulatorHealthy(s);
|
||||
}
|
||||
if (s.bitOffset >= HALF_BITNESS) {
|
||||
// Same as doFillBitWindow. JVM fails to inline it.
|
||||
if (BITNESS == 64) {
|
||||
s.accumulator64 = ((long) s.intBuffer[s.halfOffset++] << HALF_BITNESS)
|
||||
| (s.accumulator64 >>> HALF_BITNESS);
|
||||
} else {
|
||||
s.accumulator32 = ((int) s.shortBuffer[s.halfOffset++] << HALF_BITNESS)
|
||||
| (s.accumulator32 >>> HALF_BITNESS);
|
||||
}
|
||||
s.bitOffset -= HALF_BITNESS;
|
||||
}
|
||||
}
|
||||
|
||||
static void doFillBitWindow(State s) {
|
||||
if (DEBUG != 0) {
|
||||
assertAccumulatorHealthy(s);
|
||||
}
|
||||
if (BITNESS == 64) {
|
||||
s.accumulator64 = ((long) s.intBuffer[s.halfOffset++] << HALF_BITNESS)
|
||||
| (s.accumulator64 >>> HALF_BITNESS);
|
||||
} else {
|
||||
s.accumulator32 = ((int) s.shortBuffer[s.halfOffset++] << HALF_BITNESS)
|
||||
| (s.accumulator32 >>> HALF_BITNESS);
|
||||
}
|
||||
s.bitOffset -= HALF_BITNESS;
|
||||
}
|
||||
|
||||
static int peekBits(State s) {
|
||||
if (BITNESS == 64) {
|
||||
return (int) (s.accumulator64 >>> s.bitOffset);
|
||||
} else {
|
||||
return s.accumulator32 >>> s.bitOffset;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches bits from accumulator.
|
||||
*
|
||||
* WARNING: accumulator MUST contain at least the specified amount of bits,
|
||||
* otherwise BitReader will become broken.
|
||||
*/
|
||||
static int readFewBits(State s, int n) {
|
||||
final int val = peekBits(s) & ((1 << n) - 1);
|
||||
s.bitOffset += n;
|
||||
return val;
|
||||
}
|
||||
|
||||
static int readBits(State s, int n) {
|
||||
if (HALF_BITNESS >= 24) {
|
||||
return readFewBits(s, n);
|
||||
} else {
|
||||
return (n <= 16) ? readFewBits(s, n) : readManyBits(s, n);
|
||||
}
|
||||
}
|
||||
|
||||
private static int readManyBits(State s, int n) {
|
||||
final int low = readFewBits(s, 16);
|
||||
doFillBitWindow(s);
|
||||
return low | (readFewBits(s, n - 16) << 16);
|
||||
}
|
||||
|
||||
static void initBitReader(State s) {
|
||||
s.byteBuffer = new byte[BUFFER_SIZE];
|
||||
if (BITNESS == 64) {
|
||||
s.accumulator64 = 0;
|
||||
s.intBuffer = new int[HALF_BUFFER_SIZE];
|
||||
} else {
|
||||
s.accumulator32 = 0;
|
||||
s.shortBuffer = new short[HALF_BUFFER_SIZE];
|
||||
}
|
||||
s.bitOffset = BITNESS;
|
||||
s.halfOffset = HALVES_CAPACITY;
|
||||
s.endOfStreamReached = 0;
|
||||
prepare(s);
|
||||
}
|
||||
|
||||
private static void prepare(State s) {
|
||||
readMoreInput(s);
|
||||
checkHealth(s, 0);
|
||||
doFillBitWindow(s);
|
||||
doFillBitWindow(s);
|
||||
}
|
||||
|
||||
static void reload(State s) {
|
||||
if (s.bitOffset == BITNESS) {
|
||||
prepare(s);
|
||||
}
|
||||
}
|
||||
|
||||
static void jumpToByteBoundary(State s) {
|
||||
final int padding = (BITNESS - s.bitOffset) & 7;
|
||||
if (padding != 0) {
|
||||
final int paddingBits = readFewBits(s, padding);
|
||||
if (paddingBits != 0) {
|
||||
throw new BrotliRuntimeException("Corrupted padding bits");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static int halfAvailable(State s) {
|
||||
int limit = HALVES_CAPACITY;
|
||||
if (s.endOfStreamReached != 0) {
|
||||
limit = (s.tailBytes + (HALF_SIZE - 1)) >> LOG_HALF_SIZE;
|
||||
}
|
||||
return limit - s.halfOffset;
|
||||
}
|
||||
|
||||
static void copyRawBytes(State s, byte[] data, int offset, int length) {
|
||||
if ((s.bitOffset & 7) != 0) {
|
||||
throw new BrotliRuntimeException("Unaligned copyBytes");
|
||||
}
|
||||
|
||||
// Drain accumulator.
|
||||
while ((s.bitOffset != BITNESS) && (length != 0)) {
|
||||
data[offset++] = (byte) peekBits(s);
|
||||
s.bitOffset += 8;
|
||||
length--;
|
||||
}
|
||||
if (length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Get data from shadow buffer with "sizeof(int)" granularity.
|
||||
final int copyNibbles = Math.min(halfAvailable(s), length >> LOG_HALF_SIZE);
|
||||
if (copyNibbles > 0) {
|
||||
final int readOffset = s.halfOffset << LOG_HALF_SIZE;
|
||||
final int delta = copyNibbles << LOG_HALF_SIZE;
|
||||
System.arraycopy(s.byteBuffer, readOffset, data, offset, delta);
|
||||
offset += delta;
|
||||
length -= delta;
|
||||
s.halfOffset += copyNibbles;
|
||||
}
|
||||
if (length == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Read tail bytes.
|
||||
if (halfAvailable(s) > 0) {
|
||||
// length = 1..3
|
||||
fillBitWindow(s);
|
||||
while (length != 0) {
|
||||
data[offset++] = (byte) peekBits(s);
|
||||
s.bitOffset += 8;
|
||||
length--;
|
||||
}
|
||||
checkHealth(s, 0);
|
||||
return;
|
||||
}
|
||||
|
||||
// Now it is possible to copy bytes directly.
|
||||
while (length > 0) {
|
||||
final int len = Utils.readInput(s.input, data, offset, length);
|
||||
if (len == -1) {
|
||||
throw new BrotliRuntimeException("Unexpected end of input");
|
||||
}
|
||||
offset += len;
|
||||
length -= len;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates bytes to halves (int/short).
|
||||
*/
|
||||
static void bytesToNibbles(State s, int byteLen) {
|
||||
final byte[] byteBuffer = s.byteBuffer;
|
||||
final int halfLen = byteLen >> LOG_HALF_SIZE;
|
||||
if (BITNESS == 64) {
|
||||
final int[] intBuffer = s.intBuffer;
|
||||
for (int i = 0; i < halfLen; ++i) {
|
||||
intBuffer[i] = ((byteBuffer[i * 4] & 0xFF))
|
||||
| ((byteBuffer[(i * 4) + 1] & 0xFF) << 8)
|
||||
| ((byteBuffer[(i * 4) + 2] & 0xFF) << 16)
|
||||
| ((byteBuffer[(i * 4) + 3] & 0xFF) << 24);
|
||||
}
|
||||
} else {
|
||||
final short[] shortBuffer = s.shortBuffer;
|
||||
for (int i = 0; i < halfLen; ++i) {
|
||||
shortBuffer[i] = (short) ((byteBuffer[i * 2] & 0xFF)
|
||||
| ((byteBuffer[(i * 2) + 1] & 0xFF) << 8));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,172 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* {@link InputStream} decorator that decompresses brotli data.
|
||||
*
|
||||
* <p> Not thread-safe.
|
||||
*/
|
||||
public class BrotliInputStream extends InputStream {
|
||||
|
||||
public static final int DEFAULT_INTERNAL_BUFFER_SIZE = 256;
|
||||
|
||||
/**
|
||||
* Value expected by InputStream contract when stream is over.
|
||||
*
|
||||
* In Java it is -1.
|
||||
* In C# it is 0 (should be patched during transpilation).
|
||||
*/
|
||||
private static final int END_OF_STREAM_MARKER = -1;
|
||||
|
||||
/**
|
||||
* Internal buffer used for efficient byte-by-byte reading.
|
||||
*/
|
||||
private byte[] buffer;
|
||||
|
||||
/**
|
||||
* Number of decoded but still unused bytes in internal buffer.
|
||||
*/
|
||||
private int remainingBufferBytes;
|
||||
|
||||
/**
|
||||
* Next unused byte offset.
|
||||
*/
|
||||
private int bufferOffset;
|
||||
|
||||
/**
|
||||
* Decoder state.
|
||||
*/
|
||||
private final State state = new State();
|
||||
|
||||
/**
|
||||
* Creates a {@link InputStream} wrapper that decompresses brotli data.
|
||||
*
|
||||
* <p> For byte-by-byte reading ({@link #read()}) internal buffer with
|
||||
* {@link #DEFAULT_INTERNAL_BUFFER_SIZE} size is allocated and used.
|
||||
*
|
||||
* <p> Will block the thread until first {@link BitReader#CAPACITY} bytes of data of source
|
||||
* are available.
|
||||
*
|
||||
* @param source underlying data source
|
||||
* @throws IOException in case of corrupted data or source stream problems
|
||||
*/
|
||||
public BrotliInputStream(InputStream source) throws IOException {
|
||||
this(source, DEFAULT_INTERNAL_BUFFER_SIZE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a {@link InputStream} wrapper that decompresses brotli data.
|
||||
*
|
||||
* <p> For byte-by-byte reading ({@link #read()}) internal buffer of specified size is
|
||||
* allocated and used.
|
||||
*
|
||||
* <p> Will block the thread until first {@link BitReader#CAPACITY} bytes of data of source
|
||||
* are available.
|
||||
*
|
||||
* @param source compressed data source
|
||||
* @param byteReadBufferSize size of internal buffer used in case of
|
||||
* byte-by-byte reading
|
||||
* @throws IOException in case of corrupted data or source stream problems
|
||||
*/
|
||||
public BrotliInputStream(InputStream source, int byteReadBufferSize) throws IOException {
|
||||
if (byteReadBufferSize <= 0) {
|
||||
throw new IllegalArgumentException("Bad buffer size:" + byteReadBufferSize);
|
||||
} else if (source == null) {
|
||||
throw new IllegalArgumentException("source is null");
|
||||
}
|
||||
this.buffer = new byte[byteReadBufferSize];
|
||||
this.remainingBufferBytes = 0;
|
||||
this.bufferOffset = 0;
|
||||
try {
|
||||
Decode.initState(state, source);
|
||||
} catch (BrotliRuntimeException ex) {
|
||||
throw new IOException("Brotli decoder initialization failed", ex);
|
||||
}
|
||||
}
|
||||
|
||||
public void attachDictionaryChunk(byte[] data) {
|
||||
Decode.attachDictionaryChunk(state, data);
|
||||
}
|
||||
|
||||
public void enableEagerOutput() {
|
||||
Decode.enableEagerOutput(state);
|
||||
}
|
||||
|
||||
public void enableLargeWindow() {
|
||||
Decode.enableLargeWindow(state);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
Decode.close(state);
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
if (bufferOffset >= remainingBufferBytes) {
|
||||
remainingBufferBytes = read(buffer, 0, buffer.length);
|
||||
bufferOffset = 0;
|
||||
if (remainingBufferBytes == END_OF_STREAM_MARKER) {
|
||||
// Both Java and C# return the same value for EOF on single-byte read.
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return buffer[bufferOffset++] & 0xFF;
|
||||
}
|
||||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*/
|
||||
@Override
|
||||
public int read(byte[] destBuffer, int destOffset, int destLen) throws IOException {
|
||||
if (destOffset < 0) {
|
||||
throw new IllegalArgumentException("Bad offset: " + destOffset);
|
||||
} else if (destLen < 0) {
|
||||
throw new IllegalArgumentException("Bad length: " + destLen);
|
||||
} else if (destOffset + destLen > destBuffer.length) {
|
||||
throw new IllegalArgumentException(
|
||||
"Buffer overflow: " + (destOffset + destLen) + " > " + destBuffer.length);
|
||||
} else if (destLen == 0) {
|
||||
return 0;
|
||||
}
|
||||
int copyLen = Math.max(remainingBufferBytes - bufferOffset, 0);
|
||||
if (copyLen != 0) {
|
||||
copyLen = Math.min(copyLen, destLen);
|
||||
System.arraycopy(buffer, bufferOffset, destBuffer, destOffset, copyLen);
|
||||
bufferOffset += copyLen;
|
||||
destOffset += copyLen;
|
||||
destLen -= copyLen;
|
||||
if (destLen == 0) {
|
||||
return copyLen;
|
||||
}
|
||||
}
|
||||
try {
|
||||
state.output = destBuffer;
|
||||
state.outputOffset = destOffset;
|
||||
state.outputLength = destLen;
|
||||
state.outputUsed = 0;
|
||||
Decode.decompress(state);
|
||||
copyLen += state.outputUsed;
|
||||
copyLen = (copyLen > 0) ? copyLen : END_OF_STREAM_MARKER;
|
||||
return copyLen;
|
||||
} catch (BrotliRuntimeException ex) {
|
||||
throw new IOException("Brotli stream decoding failed", ex);
|
||||
}
|
||||
|
||||
// <{[INJECTED CODE]}>
|
||||
}
|
||||
}
|
@@ -0,0 +1,21 @@
/* Copyright 2015 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

package org.brotli.dec;

/**
 * Unchecked exception used internally.
 */
class BrotliRuntimeException extends RuntimeException {

  BrotliRuntimeException(String message) {
    super(message);
  }

  BrotliRuntimeException(String message, Throwable cause) {
    super(message, cause);
  }
}
58
firka/android/app/src/main/java/org/brotli/dec/Context.java
Normal file
@@ -0,0 +1,58 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
/**
|
||||
* Common context lookup table for all context modes.
|
||||
*/
|
||||
final class Context {
|
||||
|
||||
static final int[] LOOKUP = new int[2048];
|
||||
|
||||
private static final String UTF_MAP = " !! ! \"#$##%#$&'##(#)#+++++++++"
|
||||
+ "+((&*'##,---,---,-----,-----,-----&#'###.///.///./////./////./////&#'# ";
|
||||
private static final String UTF_RLE = "A/* ': & : $ \u0081 @";
|
||||
|
||||
private static void unpackLookupTable(int[] lookup, String map, String rle) {
|
||||
// LSB6, MSB6, SIGNED
|
||||
for (int i = 0; i < 256; ++i) {
|
||||
lookup[i] = i & 0x3F;
|
||||
lookup[512 + i] = i >> 2;
|
||||
lookup[1792 + i] = 2 + (i >> 6);
|
||||
}
|
||||
// UTF8
|
||||
for (int i = 0; i < 128; ++i) {
|
||||
lookup[1024 + i] = 4 * (map.charAt(i) - 32);
|
||||
}
|
||||
for (int i = 0; i < 64; ++i) {
|
||||
lookup[1152 + i] = i & 1;
|
||||
lookup[1216 + i] = 2 + (i & 1);
|
||||
}
|
||||
int offset = 1280;
|
||||
for (int k = 0; k < 19; ++k) {
|
||||
final int value = k & 3;
|
||||
final int rep = rle.charAt(k) - 32;
|
||||
for (int i = 0; i < rep; ++i) {
|
||||
lookup[offset++] = value;
|
||||
}
|
||||
}
|
||||
// SIGNED
|
||||
for (int i = 0; i < 16; ++i) {
|
||||
lookup[1792 + i] = 1;
|
||||
lookup[2032 + i] = 6;
|
||||
}
|
||||
lookup[1792] = 0;
|
||||
lookup[2047] = 7;
|
||||
for (int i = 0; i < 256; ++i) {
|
||||
lookup[1536 + i] = lookup[1792 + i] << 3;
|
||||
}
|
||||
}
|
||||
|
||||
static {
|
||||
unpackLookupTable(LOOKUP, UTF_MAP, UTF_RLE);
|
||||
}
|
||||
}
|
1357
firka/android/app/src/main/java/org/brotli/dec/Decode.java
Normal file
File diff suppressed because it is too large
72
firka/android/app/src/main/java/org/brotli/dec/Decoder.java
Normal file
@@ -0,0 +1,72 @@
|
||||
package org.brotli.dec;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
|
||||
public class Decoder {
|
||||
private static long decodeBytes(InputStream input, OutputStream output, byte[] buffer)
|
||||
throws IOException {
|
||||
long totalOut = 0;
|
||||
int readBytes;
|
||||
BrotliInputStream in = new BrotliInputStream(input);
|
||||
in.enableLargeWindow();
|
||||
try {
|
||||
while ((readBytes = in.read(buffer)) >= 0) {
|
||||
output.write(buffer, 0, readBytes);
|
||||
totalOut += readBytes;
|
||||
}
|
||||
} finally {
|
||||
in.close();
|
||||
}
|
||||
return totalOut;
|
||||
}
|
||||
|
||||
private static void decompress(String fromPath, String toPath, byte[] buffer) throws IOException {
|
||||
long start;
|
||||
long bytesDecoded;
|
||||
long end;
|
||||
InputStream in = null;
|
||||
OutputStream out = null;
|
||||
try {
|
||||
in = new FileInputStream(fromPath);
|
||||
out = new FileOutputStream(toPath);
|
||||
start = System.nanoTime();
|
||||
bytesDecoded = decodeBytes(in, out, buffer);
|
||||
end = System.nanoTime();
|
||||
} finally {
|
||||
if (in != null) {
|
||||
in.close(); // Hopefully, does not throw exception.
|
||||
}
|
||||
if (out != null) {
|
||||
out.close();
|
||||
}
|
||||
}
|
||||
|
||||
double timeDelta = (end - start) / 1000000000.0;
|
||||
if (timeDelta <= 0) {
|
||||
return;
|
||||
}
|
||||
double mbDecoded = bytesDecoded / (1024.0 * 1024.0);
|
||||
System.out.println(mbDecoded / timeDelta + " MiB/s");
|
||||
}
|
||||
|
||||
public static void main(String... args) throws IOException {
|
||||
if (args.length != 2 && args.length != 3) {
|
||||
System.out.println("Usage: decoder <compressed_in> <decompressed_out> [repeat]");
|
||||
return;
|
||||
}
|
||||
|
||||
int repeat = 1;
|
||||
if (args.length == 3) {
|
||||
repeat = Integer.parseInt(args[2]);
|
||||
}
|
||||
|
||||
byte[] buffer = new byte[1024 * 1024];
|
||||
for (int i = 0; i < repeat; ++i) {
|
||||
decompress(args[0], args[1], buffer);
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,94 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Collection of static dictionary words.
|
||||
*
|
||||
* <p>Dictionary content is loaded from binary resource when {@link #getData()} is executed for the
|
||||
* first time. Consequently, it saves memory and CPU in case dictionary is not required.
|
||||
*
|
||||
* <p>One possible drawback is that multiple threads that need dictionary data may be blocked (only
|
||||
* once in each classworld). To avoid this, it is enough to call {@link #getData()} proactively.
|
||||
*/
|
||||
public final class Dictionary {
|
||||
static final int MIN_DICTIONARY_WORD_LENGTH = 4;
|
||||
static final int MAX_DICTIONARY_WORD_LENGTH = 31;
|
||||
|
||||
private static ByteBuffer data = ByteBuffer.allocateDirect(0);
|
||||
static final int[] offsets = new int[32];
|
||||
static final int[] sizeBits = new int[32];
|
||||
|
||||
private static class DataLoader {
|
||||
static final boolean OK;
|
||||
|
||||
static {
|
||||
boolean ok = true;
|
||||
try {
|
||||
Class.forName(Dictionary.class.getPackage().getName() + ".DictionaryData");
|
||||
} catch (Throwable ex) {
|
||||
ok = false;
|
||||
}
|
||||
OK = ok;
|
||||
}
|
||||
}
|
||||
|
||||
public static void setData(ByteBuffer newData, int[] newSizeBits) {
|
||||
if ((Utils.isDirect(newData) == 0) || (Utils.isReadOnly(newData) == 0)) {
|
||||
throw new BrotliRuntimeException("newData must be a direct read-only byte buffer");
|
||||
}
|
||||
// TODO: is that so?
|
||||
if (newSizeBits.length > MAX_DICTIONARY_WORD_LENGTH) {
|
||||
throw new BrotliRuntimeException(
|
||||
"sizeBits length must be at most " + String.valueOf(MAX_DICTIONARY_WORD_LENGTH));
|
||||
}
|
||||
for (int i = 0; i < MIN_DICTIONARY_WORD_LENGTH; ++i) {
|
||||
if (newSizeBits[i] != 0) {
|
||||
throw new BrotliRuntimeException(
|
||||
"first " + String.valueOf(MIN_DICTIONARY_WORD_LENGTH) + " must be 0");
|
||||
}
|
||||
}
|
||||
final int[] dictionaryOffsets = Dictionary.offsets;
|
||||
final int[] dictionarySizeBits = Dictionary.sizeBits;
|
||||
System.arraycopy(newSizeBits, 0, dictionarySizeBits, 0, newSizeBits.length);
|
||||
int pos = 0;
|
||||
final int limit = newData.capacity();
|
||||
for (int i = 0; i < newSizeBits.length; ++i) {
|
||||
dictionaryOffsets[i] = pos;
|
||||
final int bits = dictionarySizeBits[i];
|
||||
if (bits != 0) {
|
||||
if (bits >= 31) {
|
||||
throw new BrotliRuntimeException("newSizeBits values must be less than 31");
|
||||
}
|
||||
pos += i << bits;
|
||||
if (pos <= 0 || pos > limit) {
|
||||
throw new BrotliRuntimeException("newSizeBits is inconsistent: overflow");
|
||||
}
|
||||
}
|
||||
}
|
||||
for (int i = newSizeBits.length; i < 32; ++i) {
|
||||
dictionaryOffsets[i] = pos;
|
||||
}
|
||||
if (pos != limit) {
|
||||
throw new BrotliRuntimeException("newSizeBits is inconsistent: underflow");
|
||||
}
|
||||
Dictionary.data = newData;
|
||||
}
|
||||
|
||||
public static ByteBuffer getData() {
|
||||
if (data.capacity() != 0) {
|
||||
return data;
|
||||
}
|
||||
if (!DataLoader.OK) {
|
||||
throw new BrotliRuntimeException("brotli dictionary is not set");
|
||||
}
|
||||
/* Might have been set when {@link DictionaryData} was loaded.*/
|
||||
return data;
|
||||
}
|
||||
}
|
File diff suppressed because one or more lines are too long
137
firka/android/app/src/main/java/org/brotli/dec/Huffman.java
Normal file
@@ -0,0 +1,137 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
/**
|
||||
* Utilities for building Huffman decoding tables.
|
||||
*/
|
||||
final class Huffman {
|
||||
|
||||
private static final int MAX_LENGTH = 15;
|
||||
|
||||
/**
|
||||
* Returns reverse(reverse(key, len) + 1, len).
|
||||
*
|
||||
* <p> reverse(key, len) is the bit-wise reversal of the len least significant bits of key.
|
||||
*/
|
||||
private static int getNextKey(int key, int len) {
|
||||
int step = 1 << (len - 1);
|
||||
while ((key & step) != 0) {
|
||||
step >>= 1;
|
||||
}
|
||||
return (key & (step - 1)) + step;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores {@code item} in {@code table[0], table[step], table[2 * step] .., table[end]}.
|
||||
*
|
||||
* <p> Assumes that end is an integer multiple of step.
|
||||
*/
|
||||
private static void replicateValue(int[] table, int offset, int step, int end, int item) {
|
||||
do {
|
||||
end -= step;
|
||||
table[offset + end] = item;
|
||||
} while (end > 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param count histogram of bit lengths for the remaining symbols,
|
||||
* @param len code length of the next processed symbol.
|
||||
* @return table width of the next 2nd level table.
|
||||
*/
|
||||
private static int nextTableBitSize(int[] count, int len, int rootBits) {
|
||||
int left = 1 << (len - rootBits);
|
||||
while (len < MAX_LENGTH) {
|
||||
left -= count[len];
|
||||
if (left <= 0) {
|
||||
break;
|
||||
}
|
||||
len++;
|
||||
left <<= 1;
|
||||
}
|
||||
return len - rootBits;
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds Huffman lookup table assuming code lengths are in symbol order.
|
||||
*
|
||||
* @return number of slots used by resulting Huffman table
|
||||
*/
|
||||
static int buildHuffmanTable(int[] tableGroup, int tableIdx, int rootBits, int[] codeLengths,
|
||||
int codeLengthsSize) {
|
||||
final int tableOffset = tableGroup[tableIdx];
|
||||
int key; // Reversed prefix code.
|
||||
final int[] sorted = new int[codeLengthsSize]; // Symbols sorted by code length.
|
||||
// TODO(eustas): fill with zeroes?
|
||||
final int[] count = new int[MAX_LENGTH + 1]; // Number of codes of each length.
|
||||
final int[] offset = new int[MAX_LENGTH + 1]; // Offsets in sorted table for each length.
|
||||
int symbol;
|
||||
|
||||
// Build histogram of code lengths.
|
||||
for (symbol = 0; symbol < codeLengthsSize; symbol++) {
|
||||
count[codeLengths[symbol]]++;
|
||||
}
|
||||
|
||||
// Generate offsets into sorted symbol table by code length.
|
||||
offset[1] = 0;
|
||||
for (int len = 1; len < MAX_LENGTH; len++) {
|
||||
offset[len + 1] = offset[len] + count[len];
|
||||
}
|
||||
|
||||
// Sort symbols by length, by symbol order within each length.
|
||||
for (symbol = 0; symbol < codeLengthsSize; symbol++) {
|
||||
if (codeLengths[symbol] != 0) {
|
||||
sorted[offset[codeLengths[symbol]]++] = symbol;
|
||||
}
|
||||
}
|
||||
|
||||
int tableBits = rootBits;
|
||||
int tableSize = 1 << tableBits;
|
||||
int totalSize = tableSize;
|
||||
|
||||
// Special case code with only one value.
|
||||
if (offset[MAX_LENGTH] == 1) {
|
||||
for (key = 0; key < totalSize; key++) {
|
||||
tableGroup[tableOffset + key] = sorted[0];
|
||||
}
|
||||
return totalSize;
|
||||
}
|
||||
|
||||
// Fill in root table.
|
||||
key = 0;
|
||||
symbol = 0;
|
||||
for (int len = 1, step = 2; len <= rootBits; len++, step <<= 1) {
|
||||
for (; count[len] > 0; count[len]--) {
|
||||
replicateValue(tableGroup, tableOffset + key, step, tableSize,
|
||||
len << 16 | sorted[symbol++]);
|
||||
key = getNextKey(key, len);
|
||||
}
|
||||
}
|
||||
|
||||
// Fill in 2nd level tables and add pointers to root table.
|
||||
final int mask = totalSize - 1;
|
||||
int low = -1;
|
||||
int currentOffset = tableOffset;
|
||||
for (int len = rootBits + 1, step = 2; len <= MAX_LENGTH; len++, step <<= 1) {
|
||||
for (; count[len] > 0; count[len]--) {
|
||||
if ((key & mask) != low) {
|
||||
currentOffset += tableSize;
|
||||
tableBits = nextTableBitSize(count, len, rootBits);
|
||||
tableSize = 1 << tableBits;
|
||||
totalSize += tableSize;
|
||||
low = key & mask;
|
||||
tableGroup[tableOffset + low] =
|
||||
(tableBits + rootBits) << 16 | (currentOffset - tableOffset - low);
|
||||
}
|
||||
replicateValue(tableGroup, currentOffset + (key >> rootBits), step, tableSize,
|
||||
(len - rootBits) << 16 | sorted[symbol++]);
|
||||
key = getNextKey(key, len);
|
||||
}
|
||||
}
|
||||
return totalSize;
|
||||
}
|
||||
}
|
100
firka/android/app/src/main/java/org/brotli/dec/State.java
Normal file
@@ -0,0 +1,100 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
import java.io.InputStream;
|
||||
|
||||
final class State {
|
||||
byte[] ringBuffer;
|
||||
byte[] contextModes;
|
||||
byte[] contextMap;
|
||||
byte[] distContextMap;
|
||||
byte[] distExtraBits;
|
||||
byte[] output;
|
||||
byte[] byteBuffer; // BitReader
|
||||
|
||||
short[] shortBuffer; // BitReader
|
||||
|
||||
int[] intBuffer; // BitReader
|
||||
int[] rings;
|
||||
int[] blockTrees;
|
||||
int[] literalTreeGroup;
|
||||
int[] commandTreeGroup;
|
||||
int[] distanceTreeGroup;
|
||||
int[] distOffset;
|
||||
|
||||
long accumulator64; // BitReader: pre-fetched bits.
|
||||
|
||||
int runningState; // Default value is 0 == Decode.UNINITIALIZED
|
||||
int nextRunningState;
|
||||
int accumulator32; // BitReader: pre-fetched bits.
|
||||
int bitOffset; // BitReader: bit-reading position in accumulator.
|
||||
int halfOffset; // BitReader: offset of next item in intBuffer/shortBuffer.
|
||||
int tailBytes; // BitReader: number of bytes in unfinished half.
|
||||
int endOfStreamReached; // BitReader: input stream is finished.
|
||||
int metaBlockLength;
|
||||
int inputEnd;
|
||||
int isUncompressed;
|
||||
int isMetadata;
|
||||
int literalBlockLength;
|
||||
int numLiteralBlockTypes;
|
||||
int commandBlockLength;
|
||||
int numCommandBlockTypes;
|
||||
int distanceBlockLength;
|
||||
int numDistanceBlockTypes;
|
||||
int pos;
|
||||
int maxDistance;
|
||||
int distRbIdx;
|
||||
int trivialLiteralContext;
|
||||
int literalTreeIdx;
|
||||
int commandTreeIdx;
|
||||
int j;
|
||||
int insertLength;
|
||||
int contextMapSlice;
|
||||
int distContextMapSlice;
|
||||
int contextLookupOffset1;
|
||||
int contextLookupOffset2;
|
||||
int distanceCode;
|
||||
int numDirectDistanceCodes;
|
||||
int distancePostfixBits;
|
||||
int distance;
|
||||
int copyLength;
|
||||
int maxBackwardDistance;
|
||||
int maxRingBufferSize;
|
||||
int ringBufferSize;
|
||||
int expectedTotalSize;
|
||||
int outputOffset;
|
||||
int outputLength;
|
||||
int outputUsed;
|
||||
int ringBufferBytesWritten;
|
||||
int ringBufferBytesReady;
|
||||
int isEager;
|
||||
int isLargeWindow;
|
||||
|
||||
// Compound dictionary
|
||||
int cdNumChunks;
|
||||
int cdTotalSize;
|
||||
int cdBrIndex;
|
||||
int cdBrOffset;
|
||||
int cdBrLength;
|
||||
int cdBrCopied;
|
||||
byte[][] cdChunks;
|
||||
int[] cdChunkOffsets;
|
||||
int cdBlockBits;
|
||||
byte[] cdBlockMap;
|
||||
|
||||
InputStream /* @Nullable */ input; // BitReader
|
||||
|
||||
State() {
|
||||
this.ringBuffer = new byte[0];
|
||||
this.rings = new int[10];
|
||||
this.rings[0] = 16;
|
||||
this.rings[1] = 15;
|
||||
this.rings[2] = 11;
|
||||
this.rings[3] = 4;
|
||||
}
|
||||
}
|
236
firka/android/app/src/main/java/org/brotli/dec/Transform.java
Normal file
@@ -0,0 +1,236 @@
|
||||
/* Copyright 2015 Google Inc. All Rights Reserved.
|
||||
|
||||
Distributed under MIT license.
|
||||
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
|
||||
*/
|
||||
|
||||
package org.brotli.dec;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Transformations on dictionary words.
|
||||
*
|
||||
* Transform descriptor is a triplet: {prefix, operator, suffix}.
|
||||
* "prefix" and "suffix" are short strings inserted before and after transformed dictionary word.
|
||||
* "operator" is applied to dictionary word itself.
|
||||
*
|
||||
* Some operators has "built-in" parameters, i.e. parameter is defined by operator ordinal. Other
|
||||
* operators have "external" parameters, supplied via additional table encoded in shared dictionary.
|
||||
*
|
||||
* Operators:
|
||||
* - IDENTITY (0): dictionary word is inserted "as is"
|
||||
* - OMIT_LAST_N (1 - 9): last N octets of dictionary word are not inserted; N == ordinal
|
||||
* - OMIT_FIRST_M (12-20): first M octets of dictionary word are not inserted; M == ordinal - 11
|
||||
* - UPPERCASE_FIRST (10): first "scalar" is XOR'ed with number 32
|
||||
* - UPPERCASE_ALL (11): all "scalars" are XOR'ed with number 32
|
||||
* - SHIFT_FIRST (21): first "scalar" is shifted by number form parameter table
|
||||
* - SHIFT_ALL (22): all "scalar" is shifted by number form parameter table
|
||||
*
|
||||
* Here "scalar" is a variable length character coding similar to UTF-8 encoding.
|
||||
* UPPERCASE_XXX / SHIFT_XXX operators were designed to change the case of UTF-8 encoded characters.
|
||||
* While UPPERCASE_XXX works well only on ASCII charset, SHIFT is much more generic and could be
|
||||
* used for most (all?) alphabets.
|
||||
*/
|
||||
final class Transform {
|
||||
|
||||
static final class Transforms {
|
||||
final int numTransforms;
|
||||
final int[] triplets;
|
||||
final byte[] prefixSuffixStorage;
|
||||
final int[] prefixSuffixHeads;
|
||||
final short[] params;
|
||||
|
||||
Transforms(int numTransforms, int prefixSuffixLen, int prefixSuffixCount) {
|
||||
this.numTransforms = numTransforms;
|
||||
this.triplets = new int[numTransforms * 3];
|
||||
this.params = new short[numTransforms];
|
||||
this.prefixSuffixStorage = new byte[prefixSuffixLen];
|
||||
this.prefixSuffixHeads = new int[prefixSuffixCount + 1];
|
||||
}
|
||||
}
|
||||
|
||||
static final int NUM_RFC_TRANSFORMS = 121;
|
||||
static final Transforms RFC_TRANSFORMS = new Transforms(NUM_RFC_TRANSFORMS, 167, 50);
|
||||
|
||||
private static final int OMIT_FIRST_LAST_LIMIT = 9;
|
||||
|
||||
private static final int IDENTITY = 0;
|
||||
private static final int OMIT_LAST_BASE = IDENTITY + 1 - 1; // there is no OMIT_LAST_0.
|
||||
private static final int UPPERCASE_FIRST = OMIT_LAST_BASE + OMIT_FIRST_LAST_LIMIT + 1;
|
||||
private static final int UPPERCASE_ALL = UPPERCASE_FIRST + 1;
|
||||
private static final int OMIT_FIRST_BASE = UPPERCASE_ALL + 1 - 1; // there is no OMIT_FIRST_0.
|
||||
private static final int SHIFT_FIRST = OMIT_FIRST_BASE + OMIT_FIRST_LAST_LIMIT + 1;
|
||||
private static final int SHIFT_ALL = SHIFT_FIRST + 1;
|
||||
|
||||
// Bundle of 0-terminated strings.
|
||||
private static final String PREFIX_SUFFIX_SRC = "# #s #, #e #.# the #.com/#\u00C2\u00A0# of # and"
|
||||
+ " # in # to #\"#\">#\n#]# for # a # that #. # with #'# from # by #. The # on # as # is #ing"
|
||||
+ " #\n\t#:#ed #(# at #ly #=\"# of the #. This #,# not #er #al #='#ful #ive #less #est #ize #"
|
||||
+ "ous #";
|
||||
private static final String TRANSFORMS_SRC = " !! ! , *! &! \" ! ) * * - ! # ! #!*! "
|
||||
+ "+ ,$ ! - % . / # 0 1 . \" 2 3!* 4% ! # / 5 6 7 8 0 1 & $ 9 + : "
|
||||
+ " ; < ' != > ?! 4 @ 4 2 & A *# ( B C& ) % ) !*# *-% A +! *. D! %' & E *6 F "
|
||||
+ " G% ! *A *% H! D I!+! J!+ K +- *4! A L!*4 M N +6 O!*% +.! K *G P +%( ! G *D +D "
|
||||
+ " Q +# *K!*G!+D!+# +G +A +4!+% +K!+4!*D!+K!*K";
|
||||
|
||||
private static void unpackTransforms(byte[] prefixSuffix,
|
||||
int[] prefixSuffixHeads, int[] transforms, String prefixSuffixSrc, String transformsSrc) {
|
||||
final int n = prefixSuffixSrc.length();
|
||||
int index = 1;
|
||||
int j = 0;
|
||||
for (int i = 0; i < n; ++i) {
|
||||
final char c = prefixSuffixSrc.charAt(i);
|
||||
if (c == 35) { // == #
|
||||
prefixSuffixHeads[index++] = j;
|
||||
} else {
|
||||
prefixSuffix[j++] = (byte) c;
|
||||
}
|
||||
}
|
||||
|
||||
for (int i = 0; i < NUM_RFC_TRANSFORMS * 3; ++i) {
|
||||
transforms[i] = transformsSrc.charAt(i) - 32;
|
||||
}
|
||||
}
|
||||
|
||||
static {
|
||||
unpackTransforms(RFC_TRANSFORMS.prefixSuffixStorage, RFC_TRANSFORMS.prefixSuffixHeads,
|
||||
RFC_TRANSFORMS.triplets, PREFIX_SUFFIX_SRC, TRANSFORMS_SRC);
|
||||
}
|
||||
|
||||
static int transformDictionaryWord(byte[] dst, int dstOffset, ByteBuffer src, int srcOffset,
|
||||
int len, Transforms transforms, int transformIndex) {
|
||||
int offset = dstOffset;
|
||||
final int[] triplets = transforms.triplets;
|
||||
final byte[] prefixSuffixStorage = transforms.prefixSuffixStorage;
|
||||
final int[] prefixSuffixHeads = transforms.prefixSuffixHeads;
|
||||
final int transformOffset = 3 * transformIndex;
|
||||
final int prefixIdx = triplets[transformOffset];
|
||||
final int transformType = triplets[transformOffset + 1];
|
||||
final int suffixIdx = triplets[transformOffset + 2];
|
||||
int prefix = prefixSuffixHeads[prefixIdx];
|
||||
final int prefixEnd = prefixSuffixHeads[prefixIdx + 1];
|
||||
int suffix = prefixSuffixHeads[suffixIdx];
|
||||
final int suffixEnd = prefixSuffixHeads[suffixIdx + 1];
|
||||
|
||||
int omitFirst = transformType - OMIT_FIRST_BASE;
|
||||
int omitLast = transformType - OMIT_LAST_BASE;
|
||||
if (omitFirst < 1 || omitFirst > OMIT_FIRST_LAST_LIMIT) {
|
||||
omitFirst = 0;
|
||||
}
|
||||
if (omitLast < 1 || omitLast > OMIT_FIRST_LAST_LIMIT) {
|
||||
omitLast = 0;
|
||||
}
|
||||
|
||||
// Copy prefix.
|
||||
while (prefix != prefixEnd) {
|
||||
dst[offset++] = prefixSuffixStorage[prefix++];
|
||||
}
|
||||
|
||||
// Copy trimmed word.
|
||||
if (omitFirst > len) {
|
||||
omitFirst = len;
|
||||
}
|
||||
srcOffset += omitFirst;
|
||||
len -= omitFirst;
|
||||
len -= omitLast;
|
||||
int i = len;
|
||||
while (i > 0) {
|
||||
dst[offset++] = src.get(srcOffset++);
|
||||
i--;
|
||||
}
|
||||
|
||||
// Ferment.
|
||||
if (transformType == UPPERCASE_FIRST || transformType == UPPERCASE_ALL) {
|
||||
int uppercaseOffset = offset - len;
|
||||
if (transformType == UPPERCASE_FIRST) {
|
||||
len = 1;
|
||||
}
|
||||
while (len > 0) {
|
||||
final int c0 = dst[uppercaseOffset] & 0xFF;
|
||||
if (c0 < 0xC0) {
|
||||
if (c0 >= 97 && c0 <= 122) { // in [a..z] range
|
||||
dst[uppercaseOffset] ^= (byte) 32;
|
||||
}
|
||||
uppercaseOffset += 1;
|
||||
len -= 1;
|
||||
} else if (c0 < 0xE0) {
|
||||
dst[uppercaseOffset + 1] ^= (byte) 32;
|
||||
uppercaseOffset += 2;
|
||||
len -= 2;
|
||||
} else {
|
||||
dst[uppercaseOffset + 2] ^= (byte) 5;
|
||||
uppercaseOffset += 3;
|
||||
len -= 3;
|
||||
}
|
||||
}
|
||||
} else if (transformType == SHIFT_FIRST || transformType == SHIFT_ALL) {
|
||||
int shiftOffset = offset - len;
|
||||
final short param = transforms.params[transformIndex];
|
||||
/* Limited sign extension: scalar < (1 << 24). */
|
||||
int scalar = (param & 0x7FFF) + (0x1000000 - (param & 0x8000));
|
||||
while (len > 0) {
|
||||
int step = 1;
|
||||
final int c0 = dst[shiftOffset] & 0xFF;
|
||||
if (c0 < 0x80) {
|
||||
/* 1-byte rune / 0sssssss / 7 bit scalar (ASCII). */
|
||||
scalar += c0;
|
||||
dst[shiftOffset] = (byte) (scalar & 0x7F);
|
||||
} else if (c0 < 0xC0) {
|
||||
/* Continuation / 10AAAAAA. */
|
||||
} else if (c0 < 0xE0) {
|
||||
/* 2-byte rune / 110sssss AAssssss / 11 bit scalar. */
|
||||
if (len >= 2) {
|
||||
final byte c1 = dst[shiftOffset + 1];
|
||||
scalar += (c1 & 0x3F) | ((c0 & 0x1F) << 6);
|
||||
dst[shiftOffset] = (byte) (0xC0 | ((scalar >> 6) & 0x1F));
|
||||
dst[shiftOffset + 1] = (byte) ((c1 & 0xC0) | (scalar & 0x3F));
|
||||
step = 2;
|
||||
} else {
|
||||
step = len;
|
||||
}
|
||||
} else if (c0 < 0xF0) {
|
||||
/* 3-byte rune / 1110ssss AAssssss BBssssss / 16 bit scalar. */
|
||||
if (len >= 3) {
|
||||
final byte c1 = dst[shiftOffset + 1];
|
||||
final byte c2 = dst[shiftOffset + 2];
|
||||
scalar += (c2 & 0x3F) | ((c1 & 0x3F) << 6) | ((c0 & 0x0F) << 12);
|
||||
dst[shiftOffset] = (byte) (0xE0 | ((scalar >> 12) & 0x0F));
|
||||
dst[shiftOffset + 1] = (byte) ((c1 & 0xC0) | ((scalar >> 6) & 0x3F));
|
||||
dst[shiftOffset + 2] = (byte) ((c2 & 0xC0) | (scalar & 0x3F));
|
||||
step = 3;
|
||||
} else {
|
||||
step = len;
|
||||
}
|
||||
} else if (c0 < 0xF8) {
|
||||
/* 4-byte rune / 11110sss AAssssss BBssssss CCssssss / 21 bit scalar. */
|
||||
if (len >= 4) {
|
||||
final byte c1 = dst[shiftOffset + 1];
|
||||
final byte c2 = dst[shiftOffset + 2];
|
||||
final byte c3 = dst[shiftOffset + 3];
|
||||
scalar += (c3 & 0x3F) | ((c2 & 0x3F) << 6) | ((c1 & 0x3F) << 12) | ((c0 & 0x07) << 18);
|
||||
dst[shiftOffset] = (byte) (0xF0 | ((scalar >> 18) & 0x07));
|
||||
dst[shiftOffset + 1] = (byte) ((c1 & 0xC0) | ((scalar >> 12) & 0x3F));
|
||||
dst[shiftOffset + 2] = (byte) ((c2 & 0xC0) | ((scalar >> 6) & 0x3F));
|
||||
dst[shiftOffset + 3] = (byte) ((c3 & 0xC0) | (scalar & 0x3F));
|
||||
step = 4;
|
||||
} else {
|
||||
step = len;
|
||||
}
|
||||
}
|
||||
shiftOffset += step;
|
||||
len -= step;
|
||||
if (transformType == SHIFT_FIRST) {
|
||||
len = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Copy suffix.
|
||||
while (suffix != suffixEnd) {
|
||||
dst[offset++] = prefixSuffixStorage[suffix++];
|
||||
}
|
||||
|
||||
return offset - dstOffset;
|
||||
}
|
||||
}
|
119
firka/android/app/src/main/java/org/brotli/dec/Utils.java
Normal file
@@ -0,0 +1,119 @@
/* Copyright 2015 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

package org.brotli.dec;

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.Buffer;
import java.nio.ByteBuffer;

/**
 * A set of utility methods.
 */
final class Utils {

  private static final byte[] BYTE_ZEROES = new byte[1024];

  private static final int[] INT_ZEROES = new int[1024];

  /**
   * Fills byte array with zeroes.
   *
   * <p> Current implementation uses {@link System#arraycopy}, so it should be used for length not
   * less than 16.
   *
   * @param dest array to fill with zeroes
   * @param start index of the first byte to fill
   * @param end index after the last byte to fill
   */
  static void fillBytesWithZeroes(byte[] dest, int start, int end) {
    int cursor = start;
    while (cursor < end) {
      int step = Math.min(cursor + 1024, end) - cursor;
      System.arraycopy(BYTE_ZEROES, 0, dest, cursor, step);
      cursor += step;
    }
  }

  /**
   * Fills int array with zeroes.
   *
   * <p> Current implementation uses {@link System#arraycopy}, so it should be used for length not
   * less than 16.
   *
   * @param dest array to fill with zeroes
   * @param start index of the first item to fill
   * @param end index after the last item to fill
   */
  static void fillIntsWithZeroes(int[] dest, int start, int end) {
    int cursor = start;
    while (cursor < end) {
      int step = Math.min(cursor + 1024, end) - cursor;
      System.arraycopy(INT_ZEROES, 0, dest, cursor, step);
      cursor += step;
    }
  }

  static void copyBytes(byte[] dst, int target, byte[] src, int start, int end) {
    System.arraycopy(src, start, dst, target, end - start);
  }

  static void copyBytesWithin(byte[] bytes, int target, int start, int end) {
    System.arraycopy(bytes, start, bytes, target, end - start);
  }

  static int readInput(InputStream src, byte[] dst, int offset, int length) {
    try {
      return src.read(dst, offset, length);
    } catch (IOException e) {
      throw new BrotliRuntimeException("Failed to read input", e);
    }
  }

  static void closeInput(InputStream src) throws IOException {
    src.close();
  }

  static byte[] toUsAsciiBytes(String src) {
    try {
      // NB: String#getBytes(String) is present in JDK 1.1, while other variants require JDK 1.6 and
      // above.
      return src.getBytes("US-ASCII");
    } catch (UnsupportedEncodingException e) {
      throw new RuntimeException(e); // cannot happen
    }
  }

  static ByteBuffer asReadOnlyBuffer(ByteBuffer src) {
    return src.asReadOnlyBuffer();
  }

  static int isReadOnly(ByteBuffer src) {
    return src.isReadOnly() ? 1 : 0;
  }

  static int isDirect(ByteBuffer src) {
    return src.isDirect() ? 1 : 0;
  }

  // Crazy pills factory: code compiled for JDK8 does not work on JRE9.
  static void flipBuffer(Buffer buffer) {
    buffer.flip();
  }

  static int isDebugMode() {
    boolean assertsEnabled = Boolean.parseBoolean(System.getProperty("BROTLI_ENABLE_ASSERTS"));
    return assertsEnabled ? 1 : 0;
  }

  // See BitReader.LOG_BITNESS
  static int getLogBintness() {
    boolean isLongExpensive = Boolean.parseBoolean(System.getProperty("BROTLI_32_BIT_CPU"));
    return isLongExpensive ? 5 : 6;
  }
}
firka/android/app/src/main/java/org/brotli/enc/PreparedDictionary.java (new file, 16 lines)
@ -0,0 +1,16 @@
/* Copyright 2018 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

package org.brotli.enc;

import java.nio.ByteBuffer;

/**
 * Prepared dictionary data provider.
 */
public interface PreparedDictionary {
  ByteBuffer getData();
}
firka/android/app/src/main/java/org/brotli/enc/PreparedDictionaryGenerator.java (new file, 185 lines)
@ -0,0 +1,185 @@
/* Copyright 2017 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

package org.brotli.enc;

import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;

/**
 * Java prepared (raw) dictionary producer.
 */
public class PreparedDictionaryGenerator {

  private static final int MAGIC = 0xDEBCEDE0;
  private static final long HASH_MULTIPLIER = 0x1fe35a7bd3579bd3L;

  private static class PreparedDictionaryImpl implements PreparedDictionary {
    private final ByteBuffer data;

    private PreparedDictionaryImpl(ByteBuffer data) {
      this.data = data;
    }

    @Override
    public ByteBuffer getData() {
      return data;
    }
  }

  // Disallow instantiation.
  private PreparedDictionaryGenerator() { }

  public static PreparedDictionary generate(ByteBuffer src) {
    return generate(src, 17, 3, 40, 5);
  }

  public static PreparedDictionary generate(ByteBuffer src,
      int bucketBits, int slotBits, int hashBits, int blockBits) {
    ((Buffer) src).clear(); // Just in case...
    if (blockBits > 12) {
      throw new IllegalArgumentException("blockBits is too big");
    }
    if (bucketBits >= 24) {
      throw new IllegalArgumentException("bucketBits is too big");
    }
    if (bucketBits - slotBits >= 16) {
      throw new IllegalArgumentException("slotBits is too small");
    }
    int bucketLimit = 1 << blockBits;
    int numBuckets = 1 << bucketBits;
    int numSlots = 1 << slotBits;
    int slotMask = numSlots - 1;
    int hashShift = 64 - bucketBits;
    long hashMask = (~0L) >>> (64 - hashBits);
    int sourceSize = src.capacity();
    if (sourceSize < 8) {
      throw new IllegalArgumentException("src is too short");
    }

    /* Step 1: create "bloated" hasher. */
    short[] num = new short[numBuckets];
    int[] bucketHeads = new int[numBuckets];
    int[] nextBucket = new int[sourceSize];

    long accumulator = 0;
    for (int i = 0; i < 7; ++i) {
      accumulator |= (src.get(i) & 0xFFL) << (8 * i);
    }
    accumulator <<= 8;
    /* TODO(eustas): apply custom "store" order. */
    for (int i = 0; i + 7 < sourceSize; ++i) {
      accumulator = (accumulator >>> 8) | ((src.get(i + 7) & 0xFFL) << 56);
      long h = (accumulator & hashMask) * HASH_MULTIPLIER;
      int key = (int) (h >>> hashShift);
      int count = num[key];
      nextBucket[i] = (count == 0) ? -1 : bucketHeads[key];
      bucketHeads[key] = i;
      count++;
      if (count > bucketLimit) {
        count = bucketLimit;
      }
      num[key] = (short) count;
    }

    /* Step 2: find slot limits. */
    int[] slotLimit = new int[numSlots];
    int[] slotSize = new int[numSlots];
    int totalItems = 0;
    for (int i = 0; i < numSlots; ++i) {
      boolean overflow = false;
      slotLimit[i] = bucketLimit;
      while (true) {
        overflow = false;
        int limit = slotLimit[i];
        int count = 0;
        for (int j = i; j < numBuckets; j += numSlots) {
          int size = num[j];
          /* Last chain may span behind 64K limit; overflow happens only if
             we are about to use 0xFFFF+ as item offset. */
          if (count >= 0xFFFF) {
            overflow = true;
            break;
          }
          if (size > limit) {
            size = limit;
          }
          count += size;
        }
        if (!overflow) {
          slotSize[i] = count;
          totalItems += count;
          break;
        }
        slotLimit[i]--;
      }
    }

    /* Step 3: transfer data to "slim" hasher. */
    int part0 = 6 * 4;
    int part1 = numSlots * 4;
    int part2 = numBuckets * 2;
    int part3 = totalItems * 4;
    int allocSize = part0 + part1 + part2 + part3 + sourceSize;
    ByteBuffer flat = ByteBuffer.allocateDirect(allocSize);
    ByteBuffer pointer = flat.slice();
    pointer.order(ByteOrder.nativeOrder());

    IntBuffer struct = pointer.asIntBuffer();
    pointer.position(pointer.position() + part0);
    IntBuffer slotOffsets = pointer.asIntBuffer();
    pointer.position(pointer.position() + part1);
    ShortBuffer heads = pointer.asShortBuffer();
    pointer.position(pointer.position() + part2);
    IntBuffer items = pointer.asIntBuffer();
    pointer.position(pointer.position() + part3);
    ByteBuffer sourceCopy = pointer.slice();

    /* magic */ struct.put(0, MAGIC);
    /* source_offset */ struct.put(1, totalItems);
    /* source_size */ struct.put(2, sourceSize);
    /* hash_bits */ struct.put(3, hashBits);
    /* bucket_bits */ struct.put(4, bucketBits);
    /* slot_bits */ struct.put(5, slotBits);

    totalItems = 0;
    for (int i = 0; i < numSlots; ++i) {
      slotOffsets.put(i, totalItems);
      totalItems += slotSize[i];
      slotSize[i] = 0;
    }

    for (int i = 0; i < numBuckets; ++i) {
      int slot = i & slotMask;
      int count = num[i];
      if (count > slotLimit[slot]) {
        count = slotLimit[slot];
      }
      if (count == 0) {
        heads.put(i, (short) 0xFFFF);
        continue;
      }
      int cursor = slotSize[slot];
      heads.put(i, (short) cursor);
      cursor += slotOffsets.get(slot);
      slotSize[slot] += count;
      int pos = bucketHeads[i];
      for (int j = 0; j < count; j++) {
        items.put(cursor++, pos);
        pos = nextBucket[pos];
      }
      cursor--;
      items.put(cursor, items.get(cursor) | 0x80000000);
    }

    sourceCopy.put(src);

    return new PreparedDictionaryImpl(flat);
  }
}
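The generator packs the prepared dictionary into one flat, native-order direct ByteBuffer: with the default parameters (bucketBits=17, slotBits=3, hashBits=40, blockBits=5) that is a 24-byte header, a 32-byte slot-offset table, a 256 KiB bucket-head table, 4 bytes per retained hash item, and a verbatim copy of the source bytes. A short Kotlin sketch of how the two org.brotli.enc classes above fit together; the sample text and the printed size are illustrative only, and how the resulting buffer is handed to an encoder is outside this diff.

import java.nio.ByteBuffer
import org.brotli.enc.PreparedDictionary
import org.brotli.enc.PreparedDictionaryGenerator

fun main() {
    // generate() requires at least 8 bytes of source material.
    val sample = "GET /api/v1/ HTTP/1.1\r\nHost: example.com\r\n".toByteArray(Charsets.US_ASCII)
    val src = ByteBuffer.allocateDirect(sample.size)
    src.put(sample)

    // generate() clears the buffer itself, so the position left by put() does not matter.
    val dictionary: PreparedDictionary = PreparedDictionaryGenerator.generate(src)

    // getData() returns the flat layout described above: header, slot offsets,
    // bucket heads, item list, then a verbatim copy of the source bytes.
    println("prepared dictionary: ${dictionary.data.capacity()} bytes")
}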
firka/android/app/src/main/kotlin/app/firka/naplo/AppMain.kt (new file, 63 lines)
@ -0,0 +1,63 @@
package app.firka.naplo

import android.annotation.SuppressLint
import android.app.Application
import android.os.Build
import android.util.Log
import org.brotli.dec.BrotliInputStream
import org.json.JSONObject
import java.io.File
import java.io.FileOutputStream
import java.security.MessageDigest

class AppMain : Application() {

    private fun File.sha256(): String {
        if (!exists()) return "0000000000000000000000000000000000000000000000000000000000000000"

        val md = MessageDigest.getInstance("SHA-256")
        val digest = md.digest(this.readBytes())
        return digest.fold("") { str, it -> str + "%02x".format(it) }
    }

    @SuppressLint("UnsafeDynamicallyLoadedCode")
    override fun onCreate() {
        super.onCreate()

        val am = assets
        val abi = Build.SUPPORTED_ABIS[0]
        val rootAssets = am.list("")

        // The transformed APK ships its Brotli-compressed native libraries under flutter-br-<abi>/.
        if (rootAssets?.contains("flutter-br-$abi") != true) {
            throw Exception("Unsupported abi: $abi, try downloading an apk with a different abi")
        }

        // flutter-br.json maps "<abi>/<lib>.so" to the SHA-256 of the decompressed library.
        val compressedLibsIndex = am.open("flutter-br.json")
        val compressedLibs = JSONObject(compressedLibsIndex.readBytes().toString(Charsets.UTF_8))
        compressedLibsIndex.close()

        val natives = am.list("flutter-br-$abi")!!
        for (lib in natives) {
            val so = lib.removeSuffix(".br")
            val soFile = File(cacheDir, so)

            // A previously decompressed copy in the cache that still matches the expected hash
            // can be loaded directly; no decompression needed.
            if (soFile.sha256() == compressedLibs.getString("$abi/$so")) {
                System.load(soFile.absolutePath)
                return
            }

            Log.d("AppMain", "Decompressing: $so")
            val brInput = am.open("flutter-br-$abi/$lib")
            val soOutput = FileOutputStream(soFile)

            val brIn = BrotliInputStream(brInput)
            brIn.copyTo(soOutput)

            brInput.close()
            soOutput.close()

            System.load(soFile.absolutePath)
        }
    }
}
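AppMain depends on two build-time artifacts: the compressed libraries under assets/flutter-br-<abi>/ and the flutter-br.json index, whose keys follow the "<abi>/<lib>.so" pattern the code reads back (e.g. "arm64-v8a/libflutter.so") and whose values are the SHA-256 of the decompressed library. The helper below is a hypothetical sketch (name and signature are not from the commit) that only makes that expected JSON shape concrete; the real index is presumably written by the Gradle transform step earlier in this change.

import org.json.JSONObject
import java.io.File
import java.security.MessageDigest

// Hypothetical helper, not part of the commit: builds a "<abi>/<lib>.so" -> SHA-256 map in the
// shape that AppMain.onCreate() reads back via compressedLibs.getString("$abi/$so").
fun buildFlutterBrIndex(libDirsByAbi: Map<String, File>): JSONObject {
    val index = JSONObject()
    val md = MessageDigest.getInstance("SHA-256")
    for ((abi, dir) in libDirsByAbi) {
        dir.listFiles { f -> f.extension == "so" }?.forEach { so ->
            // Hash the decompressed library; AppMain compares this against its cached copy.
            val hex = md.digest(so.readBytes()).joinToString("") { "%02x".format(it) }
            index.put("$abi/${so.name}", hex)
        }
    }
    return index
}

None of the launch-time logic runs unless AppMain is registered as the application class (android:name) in AndroidManifest.xml.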