Add BLZ decompress functionality. Used in KIP1.

Dmitry Isaenko 2023-01-12 12:51:01 +03:00
parent ce16e19985
commit 365326456b
9 changed files with 232 additions and 61 deletions
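The decompressed-export path added here can be driven entirely through the public KIP1Provider API touched by this commit. A minimal sketch, not part of the commit (the file paths are hypothetical; only the constructor, printDebug() and exportAsDecompressed() calls visible in the diff below are assumed):

    import libKonogonka.Tools.other.System2.ini1.KIP1Provider;

    public class Kip1DecompressExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical input: a standalone KIP1 extracted earlier from an INI1.
            KIP1Provider provider = new KIP1Provider("/tmp/FS.kip1");
            provider.printDebug();
            // Writes "<name>_decompressed.kip1" into the given directory,
            // BLZ-decompressing the .text/.ro/.rw sections on the way.
            provider.exportAsDecompressed("/tmp/out");
        }
    }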


@@ -60,6 +60,22 @@ public class Converter {
         return String.format("%32s", Integer.toBinaryString( value )).replace(' ', '0');
     }
+
+    public static String byteToBinaryString(byte value){
+        String str = String.format("%8s", Integer.toBinaryString( value )).replace(' ', '0');
+        int decrease = 0;
+        if (str.length() > 8)
+            decrease = str.length()-8;
+        return str.substring(decrease);
+    }
+
+    public static String shortToBinaryString(short value){
+        String str = String.format("%16s", Integer.toBinaryString( value )).replace(' ', '0');
+        int decrease = 0;
+        if (str.length() > 16)
+            decrease = str.length()-16;
+        return str.substring(decrease);
+    }
     public static String longToOctString(long value){
         return String.format("%64s", Long.toBinaryString( value )).replace(' ', '0');
     }
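A note on the two new helpers: Integer.toBinaryString(...) sign-extends negative byte and short arguments to 32 bits, which is why both methods trim the result back to its last 8 or 16 characters. A small illustration, not part of the commit:

    Converter.byteToBinaryString((byte) 0x05);     // "00000101"
    Converter.byteToBinaryString((byte) 0x80);     // "10000000", not 32 sign-extended bits
    Converter.shortToBinaryString((short) 0xC001); // "1100000000000001"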


@@ -6,8 +6,7 @@ import java.io.File;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 
-public class ExportAble {
+public abstract class ExportAble {
     protected BufferedInputStream stream;
 
     protected boolean export(String saveTo, String fileName, long skip, long size) throws Exception{


@@ -88,6 +88,7 @@ public class KernelAccessControlProvider {
             DEBUGFLAGS = 16;
     // RAW data
     private final LinkedList<Integer> rawData;
+    private byte[] raw;
     // Kernel flags
     private boolean kernelFlagsAvailable;
     private int kernelFlagCpuIdHi;
@@ -120,6 +121,7 @@
             throw new Exception("ACID-> KernelAccessControlProvider: too small size of the Kernel Access Control");
         this.rawData = new LinkedList<>();
+        this.raw = bytes;
         this.interruptPairs = new LinkedHashMap<>();
         this.syscallMasks = new LinkedHashMap<>();
         this.mapIoOrNormalRange = new LinkedHashMap<>();
@@ -222,6 +224,8 @@
         return minBitCnt;
     }
     public LinkedList<Integer> getRawData() { return rawData; }
+    public byte[] getRaw() { return raw; }
+
     public boolean isKernelFlagsAvailable() { return kernelFlagsAvailable; }
     public int getKernelFlagCpuIdHi() { return kernelFlagCpuIdHi; }
     public int getKernelFlagCpuIdLo() { return kernelFlagCpuIdLo; }


@@ -21,18 +21,18 @@ package libKonogonka.Tools.NSO;
 import libKonogonka.Converter;
 
 public class SegmentHeader {
-    private final int segmentOffset;
-    private final int memoryOffset;
-    private final int sizeAsDecompressed;
+    private final int segmentOffset; // useless constant for KIP1
+    private final int memoryOffset;  // In case of KIP1 it's decompressed size
+    private final int size;          // as decompressed for NSO0; as compressed for KIP1
 
-    SegmentHeader(byte[] data){
+    public SegmentHeader(byte[] data){
         this(data, 0);
     }
 
     public SegmentHeader(byte[] data, int fromOffset){
         this.segmentOffset = Converter.getLEint(data, fromOffset);
         this.memoryOffset = Converter.getLEint(data, fromOffset+4);
-        this.sizeAsDecompressed = Converter.getLEint(data, fromOffset+8);
+        this.size = Converter.getLEint(data, fromOffset+8);
     }
 
     public int getSegmentOffset() {
@@ -47,6 +47,6 @@ public class SegmentHeader {
      * @return Size as decompressed if used in NSO0; size of compressed if used in KIP1.
      * */
     public int getSize() {
-        return sizeAsDecompressed;
+        return size;
     }
 }
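With the renamed field, a KIP1 segment header packs three little-endian ints: segment offset, decompressed size (memoryOffset) and compressed size (size). A sketch of what the constructor reads, assuming the usual java.nio.ByteBuffer imports, for a hypothetical .text segment that is 0x1234 bytes in the file and 0x5678 bytes once decompressed:

    byte[] raw = ByteBuffer.allocate(12).order(ByteOrder.LITTLE_ENDIAN)
            .putInt(0x0)     // segmentOffset - unused constant for KIP1
            .putInt(0x5678)  // memoryOffset  - decompressed size for KIP1
            .putInt(0x1234)  // size          - compressed size for KIP1
            .array();
    SegmentHeader text = new SegmentHeader(raw);
    // text.getMemoryOffset() == 0x5678, text.getSize() == 0x1234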


@@ -129,7 +129,7 @@ public class KIP1Header {
                 "Main thread priority : " + String.format("0x%x", mainThreadPriority) + "\n" +
                 "Main thread core number : " + String.format("0x%x", mainThreadCoreNumber) + "\n" +
                 "Reserved 1 : " + String.format("0x%x", reserved1) + "\n" +
-                "Flags : " + Converter.intToBinaryString(flags) + "\n" +
+                "Flags : " + Converter.byteToBinaryString(flags) + "\n" +
                 " 0| .text compress : " + ((flags & 1) == 1 ? "YES" : "NO") + "\n" +
                 " 1| .ro compress : " + ((flags >> 1 & 1) == 1 ? "YES" : "NO") + "\n" +
                 " 2| .rw compress : " + ((flags >> 2 & 1) == 1 ? "YES" : "NO") + "\n" +


@@ -24,6 +24,7 @@ import libKonogonka.ctraesclassic.InFileStreamClassicProducer;
 import java.nio.file.Paths;
 
 public class KIP1Provider extends ExportAble {
+    public static final int HEADER_SIZE = 0x100;
     private KIP1Header header;
     private final InFileStreamClassicProducer producer;
@@ -34,8 +35,8 @@ public class KIP1Provider extends ExportAble {
     public KIP1Provider(String fileLocation) throws Exception{
         this.producer = new InFileStreamClassicProducer(Paths.get(fileLocation));
         this.stream = producer.produce();
-        byte[] kip1HeaderBytes = new byte[0x100];
-        if (0x100 != stream.read(kip1HeaderBytes))
+        byte[] kip1HeaderBytes = new byte[HEADER_SIZE];
+        if (HEADER_SIZE != stream.read(kip1HeaderBytes))
             throw new Exception("Unable to read KIP1 file header");
         makeHeader(kip1HeaderBytes);
@@ -54,7 +55,7 @@ public class KIP1Provider extends ExportAble {
     }
     private void calculateOffsets(long kip1StartOffset){
         this.startOffset = kip1StartOffset;
-        this.endOffset = 0x100 + kip1StartOffset +
+        this.endOffset = HEADER_SIZE + kip1StartOffset +
                 header.getTextSegmentHeader().getSize() + header.getRoDataSegmentHeader().getSize() +
                 header.getRwDataSegmentHeader().getSize() + header.getBssSegmentHeader().getSize();
     }
@@ -69,11 +70,11 @@ public class KIP1Provider extends ExportAble {
         return export(saveTo, header.getName()+".kip1", startOffset, endOffset - startOffset);
     }
     public boolean exportAsDecompressed(String saveToLocation) throws Exception{
-        return Kip1Unpacker.unpack(header, producer, saveToLocation);
+        return Kip1Unpacker.unpack(header, producer.getSuccessor(startOffset, true), saveToLocation);
     }
     public KIP1Raw getAsDecompressed() throws Exception{
-        return Kip1Unpacker.getNSO0Raw(header, producer);
+        return Kip1Unpacker.getKIP1Raw(header, producer.getSuccessor(startOffset, true));
     }
     public void printDebug(){


@@ -19,6 +19,7 @@
 package libKonogonka.Tools.other.System2.ini1;
 
 import libKonogonka.Tools.NSO.SegmentHeader;
+import libKonogonka.blz.BlzDecompress;
 import libKonogonka.ctraesclassic.InFileStreamClassicProducer;
 
 import java.io.BufferedInputStream;
@@ -28,6 +29,8 @@ import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.charset.StandardCharsets;
+
+import static libKonogonka.Tools.other.System2.ini1.KIP1Provider.HEADER_SIZE;
 
 public class Kip1Unpacker {
     private static final String DECOMPRESSED_FILE_POSTFIX = "_decompressed";
@@ -58,7 +61,7 @@ public class Kip1Unpacker {
         return true;
     }
 
-    static KIP1Raw getNSO0Raw(KIP1Header kip1Header, InFileStreamClassicProducer producer) throws Exception{
+    static KIP1Raw getKIP1Raw(KIP1Header kip1Header, InFileStreamClassicProducer producer) throws Exception{
         Kip1Unpacker instance = new Kip1Unpacker(kip1Header, producer);
 
         return new KIP1Raw(instance.header,
@@ -69,69 +72,118 @@ public class Kip1Unpacker {
     private void decompressSections() throws Exception{
         decompressTextSection();
-        decompressRodataSection();
-        decompressDataSection();
+        decompressRoDataSection();
+        decompressRwDataSection();
     }
 
     private void decompressTextSection() throws Exception{
         if (kip1Header.isTextCompressFlag())
-            _textDecompressedSection = decompressSection(kip1Header.getTextSegmentHeader(), kip1Header.getTextSegmentHeader().getSize());
+            _textDecompressedSection = decompressSection(kip1Header.getTextSegmentHeader(), HEADER_SIZE);
         else
-            _textDecompressedSection = duplicateSection(kip1Header.getTextSegmentHeader());
+            _textDecompressedSection = duplicateSection(kip1Header.getTextSegmentHeader(), HEADER_SIZE);
     }
-    private void decompressRodataSection() throws Exception{
+    private void decompressRoDataSection() throws Exception{
+        int offset = HEADER_SIZE + kip1Header.getTextSegmentHeader().getSize();
         if (kip1Header.isRoDataCompressFlag())
-            _roDataDecompressedSection = decompressSection(kip1Header.getRoDataSegmentHeader(), kip1Header.getRoDataSegmentHeader().getSize());
+            _roDataDecompressedSection = decompressSection(kip1Header.getRoDataSegmentHeader(), offset);
         else
-            _roDataDecompressedSection = duplicateSection(kip1Header.getRoDataSegmentHeader());
+            _roDataDecompressedSection = duplicateSection(kip1Header.getRoDataSegmentHeader(), offset);
     }
-    private void decompressDataSection() throws Exception{
+    private void decompressRwDataSection() throws Exception{
+        int offset = HEADER_SIZE + kip1Header.getTextSegmentHeader().getSize() + kip1Header.getRoDataSegmentHeader().getSize();
         if (kip1Header.isRwDataCompressFlag())
-            _rwDataDecompressedSection = decompressSection(kip1Header.getRwDataSegmentHeader(), kip1Header.getRwDataSegmentHeader().getSize());
+            _rwDataDecompressedSection = decompressSection(kip1Header.getRwDataSegmentHeader(), offset);
         else
-            _rwDataDecompressedSection = duplicateSection(kip1Header.getRwDataSegmentHeader());
+            _rwDataDecompressedSection = duplicateSection(kip1Header.getRwDataSegmentHeader(), offset);
     }
 
-    private byte[] decompressSection(SegmentHeader segmentHeader, int compressedSectionSize) throws Exception{
-        // TODO
-        return new byte[1];
-    }
-
-    private byte[] duplicateSection(SegmentHeader segmentHeader) throws Exception{
-        try (BufferedInputStream stream = producer.produce()) {
-            int size = segmentHeader.getSize();
-
-            byte[] sectionContent = new byte[size];
-            if (segmentHeader.getSegmentOffset() != stream.skip(segmentHeader.getSegmentOffset()))
-                throw new Exception("Failed to skip " + segmentHeader.getSegmentOffset() + " bytes till section");
-            if (size != stream.read(sectionContent))
-                throw new Exception("Failed to read entire section");
-            return sectionContent;
-        }
-    }
-
-    private void makeHeader() throws Exception{
-        try (BufferedInputStream stream = producer.produce()) {
-            byte[] headerBytes = new byte[0x100];
-
-            if (0x100 != stream.read(headerBytes))
-                throw new Exception("Unable to read initial 0x100 bytes needed for export.");
-            //TODO
-            //textFileOffsetNew = kip1Header.getTextSegmentHeader().getMemoryOffset()+0x100;
-            //roDataFileOffsetNew = kip1Header.getRoDataSegmentHeader().getMemoryOffset()+0x100;
-            //rwDataFileOffsetNew = kip1Header.getRwDataSegmentHeader().getMemoryOffset()+0x100;
-
-            ByteBuffer resultingHeader = ByteBuffer.allocate(0x100).order(ByteOrder.LITTLE_ENDIAN);
-            resultingHeader.put("KIP1".getBytes(StandardCharsets.US_ASCII));
-                //.putInt(kip1Header.getVersion())
-                //.put(kip1Header.getUpperReserved())
-
-            header = resultingHeader.array();
-        }
-    }
+    private byte[] decompressSection(SegmentHeader segmentHeader, int offset) throws Exception{
+        try (BufferedInputStream stream = producer.produce()) {
+            int sectionDecompressedSize = segmentHeader.getMemoryOffset();
+            byte[] compressed = new byte[segmentHeader.getSize()];
+            if (offset != stream.skip(offset))
+                throw new Exception("Failed to skip " + offset + " bytes till section");
+            if (segmentHeader.getSize() != stream.read(compressed))
+                throw new Exception("Failed to read entire section");
+
+            BlzDecompress decompressor = new BlzDecompress();
+            byte[] restored = new byte[sectionDecompressedSize];
+            int decompressedLength = decompressor.decompress(compressed, restored);
+            if (decompressedLength != sectionDecompressedSize)
+                throw new Exception("Decompression failure. Expected vs. actual decompressed sizes mismatch: " +
+                        decompressedLength + " / " + sectionDecompressedSize);
+            return restored;
+        }
+    }
+
+    private byte[] duplicateSection(SegmentHeader segmentHeader, int offset) throws Exception{
+        int size = segmentHeader.getSize();
+        byte[] content = new byte[size];
+        try (BufferedInputStream stream = producer.produce()) {
+            if (offset != stream.skip(offset))
+                throw new Exception("Failed to skip header bytes");
+
+            int blockSize = Math.min(size, 0x200);
+
+            long i = 0;
+            byte[] block = new byte[blockSize];
+
+            int actuallyRead;
+            while (true) {
+                if ((actuallyRead = stream.read(block)) != blockSize)
+                    throw new Exception("Read failure. Block Size: " + blockSize + ", actuallyRead: " + actuallyRead);
+                System.arraycopy(block, 0, content, (int) i, blockSize);
+                i += blockSize;
+                if ((i + blockSize) > size) {
+                    blockSize = (int) (size - i);
+                    if (blockSize == 0)
+                        break;
+                    block = new byte[blockSize];
+                }
+            }
+        }
+        return content;
+    }
+
+    private void makeHeader(){
+        textFileOffsetNew = kip1Header.getTextSegmentHeader().getMemoryOffset();
+        roDataFileOffsetNew = kip1Header.getRoDataSegmentHeader().getMemoryOffset();
+        rwDataFileOffsetNew = kip1Header.getRwDataSegmentHeader().getMemoryOffset();
+
+        byte flags = kip1Header.getFlags();
+        flags &= ~0b111; //mark .text .ro .rw as 'not compress'
+
+        ByteBuffer resultingHeader = ByteBuffer.allocate(HEADER_SIZE).order(ByteOrder.LITTLE_ENDIAN);
+        resultingHeader.put("KIP1".getBytes(StandardCharsets.US_ASCII))
+                .put(kip1Header.getName().getBytes(StandardCharsets.US_ASCII));
+        resultingHeader.position(0x10);
+        resultingHeader.put(kip1Header.getProgramId())
+                .putInt(kip1Header.getVersion())
+                .put(kip1Header.getMainThreadPriority())
+                .put(kip1Header.getMainThreadCoreNumber())
+                .put(kip1Header.getReserved1())
+                .put(flags)
+                .putInt(kip1Header.getTextSegmentHeader().getSegmentOffset())
+                .putInt(textFileOffsetNew)
+                .putInt(textFileOffsetNew)
+                .putInt(kip1Header.getThreadAffinityMask())
+                .putInt(kip1Header.getRoDataSegmentHeader().getSegmentOffset())
+                .putInt(roDataFileOffsetNew)
+                .putInt(roDataFileOffsetNew)
+                .putInt(kip1Header.getMainThreadStackSize())
+                .putInt(kip1Header.getRwDataSegmentHeader().getSegmentOffset())
+                .putInt(rwDataFileOffsetNew)
+                .putInt(rwDataFileOffsetNew)
+                .put(kip1Header.getReserved2())
+                .putInt(kip1Header.getBssSegmentHeader().getSegmentOffset())
+                .putInt(kip1Header.getBssSegmentHeader().getMemoryOffset())
+                .putInt(kip1Header.getBssSegmentHeader().getSize())
+                .put(kip1Header.getReserved3())
+                .put(kip1Header.getKernelCapabilityData().getRaw());
+
+        header = resultingHeader.array();
+    }
 
     private void writeFile(String saveToLocation) throws Exception{
         File location = new File(saveToLocation);
@@ -140,11 +192,11 @@ public class Kip1Unpacker {
         try (RandomAccessFile raf = new RandomAccessFile(
                 saveToLocation+File.separator+kip1Header.getName()+DECOMPRESSED_FILE_POSTFIX+".kip1", "rw")){
             raf.write(header);
-            raf.seek(textFileOffsetNew);
+            raf.seek(HEADER_SIZE);
             raf.write(_textDecompressedSection);
-            raf.seek(roDataFileOffsetNew);
+            raf.seek(HEADER_SIZE + textFileOffsetNew);
             raf.write(_roDataDecompressedSection);
-            raf.seek(roDataFileOffsetNew);
+            raf.seek(HEADER_SIZE + textFileOffsetNew + roDataFileOffsetNew);
             raf.write(_rwDataDecompressedSection);
         }
     }


@@ -0,0 +1,89 @@
+/*
+    Copyright 2019-2023 Dmitry Isaenko
+
+    This file is part of libKonogonka.
+
+    libKonogonka is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    libKonogonka is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
+*/
+package libKonogonka.blz;
+
+import libKonogonka.Converter;
+
+public class BlzDecompress {
+    public static final byte BLZ_MASK = (byte) 0x80;
+
+    /**
+     * Decompress BLZ section. Adapted for NS.
+     * @param compressed byte array with compressed data
+     * @param decompressed byte array where decompressed data should be saved in
+     * */
+    public int decompress(byte[] compressed, byte[] decompressed) throws Exception{
+        /* NOTE: values must be unsigned int ! */
+        int additionalLength = Converter.getLEint(compressed, compressed.length-4);
+        int headerSize = Converter.getLEint(compressed, compressed.length-2*4); // 'Footer' aka 'Header'
+        int compressedAndHeaderSize = Converter.getLEint(compressed, compressed.length-3*4);
+
+        if (additionalLength == 0)
+            throw new Exception("File not compressed");
+        else if (additionalLength < 0)
+            throw new Exception("File not supported. Please file a bug "+additionalLength);
+
+        int compressedOffset = compressedAndHeaderSize - headerSize;
+        int finalOffset = compressedAndHeaderSize + additionalLength;
+/*
+        System.out.printf(
+                "Additional length      : 0x%-8x %d %n" +
+                "Header size            : 0x%-8x %d %n" +
+                "Compressed+Header size : 0x%-8x %d %n" +
+                "Compressed offset      : 0x%-8x %d %n" +
+                "Final offset           : 0x%-8x %d %n",
+                additionalLength, additionalLength,
+                headerSize, headerSize,
+                compressedAndHeaderSize, compressedAndHeaderSize,
+                compressedOffset, compressedOffset,
+                finalOffset, finalOffset);
+*/
+        decompress_loop:
+        while (true){
+            byte control = compressed[--compressedOffset];
+
+            for (int i = 0; i < 8; i++){
+                if ((control & BLZ_MASK) == 0) {
+                    if (compressedOffset < 1)
+                        throw new Exception("BLZ decompression is out of range");
+                    decompressed[--finalOffset] = compressed[--compressedOffset];
+                }
+                else {
+                    if (compressedOffset < 2)
+                        throw new Exception("BLZ decompression is out of range");
+                    compressedOffset -= 2;
+                    short segmentValue = (short) (( (compressed[compressedOffset+1]) << 8) | (compressed[compressedOffset] & 0xFF));
+                    int segmentSize = ((segmentValue >> 12) & 0xF) + 3;
+                    int segmentPosition = (segmentValue & 0xFFF) + 3;
+
+                    if (segmentSize > finalOffset)
+                        segmentSize = finalOffset;
+
+                    finalOffset -= segmentSize;
+
+                    for (int j = 0; j < segmentSize; j++)
+                        decompressed[finalOffset + j] = decompressed[finalOffset + j + segmentPosition];
+                }
+                control <<= 1;
+                if (finalOffset == 0)
+                    break decompress_loop;
+            }
+        }
+
+        return additionalLength+compressedAndHeaderSize;
+    }
+}
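decompress() works backwards from the 12-byte footer at the end of the compressed blob (compressed+header size at length-12, header size at length-8, additional length at length-4) and returns the number of bytes it restored. A fragment mirroring how the new Kip1Unpacker.decompressSection() drives it, with buffer sizes taken from the KIP1 segment header:

    byte[] compressed   = new byte[segmentHeader.getSize()];         // compressed section read from the file
    byte[] decompressed = new byte[segmentHeader.getMemoryOffset()]; // expected decompressed size
    int restoredLength  = new BlzDecompress().decompress(compressed, decompressed);
    if (restoredLength != decompressed.length)
        throw new Exception("Decompression failure");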


@@ -115,10 +115,20 @@ public class Package2UnpackedTest {
                     String.format(" Size 0x%x", Files.size(Paths.get("/home/loper/Projects/libKonogonka/FilesForTests/own/KIP1s/"+ kip1Provider.getHeader().getName()+".kip1"))));
         }
     }
 
-    @DisplayName("KIP1 read reference")
+    @DisplayName("KIP1 unpack test")
     @Test
-    void checkReference() throws Exception{
-        KIP1Provider kip1Provider = new KIP1Provider("/home/loper/Projects/libKonogonka/FilesForTests/FS.kip1-fat.dec");
+    void unpackKip1() throws Exception{
+        keyChainHolder = new KeyChainHolder(keysFileLocation, null);
+        System2Provider provider = new System2Provider(fileLocation, keyChainHolder);
+        Ini1Provider ini1Provider = provider.getIni1Provider();
+
+        for (KIP1Provider kip1Provider : ini1Provider.getKip1List())
+            if (kip1Provider.getHeader().getName().startsWith("FS"))
                 kip1Provider.printDebug();
+
+        for (KIP1Provider kip1Provider : ini1Provider.getKip1List()) {
+            if (kip1Provider.getHeader().getName().startsWith("FS"))
+                kip1Provider.exportAsDecompressed("/tmp");
+        }
     }
 }