Refactoring. Start moving to log4j2
continuous-integration/drone/push: Build is failing

master
Dmitry Isaenko 2022-09-05 00:39:48 +03:00
parent 55fb58f25b
commit 220d14e2c1
46 changed files with 2242 additions and 1048 deletions
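The refactoring replaces direct System.out.println() calls with log4j2 loggers across the classes below. A minimal sketch of the pattern this commit applies, assuming only the log4j-core dependency added in pom.xml; the class name SomeProvider is illustrative, not part of the commit:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class SomeProvider {
        // one static logger per class, as NCAContent / NCAProvider / NcaFsHeader now declare
        private final static Logger log = LogManager.getLogger(SomeProvider.class);

        void parse() {
            // before: System.out.println("Thread started");
            log.trace("Thread started");                       // thread start/stop noise
            log.debug("header dump follows");                  // detailed structure dumps
            log.error("failed to parse", new Exception("example")); // errors, with stack trace
        }
    }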


@ -1,4 +1,4 @@
# konogonka
# libKonogonka
[![Build Status](https://ci.redrise.ru/api/badges/desu/libKonogonka/status.svg)](https://ci.redrise.ru/desu/libKonogonka)

pom.xml

@ -6,7 +6,7 @@
<groupId>ru.redrise</groupId>
<artifactId>libKonogonka</artifactId>
<version>0.1</version>
<version>0.1-SNAPSHOT</version>
<url>https://git.redrise.ru/desu/${project.name}/</url>
<description>
@ -62,8 +62,34 @@
<version>1.0</version>
<scope>compile</scope>
</dependency>
</dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.18.0</version>
<scope>compile</scope>
</dependency>
<!-- testing -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}-${project.version}-${maven.build.timestamp}</finalName>
<plugins>
@ -100,6 +126,7 @@
</executions>
</plugin>
-->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
@ -109,7 +136,7 @@
<target>8</target>
</configuration>
</plugin>
<!-- Generate JAR with dependencies -->
<!-- Generate JAR with dependencies
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
@ -123,18 +150,19 @@
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
<!-- <appendAssemblyId>false</appendAssemblyId> -->
<!- <appendAssemblyId>false</appendAssemblyId> -
</configuration>
<executions>
<execution>
<id>make-assembly</id> <!-- this is used for inheritance merges -->
<phase>package</phase> <!-- bind to the packaging phase -->
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
-->
</plugins>
</build>
</project>


@ -21,7 +21,7 @@ package libKonogonka;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
public class LoperConverter {
public class Converter {
public static int getLEint(byte[] bytes, int fromOffset){
return ByteBuffer.wrap(bytes, fromOffset, 0x4).order(ByteOrder.LITTLE_ENDIAN).getInt();
}
@ -30,7 +30,7 @@ public class LoperConverter {
return ByteBuffer.wrap(bytes, fromOffset, 0x8).order(ByteOrder.LITTLE_ENDIAN).getLong();
}
/**
* Convert int to long. Workaround to store unsigned int
* Convert (usually unsigned) int to long. Workaround to store unsigned int
* @param bytes original array
* @param fromOffset start position of the 4-bytes value
* */
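The helper documented above is used later in the diff as Converter.getLElongOfInt(table, 0); it presumably reads four little-endian bytes and widens them into a non-negative long. A sketch of how such a method can be written, assuming that signature (not copied from the commit):

    // Sketch: keep the unsigned 32-bit value by masking off sign extension.
    public static long getLElongOfInt(byte[] bytes, int fromOffset){
        return ByteBuffer.wrap(bytes, fromOffset, 0x4)
                .order(ByteOrder.LITTLE_ENDIAN)
                .getInt() & 0xFFFFFFFFL;
    }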


@ -0,0 +1,103 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.HashMap;
public class KeyChainHolder {
private final File keysFile;
private final String xci_header_key;
private HashMap<String, String> rawKeySet;
private HashMap<String, String> key_area_key_application,
key_area_key_ocean,
key_area_key_system,
titlekek;
public KeyChainHolder(String pathToKeysFile, String xci_header_key) throws Exception{
this(new File(pathToKeysFile), xci_header_key);
}
public KeyChainHolder(File keysFile, String xci_header_key) throws Exception{
this.keysFile = keysFile;
this.xci_header_key = xci_header_key;
collectEverything();
}
private void collectEverything() throws Exception{
rawKeySet = new HashMap<>();
BufferedReader br = new BufferedReader(new FileReader(keysFile));
String fileLine;
String[] keyValue;
while ((fileLine = br.readLine()) != null){
keyValue = fileLine.trim().split("\\s+?=\\s+?", 2);
if (keyValue.length == 2)
rawKeySet.put(keyValue[0], keyValue[1]);
}
key_area_key_application = collectKeysByType("key_area_key_application");
key_area_key_ocean = collectKeysByType("key_area_key_ocean");
key_area_key_system = collectKeysByType("key_area_key_system");
titlekek = collectKeysByType("titlekek");
}
private HashMap<String, String> collectKeysByType(String keyName){
HashMap<String, String> tempKeySet = new HashMap<>();
String keyNamePattern = keyName+"_%02x";
String keyParsed;
int counter = 0;
// look the numbered keys up in the parsed keys file, not in a fresh empty map
while ((keyParsed = rawKeySet.get(String.format(keyNamePattern, counter))) != null){
tempKeySet.put(String.format(keyNamePattern, counter), keyParsed);
counter++;
}
return tempKeySet;
}
public String getXci_header_key() {
return xci_header_key;
}
public String getHeader_key() {
return rawKeySet.get("header_key");
}
public HashMap<String, String> getRawKeySet() {
return rawKeySet;
}
public HashMap<String, String> getKey_area_key_application() {
return key_area_key_application;
}
public HashMap<String, String> getKey_area_key_ocean() {
return key_area_key_ocean;
}
public HashMap<String, String> getKey_area_key_system() {
return key_area_key_system;
}
public HashMap<String, String> getTitlekek() {
return titlekek;
}
}
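A usage sketch for KeyChainHolder, assuming a standard prod.keys-style file; the path and the XCI header key string below are placeholders, not values from this commit:

    // Hypothetical usage; "/tmp/prod.keys" and the header key are placeholders.
    KeyChainHolder keyChain = new KeyChainHolder("/tmp/prod.keys", "00000000000000000000000000000000");
    String headerKey = keyChain.getHeader_key();                 // raw value from the keys file
    HashMap<String, String> titleKeks = keyChain.getTitlekek();  // titlekek_00, titlekek_01, ...
    System.out.println("titlekek generations found: " + titleKeks.size());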


@ -18,8 +18,9 @@
*/
package libKonogonka.Tools.NCA;
import libKonogonka.LoperConverter;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NCASectionBlock;
import libKonogonka.Converter;
import libKonogonka.RainbowDump;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import libKonogonka.Tools.PFS0.IPFS0Provider;
import libKonogonka.Tools.PFS0.PFS0EncryptedProvider;
import libKonogonka.Tools.PFS0.PFS0Provider;
@ -27,6 +28,8 @@ import libKonogonka.Tools.RomFs.IRomFsProvider;
import libKonogonka.Tools.RomFs.RomFsEncryptedProvider;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import libKonogonka.exceptions.EmptySectionException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.util.LinkedList;
@ -35,26 +38,28 @@ import java.util.LinkedList;
* TODO: MAKE SOME DECOMPOSITION
* */
public class NCAContent {
private File file;
private long offsetPosition;
private NCASectionBlock ncaSectionBlock;
private NCAHeaderTableEntry ncaHeaderTableEntry;
private byte[] decryptedKey;
private final static Logger log = LogManager.getLogger(NCAContent.class);
private final File file;
private final long offsetPosition;
private final NcaFsHeader ncaFsHeader;
private final NCAHeaderTableEntry ncaHeaderTableEntry;
private final byte[] decryptedKey;
private LinkedList<byte[]> Pfs0SHA256hashes;
private final LinkedList<byte[]> Pfs0SHA256hashes;
private IPFS0Provider pfs0;
private IRomFsProvider romfs;
// TODO: if decryptedKey is empty, throw exception ??
public NCAContent(File file,
long offsetPosition,
NCASectionBlock ncaSectionBlock,
NcaFsHeader ncaFsHeader,
NCAHeaderTableEntry ncaHeaderTableEntry,
byte[] decryptedKey) throws Exception
{
this.file = file;
this.offsetPosition = offsetPosition;
this.ncaSectionBlock = ncaSectionBlock;
this.ncaFsHeader = ncaFsHeader;
this.ncaHeaderTableEntry = ncaHeaderTableEntry;
this.decryptedKey = decryptedKey;
@ -63,16 +68,16 @@ public class NCAContent {
if (ncaHeaderTableEntry.getMediaEndOffset() == 0)
throw new EmptySectionException("Empty section");
// If it's PFS0Provider
if (ncaSectionBlock.getSuperBlockPFS0() != null)
if (ncaFsHeader.getSuperBlockPFS0() != null)
this.proceedPFS0();
else if (ncaSectionBlock.getSuperBlockIVFC() != null)
else if (ncaFsHeader.getSuperBlockIVFC() != null)
this.proceedRomFs();
else
throw new Exception("NCAContent(): Not supported. PFS0 or RomFS supported only.");
}
private void proceedPFS0() throws Exception {
switch (ncaSectionBlock.getCryptoType()){
switch (ncaFsHeader.getCryptoType()){
case 0x01:
proceedPFS0NotEncrypted(); // IF NO ENCRYPTION
break;
@ -86,13 +91,13 @@ public class NCAContent {
private void proceedPFS0NotEncrypted() throws Exception{
RandomAccessFile raf = new RandomAccessFile(file, "r");
long thisMediaLocation = offsetPosition + (ncaHeaderTableEntry.getMediaStartOffset() * 0x200);
long hashTableLocation = thisMediaLocation + ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset();
long pfs0Location = thisMediaLocation + ncaSectionBlock.getSuperBlockPFS0().getPfs0offset();
long hashTableLocation = thisMediaLocation + ncaFsHeader.getSuperBlockPFS0().getHashTableOffset();
long pfs0Location = thisMediaLocation + ncaFsHeader.getSuperBlockPFS0().getPfs0offset();
raf.seek(hashTableLocation);
byte[] rawData;
long sha256recordsNumber = ncaSectionBlock.getSuperBlockPFS0().getHashTableSize() / 0x20;
long sha256recordsNumber = ncaFsHeader.getSuperBlockPFS0().getHashTableSize() / 0x20;
// Collect hashes
for (int i = 0; i < sha256recordsNumber; i++){
rawData = new byte[0x20]; // 32 bytes - size of SHA256 hash
@ -111,13 +116,13 @@ public class NCAContent {
new CryptoSection03Pfs0(file,
offsetPosition,
decryptedKey,
ncaSectionBlock,
ncaFsHeader,
ncaHeaderTableEntry.getMediaStartOffset(),
ncaHeaderTableEntry.getMediaEndOffset());
}
private void proceedRomFs() throws Exception{
switch (ncaSectionBlock.getCryptoType()){
switch (ncaFsHeader.getCryptoType()){
case 0x01:
proceedRomFsNotEncrypted(); // IF NO ENCRYPTION
break;
@ -125,23 +130,22 @@ public class NCAContent {
proceedRomFsEncrypted(); // If encrypted regular [ 0x03 ]
break;
default:
throw new Exception("NCAContent() -> proceedRomFs(): Non-supported 'Crypto type'");
throw new Exception("Non-supported 'Crypto type'");
}
}
private void proceedRomFsNotEncrypted(){
// TODO: Clarify, implement if needed
System.out.println("proceedRomFs() -> proceedRomFsNotEncrypted() is not implemented :(");
private void proceedRomFsNotEncrypted(){ // TODO: Clarify, implement if needed
log.error("proceedRomFs() -> proceedRomFsNotEncrypted() is not implemented :(");
}
private void proceedRomFsEncrypted() throws Exception{
if (decryptedKey == null)
throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided.");
this.romfs = new RomFsEncryptedProvider(
offsetPosition,
ncaSectionBlock.getSuperBlockIVFC().getLvl6Offset(),
ncaFsHeader.getSuperBlockIVFC().getLvl6Offset(),
file,
offsetPosition,
decryptedKey,
ncaSectionBlock.getSectionCTR(),
ncaFsHeader.getSectionCTR(),
ncaHeaderTableEntry.getMediaStartOffset(),
ncaHeaderTableEntry.getMediaEndOffset());
}
@ -151,27 +155,24 @@ public class NCAContent {
public IRomFsProvider getRomfs() { return romfs; }
private class CryptoSection03Pfs0 {
CryptoSection03Pfs0(File file,
long offsetPosition,
byte[] decryptedKey,
NCASectionBlock ncaSectionBlock,
NcaFsHeader ncaFsHeader,
long mediaStartBlocksOffset,
long mediaEndBlocksOffset) throws Exception
{
/*//--------------------------------------------------------------------------------------------------
System.out.println("Media start location: " + mediaStartBlocksOffset);
System.out.println("Media end location: " + mediaEndBlocksOffset);
System.out.println("Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset));
System.out.println("Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200)));
System.out.println("SHA256 hash tbl size: " + ncaSectionBlock.getSuperBlockPFS0().getHashTableSize());
System.out.println("SHA256 hash tbl offs: " + ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset());
System.out.println("PFS0 Offs: " + ncaSectionBlock.getSuperBlockPFS0().getPfs0offset());
System.out.println("SHA256 records: " + (ncaSectionBlock.getSuperBlockPFS0().getHashTableSize() / 0x20));
System.out.println("KEY: " + LoperConverter.byteArrToHexString(decryptedKey));
System.out.println("CTR: " + LoperConverter.byteArrToHexString(ncaSectionBlock.getSectionCTR()));
System.out.println();
//--------------------------------------------------------------------------------------------------*/
log.debug( "-== Crypto Section 03 PFS0 ==-\n" +
"Media start location: " + RainbowDump.formatDecHexString(mediaStartBlocksOffset) + "\n" +
"Media end location: " + RainbowDump.formatDecHexString(mediaEndBlocksOffset) + "\n" +
"Media size: " + RainbowDump.formatDecHexString((mediaEndBlocksOffset-mediaStartBlocksOffset)) + "\n" +
"Media actual location: " + RainbowDump.formatDecHexString((offsetPosition + (mediaStartBlocksOffset * 0x200))) + "\n" +
"SHA256 hash table size: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getHashTableSize()) + "\n" +
"SHA256 hash table offs: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getHashTableOffset()) + "\n" +
"PFS0 Offset: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getPfs0offset()) + "\n" +
"SHA256 records: " + RainbowDump.formatDecHexString((ncaFsHeader.getSuperBlockPFS0().getHashTableSize() / 0x20)) + "\n" +
"KEY (decrypted): " + Converter.byteArrToHexString(decryptedKey) + "\n" +
"CTR: " + Converter.byteArrToHexString(ncaFsHeader.getSectionCTR()) + "\n");
if (decryptedKey == null)
throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided.");
@ -179,7 +180,7 @@ public class NCAContent {
long abosluteOffsetPosition = offsetPosition + (mediaStartBlocksOffset * 0x200);
raf.seek(abosluteOffsetPosition);
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey, ncaSectionBlock.getSectionCTR(), mediaStartBlocksOffset * 0x200);
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey, ncaFsHeader.getSectionCTR(), mediaStartBlocksOffset * 0x200);
byte[] encryptedBlock;
byte[] dectyptedBlock;
@ -190,13 +191,13 @@ public class NCAContent {
Thread pThread = new Thread(new ParseThread(
streamInp,
ncaSectionBlock.getSuperBlockPFS0().getPfs0offset(),
ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset(),
ncaSectionBlock.getSuperBlockPFS0().getHashTableSize(),
ncaFsHeader.getSuperBlockPFS0().getPfs0offset(),
ncaFsHeader.getSuperBlockPFS0().getHashTableOffset(),
ncaFsHeader.getSuperBlockPFS0().getHashTableSize(),
offsetPosition,
file,
decryptedKey,
ncaSectionBlock.getSectionCTR(),
ncaFsHeader.getSectionCTR(),
mediaStartBlocksOffset,
mediaEndBlocksOffset
));
@ -206,7 +207,7 @@ public class NCAContent {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) != -1){
//dectyptedBlock = aesCtr.decrypt(encryptedBlock);
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
dectyptedBlock = decryptor.decryptNext(encryptedBlock);
// Writing decrypted data to pipe
try {
streamOut.write(dectyptedBlock);
@ -220,8 +221,8 @@ public class NCAContent {
streamOut.close();
raf.close();
}
/*
* Since we representing decrypted data as stream (it's easier to look on it this way),
/**
* Since we're representing decrypted data as stream (it's easier to look on it this way),
* this thread will be parsing it.
* */
private class ParseThread implements Runnable{
@ -233,12 +234,12 @@ public class NCAContent {
long hashTableRecordsCount;
long pfs0offset;
private long MetaOffsetPositionInFile;
private File MetaFileWithEncPFS0;
private byte[] MetaKey;
private byte[] MetaSectionCTR;
private long MetaMediaStartOffset;
private long MetaMediaEndOffset;
private final long MetaOffsetPositionInFile;
private final File MetaFileWithEncPFS0;
private final byte[] MetaKey;
private final byte[] MetaSectionCTR;
private final long MetaMediaStartOffset;
private final long MetaMediaEndOffset;
ParseThread(PipedInputStream pipedInputStream,
@ -313,12 +314,9 @@ public class NCAContent {
pipedInputStream.close();
}
catch (Exception e){
System.out.println("'ParseThread' thread exception");
e.printStackTrace();
}
finally {
System.out.println("Thread dies");
log.debug("NCA Content parsing thread exception: ", e);
}
//finally { System.out.println("NCA Content thread dies");}
}
}
}
@ -334,18 +332,17 @@ public class NCAContent {
RandomAccessFile raf = new RandomAccessFile(file, "r");
///--------------------------------------------------------------------------------------------------
System.out.println("NCAContent() -> exportEncryptedSectionType03() Debug information");
System.out.println("Media start location: " + mediaStartBlocksOffset);
System.out.println("Media end location: " + mediaEndBlocksOffset);
System.out.println("Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset));
System.out.println("Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200)));
System.out.println("KEY: " + LoperConverter.byteArrToHexString(decryptedKey));
System.out.println("CTR: " + LoperConverter.byteArrToHexString(ncaSectionBlock.getSectionCTR()));
System.out.println();
log.debug("NCAContent() -> exportEncryptedSectionType03() information" + "\n" +
"Media start location: " + mediaStartBlocksOffset + "\n" +
"Media end location: " + mediaEndBlocksOffset + "\n" +
"Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset) + "\n" +
"Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200)) + "\n" +
"KEY: " + Converter.byteArrToHexString(decryptedKey) + "\n" +
"CTR: " + Converter.byteArrToHexString(ncaFsHeader.getSectionCTR()) + "\n");
//---------------------------------------------------------------------------------------------------/
if (ncaSectionBlock.getCryptoType() == 0x01){
System.out.println("NCAContent -> getRawDataContentPipedInpStream (Zero encryption section type 01): Thread started");
if (ncaFsHeader.getCryptoType() == 0x01){
log.trace("NCAContent -> getRawDataContentPipedInpStream (Zero encryption section type 01): Thread started");
Thread workerThread;
PipedOutputStream streamOut = new PipedOutputStream();
@ -363,8 +360,7 @@ public class NCAContent {
}
}
catch (Exception e){
System.out.println("NCAContent -> exportRawData(): "+e.getMessage());
e.printStackTrace();
log.error("NCAContent -> exportRawData() failure", e);
}
finally {
try {
@ -374,13 +370,13 @@ public class NCAContent {
streamOut.close();
}catch (Exception ignored) {}
}
System.out.println("NCAContent -> exportRawData(): Thread died");
log.trace("NCAContent -> exportRawData(): Thread died");
});
workerThread.start();
return streamIn;
}
else if (ncaSectionBlock.getCryptoType() == 0x03){
System.out.println("NCAContent -> getRawDataContentPipedInpStream (Encrypted Section Type 03): Thread started");
else if (ncaFsHeader.getCryptoType() == 0x03){
log.trace("NCAContent -> getRawDataContentPipedInpStream (Encrypted Section Type 03): Thread started");
if (decryptedKey == null)
throw new Exception("NCAContent -> exportRawData(): unable to proceed. No decrypted key provided.");
@ -396,7 +392,7 @@ public class NCAContent {
raf.seek(abosluteOffsetPosition);
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey,
ncaSectionBlock.getSectionCTR(),
ncaFsHeader.getSectionCTR(),
mediaStartBlocksOffset * 0x200);
byte[] encryptedBlock;
@ -406,7 +402,7 @@ public class NCAContent {
for (int i = 0; i < mediaBlocksSize; i++){
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) != -1){
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
dectyptedBlock = decryptor.decryptNext(encryptedBlock);
// Writing decrypted data to pipe
streamOut.write(dectyptedBlock);
}
@ -415,8 +411,7 @@ public class NCAContent {
}
}
catch (Exception e){
System.out.println("NCAContent -> exportRawData(): "+e.getMessage());
e.printStackTrace();
log.error("NCAContent -> exportRawData(): ", e);
}
finally {
try {
@ -426,7 +421,7 @@ public class NCAContent {
streamOut.close();
}catch (Exception ignored) {}
}
System.out.println("NCAContent -> exportRawData(): Thread died");
log.trace("NCAContent -> exportRawData(): Thread died");
});
workerThread.start();
return streamIn;
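The decryption path above pipes 0x200-byte AES-CTR blocks from a reader thread into a PipedInputStream that the parsing thread consumes. A stripped-down sketch of that producer/consumer pattern; decryptBlock() stands in for AesCtrDecryptSimple.decryptNext(), and file/log are the enclosing class fields:

    // Sketch of the piped-stream pattern used above; decryptBlock() is a stand-in, not part of this commit.
    PipedOutputStream streamOut = new PipedOutputStream();
    PipedInputStream streamIn = new PipedInputStream(streamOut);

    Thread producer = new Thread(() -> {
        try (RandomAccessFile raf = new RandomAccessFile(file, "r")) {
            byte[] encryptedBlock = new byte[0x200];
            while (raf.read(encryptedBlock) != -1) {
                streamOut.write(decryptBlock(encryptedBlock)); // decrypt, then hand to the parser
            }
        }
        catch (Exception e){ log.error("producer failed", e); }
        finally {
            try { streamOut.close(); } catch (Exception ignored) {}
        }
    });
    producer.start();
    // streamIn is then read by the parsing thread (ParseThread in the class above).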


@ -18,34 +18,26 @@
*/
package libKonogonka.Tools.NCA;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import libKonogonka.Converter;
import java.util.Arrays;
public class NCAHeaderTableEntry {
private long mediaStartOffset;
private long mediaEndOffset;
private byte[] unknwn1;
private byte[] unknwn2;
private final long mediaStartOffset;
private final long mediaEndOffset;
private final byte[] unknwn1;
private final byte[] unknwn2;
public NCAHeaderTableEntry(byte[] table) throws Exception{
if (table.length < 0x10)
throw new Exception("Section Table size is too small.");
this.mediaStartOffset = convertUnsignedIntBytesToLong(Arrays.copyOfRange(table, 0x0, 0x4));
this.mediaEndOffset = convertUnsignedIntBytesToLong(Arrays.copyOfRange(table, 0x4, 0x8));
this.mediaStartOffset = Converter.getLElongOfInt(table, 0);
this.mediaEndOffset = Converter.getLElongOfInt(table, 0x4);
this.unknwn1 = Arrays.copyOfRange(table, 0x8, 0xC);
this.unknwn2 = Arrays.copyOfRange(table, 0xC, 0x10);
}
private long convertUnsignedIntBytesToLong(byte[] intBytes){
if (intBytes.length == 4)
return ByteBuffer.wrap(Arrays.copyOf(intBytes, 8)).order(ByteOrder.LITTLE_ENDIAN).getLong();
else
return -1;
}
public long getMediaStartOffset() { return mediaStartOffset; }
public long getMediaEndOffset() { return mediaEndOffset; }
public byte[] getUnknwn1() { return unknwn1; }


@ -18,27 +18,32 @@
*/
package libKonogonka.Tools.NCA;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NCASectionBlock;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import libKonogonka.exceptions.EmptySectionException;
import libKonogonka.xtsaes.XTSAESCipher;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.crypto.params.KeyParameter;
import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec;
import java.io.File;
import java.io.RandomAccessFile;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import static libKonogonka.LoperConverter.byteArrToHexString;
import static libKonogonka.LoperConverter.getLElong;
import static libKonogonka.Converter.byteArrToHexString;
import static libKonogonka.Converter.getLElong;
// TODO: check file size
public class NCAProvider {
private File file; // File that contains NCA
private long offset; // Offset where NCA actually located
private HashMap<String, String> keys; // hashmap with keys using _0x naming (where x number 0-N)
private final static Logger log = LogManager.getLogger(NCAProvider.class);
private final File file; // File that contains NCA
private final long offset; // Offset where NCA actually located
private final HashMap<String, String> keys; // hashmap with keys using _0x naming (where x number 0-N)
// Header
private byte[] rsa2048one;
private byte[] rsa2048two;
@ -78,10 +83,10 @@ public class NCAProvider {
private NCAHeaderTableEntry tableEntry2;
private NCAHeaderTableEntry tableEntry3;
private NCASectionBlock sectionBlock0;
private NCASectionBlock sectionBlock1;
private NCASectionBlock sectionBlock2;
private NCASectionBlock sectionBlock3;
private NcaFsHeader sectionBlock0;
private NcaFsHeader sectionBlock1;
private NcaFsHeader sectionBlock2;
private NcaFsHeader sectionBlock3;
private NCAContent ncaContent0;
private NCAContent ncaContent1;
@ -93,6 +98,7 @@ public class NCAProvider {
}
public NCAProvider (File file, HashMap<String, String> keys, long offsetPosition) throws Exception{
this.file = file;
this.keys = keys;
String header_key = keys.get("header_key");
if (header_key == null )
@ -100,15 +106,10 @@ public class NCAProvider {
if (header_key.length() != 64)
throw new Exception("header_key is too small or too big. Must be 64 symbols.");
this.file = file;
this.offset = offsetPosition;
KeyParameter key1 = new KeyParameter(
hexStrToByteArray(header_key.substring(0, 32))
);
KeyParameter key2 = new KeyParameter(
hexStrToByteArray(header_key.substring(32, 64))
);
KeyParameter key1 = new KeyParameter(hexStrToByteArray(header_key.substring(0, 32)));
KeyParameter key2 = new KeyParameter(hexStrToByteArray(header_key.substring(32, 64)));
XTSAESCipher xtsaesCipher = new XTSAESCipher(false);
xtsaesCipher.init(false, key1, key2);
@ -134,18 +135,20 @@ public class NCAProvider {
raf.close();
getNCAContent();
/*
//---------------------------------------------------------------------
/*//---------------------------------------------------------------------
FileInputStream fis = new FileInputStream(file);
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream("/tmp/decrypted.nca"));
int i = 0;
byte[] block = new byte[0x200];
while (fis.read(block) != -1){
decryptedSequence = new byte[0x200];
xtsaesCipher.processDataUnit(block, 0, 0x200, decryptedSequence, 0, i++);
bos.write(decryptedSequence);
try (BufferedOutputStream bos = new BufferedOutputStream(Files.newOutputStream(Paths.get("/tmp/decrypted.nca")))){
int i = 0;
byte[] block = new byte[0x200];
while (fis.read(block) != -1){
decryptedSequence = new byte[0x200];
xtsaesCipher.processDataUnit(block, 0, 0x200, decryptedSequence, 0, i++);
bos.write(decryptedSequence);
}
}
catch (Exception e){
throw new Exception("Failed to export decrypted AES-XTS", e);
}
bos.close();
//---------------------------------------------------------------------*/
}
@ -193,10 +196,12 @@ public class NCAProvider {
else
cryptoTypeReal = cryptoType1;
if (cryptoTypeReal > 0) // TODO: CLARIFY WHY THE FUCK IS IT FAIR????
if (cryptoTypeReal > 0) // TODO: CLARIFY WHY THE FUCK IS IT FAIR????
cryptoTypeReal -= 1;
//todo: if nca3 proceed
//If nca3 proceed
if (! magicnum.equalsIgnoreCase("NCA3"))
throw new Exception("Not supported data type: "+magicnum+". Only NCA3 supported");
// Decrypt keys if encrypted
if (Arrays.equals(rightsId, new byte[0x10])) {
String keyAreaKey;
@ -232,10 +237,10 @@ public class NCAProvider {
tableEntry2 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x20, 0x30));
tableEntry3 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x30, 0x40));
sectionBlock0 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x400, 0x600));
sectionBlock1 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x600, 0x800));
sectionBlock2 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x800, 0xa00));
sectionBlock3 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0xa00, 0xc00));
sectionBlock0 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x400, 0x600));
sectionBlock1 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x600, 0x800));
sectionBlock2 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x800, 0xa00));
sectionBlock3 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0xa00, 0xc00));
}
private void keyAreaKeyNotSupportedOrFound() throws Exception{
@ -259,12 +264,12 @@ public class NCAProvider {
throw new Exception(exceptionStringBuilder.toString());
}
private void getNCAContent(){
private void getNCAContent() throws Exception{
byte[] key;
// If empty Rights ID
if (Arrays.equals(rightsId, new byte[0x10])) {
key = decryptedKey2; // TODO: Just remember this dumb hack
key = decryptedKey2; // NOTE: Just remember this dumb hack
}
else {
try {
@ -278,42 +283,35 @@ public class NCAProvider {
key = cipher.doFinal(rightsIDkey);
}
catch (Exception e){
e.printStackTrace();
System.out.println("No title.keys loaded?");
return;
throw new Exception("No title.keys loaded?", e);
}
}
getNcaContentByNumber(0, key);
getNcaContentByNumber(1, key);
getNcaContentByNumber(2, key);
getNcaContentByNumber(3, key);
}
private void getNcaContentByNumber(int number, byte[] key){
try {
this.ncaContent0 = new NCAContent(file, offset, sectionBlock0, tableEntry0, key);
}
catch (EmptySectionException ignored){}
catch (Exception e){
this.ncaContent0 = null;
e.printStackTrace();
}
try{
this.ncaContent1 = new NCAContent(file, offset, sectionBlock1, tableEntry1, key);
}
catch (EmptySectionException ignored){}
catch (Exception e){
this.ncaContent1 = null;
e.printStackTrace();
}
try{
this.ncaContent2 = new NCAContent(file, offset, sectionBlock2, tableEntry2, key);
}
catch (EmptySectionException ignored){}
catch (Exception e){
this.ncaContent2 = null;
e.printStackTrace();
}
try{
this.ncaContent3 = new NCAContent(file, offset, sectionBlock3, tableEntry3, key);
switch (number) {
case 0:
this.ncaContent0 = new NCAContent(file, offset, sectionBlock0, tableEntry0, key);
break;
case 1:
this.ncaContent1 = new NCAContent(file, offset, sectionBlock1, tableEntry1, key);
break;
case 2:
this.ncaContent2 = new NCAContent(file, offset, sectionBlock2, tableEntry2, key);
break;
case 3:
this.ncaContent3 = new NCAContent(file, offset, sectionBlock3, tableEntry3, key);
break;
}
}
catch (EmptySectionException ignored){}
catch (Exception e){
this.ncaContent3 = null;
e.printStackTrace();
log.debug("Unable to get NCA Content "+number, e);
}
}
@ -347,20 +345,54 @@ public class NCAProvider {
public byte[] getDecryptedKey1() { return decryptedKey1; }
public byte[] getDecryptedKey2() { return decryptedKey2; }
public byte[] getDecryptedKey3() { return decryptedKey3; }
/**
* Get NCA Header Table Entry for selected id
* @param id should be 0-3
* */
public NCAHeaderTableEntry getTableEntry(int id) throws Exception{
switch (id) {
case 0:
return getTableEntry0();
case 1:
return getTableEntry1();
case 2:
return getTableEntry2();
case 3:
return getTableEntry3();
default:
throw new Exception("NCA Table Entry must be defined in range 0-3 while '"+id+"' requested");
}
}
public NCAHeaderTableEntry getTableEntry0() { return tableEntry0; }
public NCAHeaderTableEntry getTableEntry1() { return tableEntry1; }
public NCAHeaderTableEntry getTableEntry2() { return tableEntry2; }
public NCAHeaderTableEntry getTableEntry3() { return tableEntry3; }
/**
* Get NCA Section Block for selected section
* @param id should be 0-3
* */
public NcaFsHeader getSectionBlock(int id) throws Exception{
switch (id) {
case 0:
return getSectionBlock0();
case 1:
return getSectionBlock1();
case 2:
return getSectionBlock2();
case 3:
return getSectionBlock3();
default:
throw new Exception("NCA Section Block must be defined in range 0-3 while '"+id+"' requested");
}
}
public NcaFsHeader getSectionBlock0() { return sectionBlock0; }
public NcaFsHeader getSectionBlock1() { return sectionBlock1; }
public NcaFsHeader getSectionBlock2() { return sectionBlock2; }
public NcaFsHeader getSectionBlock3() { return sectionBlock3; }
public NCASectionBlock getSectionBlock0() { return sectionBlock0; }
public NCASectionBlock getSectionBlock1() { return sectionBlock1; }
public NCASectionBlock getSectionBlock2() { return sectionBlock2; }
public NCASectionBlock getSectionBlock3() { return sectionBlock3; }
public boolean isKeyAvailable(){ // TODO: USE
public boolean isKeyAvailable(){ // NOTE: never used
if (Arrays.equals(rightsId, new byte[0x10]))
return true;
return false;
else
return keys.containsKey(byteArrToHexString(rightsId));
}
@ -368,7 +400,7 @@ public class NCAProvider {
* Get content for the selected section
* @param sectionNumber should be 0-3
* */
public NCAContent getNCAContentProvider(int sectionNumber){
public NCAContent getNCAContentProvider(int sectionNumber) throws Exception{
switch (sectionNumber) {
case 0:
return ncaContent0;
@ -379,7 +411,7 @@ public class NCAProvider {
case 3:
return ncaContent3;
default:
return null;
throw new Exception("NCA Content must be requested in range of 0-3, while 'Section Number "+sectionNumber+"' requested");
}
}
}
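A usage sketch for the new range-checked accessors, assuming a key map already loaded through KeyChainHolder and called from a method that declares throws Exception; the .nca path is a placeholder:

    // Hypothetical usage; "/tmp/title.nca" is a placeholder.
    NCAProvider nca = new NCAProvider(new File("/tmp/title.nca"), keyChain.getRawKeySet(), 0);
    for (int i = 0; i < 4; i++) {
        NcaFsHeader fsHeader = nca.getSectionBlock(i);  // throws if i is outside 0-3
        if (fsHeader != null)
            fsHeader.printDebug();                      // log4j2 dump of the FS header fields
    }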


@ -0,0 +1,43 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.Converter.getLEint;
public class BucketTreeHeader {
private final String magic;
private final int version;
private final int entryCount;
private final byte[] unknown;
BucketTreeHeader(byte[] rawBytes){
magic = new String(Arrays.copyOfRange(rawBytes, 0x0, 0x4), StandardCharsets.US_ASCII);
version = getLEint(rawBytes, 0x4);
entryCount = getLEint(rawBytes, 0x8);
unknown = Arrays.copyOfRange(rawBytes, 0xc, 0x10);
}
public String getMagic() {return magic;}
public int getVersion() {return version;}
public int getEntryCount() {return entryCount;}
public byte[] getUnknown() {return unknown;}
}


@ -0,0 +1,45 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class CompressionInfo {
private final long offset;
private final long size;
private final BucketTreeHeader bktr;
private final byte[] unknown;
CompressionInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
bktr = new BucketTreeHeader(Arrays.copyOfRange(rawTable, 0x10, 0x20));
unknown = Arrays.copyOfRange(rawTable, 0x20, 0x28);
}
public long getOffset() {return offset;}
public long getSize() {return size;}
public String getBktrMagic() { return bktr.getMagic(); }
public int getBktrVersion() { return bktr.getVersion(); }
public int getBktrEntryCount() { return bktr.getEntryCount(); }
public byte[] getBktrUnknown() { return bktr.getUnknown(); }
public byte[] getUnknown() {return unknown;}
}


@ -0,0 +1,38 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class MetaDataHashDataInfo {
private final long offset;
private final long size;
private final byte[] tableHash;
MetaDataHashDataInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
tableHash = Arrays.copyOfRange(rawTable, 0x10, 0x20);
}
public long getOffset() {return offset;}
public long getSize() {return size;}
public byte[] getTableHash() {return tableHash;}
}


@ -1,113 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
public class NCASectionBlock {
private byte[] version;
private byte fsType;
private byte hashType;
private byte cryptoType;
private byte[] padding;
private SuperBlockIVFC superBlockIVFC;
private SuperBlockPFS0 superBlockPFS0;
private byte[] BKTRfullHeader;
// BKTR extended
private long BKTRoffsetSection1;
private long BKTRsizeSection1;
private String BKTRmagicSection1;
private int BKTRu32Section1;
private int BKTRs32Section1;
private byte[] BKTRunknownSection1;
private long BKTRoffsetSection2;
private long BKTRsizeSection2;
private String BKTRmagicSection2;
private int BKTRu32Section2;
private int BKTRs32Section2;
private byte[] BKTRunknownSection2;
private byte[] sectionCTR;
private byte[] unknownEndPadding;
public NCASectionBlock(byte[] tableBlockBytes) throws Exception{
if (tableBlockBytes.length != 0x200)
throw new Exception("Table Block Section size is incorrect.");
version = Arrays.copyOfRange(tableBlockBytes, 0, 0x2);
fsType = tableBlockBytes[0x2];
hashType = tableBlockBytes[0x3];
cryptoType = tableBlockBytes[0x4];
padding = Arrays.copyOfRange(tableBlockBytes, 0x5, 0x8);
byte[] superBlockBytes = Arrays.copyOfRange(tableBlockBytes, 0x8, 0xf8);
if ((fsType == 0) && (hashType == 0x3))
superBlockIVFC = new SuperBlockIVFC(superBlockBytes);
else if ((fsType == 0x1) && (hashType == 0x2))
superBlockPFS0 = new SuperBlockPFS0(superBlockBytes);
BKTRfullHeader = Arrays.copyOfRange(tableBlockBytes, 0x100, 0x140);
BKTRoffsetSection1 = getLElong(BKTRfullHeader, 0);
BKTRsizeSection1 = getLElong(BKTRfullHeader, 0x8);
BKTRmagicSection1 = new String(Arrays.copyOfRange(BKTRfullHeader, 0x10, 0x14), StandardCharsets.US_ASCII);
BKTRu32Section1 = getLEint(BKTRfullHeader, 0x14);
BKTRs32Section1 = getLEint(BKTRfullHeader, 0x18);
BKTRunknownSection1 = Arrays.copyOfRange(tableBlockBytes, 0x1c, 0x20);
BKTRoffsetSection2 = getLElong(BKTRfullHeader, 0x20);
BKTRsizeSection2 = getLElong(BKTRfullHeader, 0x28);
BKTRmagicSection2 = new String(Arrays.copyOfRange(BKTRfullHeader, 0x30, 0x34), StandardCharsets.US_ASCII);
BKTRu32Section2 = getLEint(BKTRfullHeader, 0x34);
BKTRs32Section2 = getLEint(BKTRfullHeader, 0x38);
BKTRunknownSection2 = Arrays.copyOfRange(BKTRfullHeader, 0x3c, 0x40);
sectionCTR = Arrays.copyOfRange(tableBlockBytes, 0x140, 0x148);
unknownEndPadding = Arrays.copyOfRange(tableBlockBytes, 0x148, 0x200);
}
public byte[] getVersion() { return version; }
public byte getFsType() { return fsType; }
public byte getHashType() { return hashType; }
public byte getCryptoType() { return cryptoType; }
public byte[] getPadding() { return padding; }
public SuperBlockIVFC getSuperBlockIVFC() { return superBlockIVFC; }
public SuperBlockPFS0 getSuperBlockPFS0() { return superBlockPFS0; }
public byte[] getBKTRfullHeader() { return BKTRfullHeader; }
public long getBKTRoffsetSection1() { return BKTRoffsetSection1; }
public long getBKTRsizeSection1() { return BKTRsizeSection1; }
public String getBKTRmagicSection1() { return BKTRmagicSection1; }
public int getBKTRu32Section1() { return BKTRu32Section1; }
public int getBKTRs32Section1() { return BKTRs32Section1; }
public byte[] getBKTRunknownSection1() { return BKTRunknownSection1; }
public long getBKTRoffsetSection2() { return BKTRoffsetSection2; }
public long getBKTRsizeSection2() { return BKTRsizeSection2; }
public String getBKTRmagicSection2() { return BKTRmagicSection2; }
public int getBKTRu32Section2() { return BKTRu32Section2; }
public int getBKTRs32Section2() { return BKTRs32Section2; }
public byte[] getBKTRunknownSection2() { return BKTRunknownSection2; }
public byte[] getSectionCTR() { return sectionCTR; }
public byte[] getUnknownEndPadding() { return unknownEndPadding; }
}


@ -0,0 +1,287 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import libKonogonka.RainbowDump;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Arrays;
import static libKonogonka.Converter.byteArrToHexString;
import static libKonogonka.Converter.getLElong;
public class NcaFsHeader {
private final static Logger log = LogManager.getLogger(NcaFsHeader.class);
private final byte[] version;
private final byte fsType;
private final byte hashType;
private final byte cryptoType;
private final byte metaDataHashType;
private final byte[] padding;
private SuperBlockIVFC superBlockIVFC;
private SuperBlockPFS0 superBlockPFS0;
// BKTR extended
private final long PatchInfoOffsetSection1;
private final long PatchInfoSizeSection1;
private final BucketTreeHeader BktrSection1;
private final long PatchInfoOffsetSection2;
private final long PatchInfoSizeSection2;
private final BucketTreeHeader BktrSection2;
private final byte[] generation;
private final byte[] sectionCTR;
private final SparseInfo sparseInfo;
private final CompressionInfo compressionInfo;
private final MetaDataHashDataInfo metaDataHashDataInfo;
private final byte[] unknownEndPadding;
public NcaFsHeader(byte[] tableBlockBytes) throws Exception{
if (tableBlockBytes.length != 0x200)
throw new Exception("Table Block Section size is incorrect.");
version = Arrays.copyOfRange(tableBlockBytes, 0, 0x2);
fsType = tableBlockBytes[0x2];
hashType = tableBlockBytes[0x3];
cryptoType = tableBlockBytes[0x4];
metaDataHashType = tableBlockBytes[0x5];
padding = Arrays.copyOfRange(tableBlockBytes, 0x6, 0x8);
byte[] superBlockBytes = Arrays.copyOfRange(tableBlockBytes, 0x8, 0xf8);
if ((fsType == 0) && (hashType == 0x3))
superBlockIVFC = new SuperBlockIVFC(superBlockBytes);
else if ((fsType == 0x1) && (hashType == 0x2))
superBlockPFS0 = new SuperBlockPFS0(superBlockBytes);
PatchInfoOffsetSection1 = getLElong(tableBlockBytes, 0x100);
PatchInfoSizeSection1 = getLElong(tableBlockBytes, 0x108);
BktrSection1 = new BucketTreeHeader(Arrays.copyOfRange(tableBlockBytes, 0x110, 0x120));
PatchInfoOffsetSection2 = getLElong(tableBlockBytes, 0x120);
PatchInfoSizeSection2 = getLElong(tableBlockBytes, 0x128);
BktrSection2 = new BucketTreeHeader(Arrays.copyOfRange(tableBlockBytes, 0x130, 0x140));
generation = Arrays.copyOfRange(tableBlockBytes, 0x140, 0x144);
sectionCTR = Arrays.copyOfRange(tableBlockBytes, 0x144, 0x148);
sparseInfo = new SparseInfo(Arrays.copyOfRange(tableBlockBytes, 0x148, 0x178));
compressionInfo = new CompressionInfo(Arrays.copyOfRange(tableBlockBytes, 0x178, 0x1a0));
metaDataHashDataInfo = new MetaDataHashDataInfo(Arrays.copyOfRange(tableBlockBytes, 0x1a0, 0x1d0));
unknownEndPadding = Arrays.copyOfRange(tableBlockBytes, 0x1d0, 0x200);
}
public byte[] getVersion() { return version; }
public byte getFsType() { return fsType; }
public byte getHashType() { return hashType; }
public byte getCryptoType() { return cryptoType; }
public byte getMetaDataHashType() { return metaDataHashType; }
public byte[] getPadding() { return padding; }
public SuperBlockIVFC getSuperBlockIVFC() { return superBlockIVFC; }
public SuperBlockPFS0 getSuperBlockPFS0() { return superBlockPFS0; }
public long getPatchInfoOffsetSection1() { return PatchInfoOffsetSection1; }
public long getPatchInfoSizeSection1() { return PatchInfoSizeSection1; }
public String getPatchInfoMagicSection1() { return BktrSection1.getMagic(); }
public int getPatchInfoVersionSection1() { return BktrSection1.getVersion(); }
public int getEntryCountSection1() { return BktrSection1.getEntryCount(); }
public byte[] getPatchInfoUnknownSection1() { return BktrSection1.getUnknown(); }
public long getPatchInfoOffsetSection2() { return PatchInfoOffsetSection2; }
public long getPatchInfoSizeSection2() { return PatchInfoSizeSection2; }
public String getPatchInfoMagicSection2() { return BktrSection2.getMagic(); }
public int getPatchInfoVersionSection2() { return BktrSection2.getVersion(); }
public int getEntryCountSection2() { return BktrSection2.getEntryCount(); }
public byte[] getPatchInfoUnknownSection2() { return BktrSection2.getUnknown(); }
public byte[] getGeneration() {return generation;}
public byte[] getSectionCTR() { return sectionCTR; }
public SparseInfo getSparseInfo() {return sparseInfo;}
public CompressionInfo getCompressionInfo() {return compressionInfo;}
public MetaDataHashDataInfo getMetaDataHashDataInfo() {return metaDataHashDataInfo;}
public byte[] getUnknownEndPadding() { return unknownEndPadding; }
public void printDebug(){
String hashTypeDescription;
switch (hashType){
case 0 :
hashTypeDescription = "Auto";
break;
case 1 :
hashTypeDescription = "None";
break;
case 2 :
hashTypeDescription = "HierarchicalSha256Hash";
break;
case 3 :
hashTypeDescription = "HierarchicalIntegrityHash";
break;
case 4 :
hashTypeDescription = "AutoSha3";
break;
case 5 :
hashTypeDescription = "HierarchicalSha3256Hash";
break;
case 6 :
hashTypeDescription = "HierarchicalIntegritySha3Hash";
break;
default:
hashTypeDescription = "???";
}
String cryptoTypeDescription;
switch (cryptoType){
case 0 :
cryptoTypeDescription = "Auto";
break;
case 1 :
cryptoTypeDescription = "None";
break;
case 2 :
cryptoTypeDescription = "AesXts";
break;
case 3 :
cryptoTypeDescription = "AesCtr";
break;
case 4 :
cryptoTypeDescription = "AesCtrEx";
break;
case 5 :
cryptoTypeDescription = "AesCtrSkipLayerHash";
break;
case 6 :
cryptoTypeDescription = "AesCtrExSkipLayerHash";
break;
default:
cryptoTypeDescription = "???";
}
log.debug("NCASectionBlock:\n" +
"Version : " + byteArrToHexString(version) + "\n" +
"FS Type : " + fsType +(fsType == 0?" (RomFS)":fsType == 1?" (PartitionFS)":" (Unknown)")+ "\n" +
"Hash Type : " + hashType +" ("+ hashTypeDescription + ")\n" +
"Crypto Type : " + cryptoType + " (" + cryptoTypeDescription + ")\n" +
"Meta Data Hash Type : " + metaDataHashType + "\n" +
"Padding : " + byteArrToHexString(padding) + "\n" +
"Super Block IVFC : " + superBlockIVFC + "\n" +
"Super Block PFS0 : " + superBlockPFS0 + "\n" +
"================================================================================================\n" +
(((fsType == 0) && (hashType == 0x3))?
("| Hash Data - RomFS\n" +
"| Magic : " + superBlockIVFC.getMagic() + "\n" +
"| Version : " + superBlockIVFC.getVersion() + "\n" +
"| Master Hash Size : " + superBlockIVFC.getMasterHashSize() + "\n" +
"| Total Number of Levels : " + superBlockIVFC.getTotalNumberOfLevels() + "\n\n" +
"| Level 1 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1Offset()) + "\n" +
"| Level 1 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1Size()) + "\n" +
"| Level 1 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1SBlockSize()) + "\n" +
"| Level 1 reserved : " + byteArrToHexString(superBlockIVFC.getReserved1()) + "\n\n" +
"| Level 2 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2Offset()) + "\n" +
"| Level 2 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2Size()) + "\n" +
"| Level 2 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2SBlockSize()) + "\n" +
"| Level 2 reserved : " + byteArrToHexString(superBlockIVFC.getReserved2()) + "\n\n" +
"| Level 3 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3Offset()) + "\n" +
"| Level 3 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3Size()) + "\n" +
"| Level 3 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3SBlockSize()) + "\n" +
"| Level 3 reserved : " + byteArrToHexString(superBlockIVFC.getReserved3()) + "\n\n" +
"| Level 4 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4Offset()) + "\n" +
"| Level 4 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4Size()) + "\n" +
"| Level 4 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4SBlockSize()) + "\n" +
"| Level 4 reserved : " + byteArrToHexString(superBlockIVFC.getReserved4()) + "\n\n" +
"| Level 5 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5Offset()) + "\n" +
"| Level 5 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5Size()) + "\n" +
"| Level 5 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5SBlockSize()) + "\n" +
"| Level 5 reserved : " + byteArrToHexString(superBlockIVFC.getReserved5()) + "\n\n" +
"| Level 6 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6Offset()) + "\n" +
"| Level 6 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6Size()) + "\n" +
"| Level 6 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6SBlockSize()) + "\n" +
"| Level 6 reserved : " + byteArrToHexString(superBlockIVFC.getReserved6()) + "\n\n" +
"| SignatureSalt : " + byteArrToHexString(superBlockIVFC.getSignatureSalt()) + "\n" +
"| Master Hash : " + byteArrToHexString(superBlockIVFC.getMasterHash()) + "\n" +
"| Reserved (tail) : " + byteArrToHexString(superBlockIVFC.getReservedTail()) + "\n"
)
:(((fsType == 0x1) && (hashType == 0x2))?
("| Hash Data - PFS0\n" +
"| SHA256 hash : " + byteArrToHexString(superBlockPFS0.getSHA256hash()) + "\n" +
"| Block Size (bytes) : " + superBlockPFS0.getBlockSize() + "\n" +
"| Layer Count (2) : " + superBlockPFS0.getLayerCount() + "\n" +
"| Hash table offset : " + RainbowDump.formatDecHexString(superBlockPFS0.getHashTableOffset()) + "\n" +
"| Hash table size : " + RainbowDump.formatDecHexString(superBlockPFS0.getHashTableSize()) + "\n" +
"| PFS0 header offset : " + RainbowDump.formatDecHexString(superBlockPFS0.getPfs0offset()) + "\n" +
"| PFS0 header size : " + RainbowDump.formatDecHexString(superBlockPFS0.getPfs0size()) + "\n" +
"| Unknown (reserved) : " + byteArrToHexString(superBlockPFS0.getZeroes()) + "\n"
)
:
" // Hash Data - EMPTY \\\\ \n"
)) +
"================================================================================================\n" +
" PatchInfo\n" +
"================================================================================================\n" +
"Indirect Offset : " + PatchInfoOffsetSection1 + "\n" +
"Indirect Size : " + PatchInfoSizeSection1 + "\n" +
"Magic ('BKTR') : " + BktrSection1.getMagic() + "\n" +
"Version : " + BktrSection1.getVersion() + "\n" +
"EntryCount : " + BktrSection1.getEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(BktrSection1.getUnknown()) + "\n" +
"------------------------------------------------------------------------------------------------\n" +
"AesCtrEx Offset : " + PatchInfoOffsetSection2 + "\n" +
"AesCtrEx Size : " + PatchInfoSizeSection2 + "\n" +
"Magic ('BKTR') : " + BktrSection2.getMagic() + "\n" +
"Version : " + BktrSection2.getVersion() + "\n" +
"EntryCount : " + BktrSection2.getEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(BktrSection2.getUnknown()) + "\n" +
"================================================================================================\n" +
"Generation : " + byteArrToHexString(generation) + "\n" +
"Section CTR : " + byteArrToHexString(sectionCTR) + "\n" +
"================================================================================================\n" +
" Sparse Info\n" +
"Table Offset : " + sparseInfo.getOffset() + "\n" +
"Table Size : " + sparseInfo.getSize() + "\n" +
"Magic ('BKTR') : " + sparseInfo.getBktrMagic() + "\n" +
"Version : " + sparseInfo.getBktrVersion() + "\n" +
"EntryCount : " + sparseInfo.getBktrEntryCount() + "\n" +
"Unknown (BKTR) : " + byteArrToHexString(sparseInfo.getBktrUnknown()) + "\n" +
"PhysicalOffset : " + sparseInfo.getPhysicalOffset() + "\n" +
"Generation : " + byteArrToHexString(sparseInfo.getGeneration()) + "\n" +
"Unknown (reserved) : " + byteArrToHexString(sparseInfo.getUnknown()) + "\n" +
"================================================================================================\n" +
" Compression Info\n" +
"Table Offset : " + compressionInfo.getOffset() + "\n" +
"Table Size : " + compressionInfo.getSize() + "\n" +
"Magic ('BKTR') : " + compressionInfo.getBktrMagic() + "\n" +
"Version : " + compressionInfo.getBktrVersion() + "\n" +
"EntryCount : " + compressionInfo.getBktrEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(compressionInfo.getBktrUnknown()) + "\n" +
"Reserved : " + byteArrToHexString(compressionInfo.getUnknown()) + "\n" +
"================================================================================================\n" +
" Meta Data Hash Data Info\n" +
"Table Offset : " + metaDataHashDataInfo.getOffset() + "\n" +
"Table Size : " + metaDataHashDataInfo.getSize() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(metaDataHashDataInfo.getTableHash()) + "\n" +
"================================================================================================\n" +
"Unknown End Padding : " + byteArrToHexString(unknownEndPadding) + "\n" +
"################################################################################################\n"
);
}
}
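For reference, the four FS headers are carved out of the decrypted 0xC00-byte NCA header exactly as NCAProvider does above; a condensed sketch, assuming decryptedData already holds the XTS-decrypted header and the caller declares throws Exception:

    // Sketch only: decryptedData is the 0xC00-byte AES-XTS-decrypted NCA header from NCAProvider.
    NcaFsHeader[] fsHeaders = new NcaFsHeader[4];
    for (int i = 0; i < 4; i++) {
        int start = 0x400 + i * 0x200;   // section 0 starts at 0x400, each header is 0x200 bytes
        fsHeaders[i] = new NcaFsHeader(Arrays.copyOfRange(decryptedData, start, start + 0x200));
    }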


@ -0,0 +1,51 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class SparseInfo {
private final long offset;
private final long size;
private final BucketTreeHeader bktr;
private final long physicalOffset;
private final byte[] generation;
private final byte[] unknown;
SparseInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
bktr = new BucketTreeHeader(Arrays.copyOfRange(rawTable, 0x10, 0x20));
physicalOffset = getLElong(rawTable, 0x20);
generation = Arrays.copyOfRange(rawTable, 0x28, 0x2a);
unknown = Arrays.copyOfRange(rawTable, 0x2a, 0x30);
}
public long getOffset() { return offset; }
public long getSize() { return size; }
public String getBktrMagic() { return bktr.getMagic(); }
public int getBktrVersion() { return bktr.getVersion(); }
public int getBktrEntryCount() { return bktr.getEntryCount(); }
public byte[] getBktrUnknown() { return bktr.getUnknown(); }
public long getPhysicalOffset() {return physicalOffset;}
public byte[] getGeneration() {return generation;}
public byte[] getUnknown() {return unknown;}
}


@ -21,50 +21,55 @@ package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;
public class SuperBlockIVFC {
private String magic;
private int magicNumber;
private int masterHashSize;
private int totalNumberOfLevels;
private long lvl1Offset;
private long lvl1Size;
private int lvl1SBlockSize;
private byte[] reserved1;
private final String magic;
private final int version;
private final int masterHashSize;
private final int totalNumberOfLevels;
private final long lvl1Offset;
private final long lvl1Size;
private final int lvl1SBlockSize;
private final byte[] reserved1;
private long lvl2Offset;
private long lvl2Size;
private int lvl2SBlockSize;
private byte[] reserved2;
private final long lvl2Offset;
private final long lvl2Size;
private final int lvl2SBlockSize;
private final byte[] reserved2;
private long lvl3Offset;
private long lvl3Size;
private int lvl3SBlockSize;
private byte[] reserved3;
private final long lvl3Offset;
private final long lvl3Size;
private final int lvl3SBlockSize;
private final byte[] reserved3;
private long lvl4Offset;
private long lvl4Size;
private int lvl4SBlockSize;
private byte[] reserved4;
private final long lvl4Offset;
private final long lvl4Size;
private final int lvl4SBlockSize;
private final byte[] reserved4;
private long lvl5Offset;
private long lvl5Size;
private int lvl5SBlockSize;
private byte[] reserved5;
private final long lvl5Offset;
private final long lvl5Size;
private final int lvl5SBlockSize;
private final byte[] reserved5;
private long lvl6Offset;
private long lvl6Size;
private int lvl6SBlockSize;
private byte[] reserved6;
private final long lvl6Offset;
private final long lvl6Size;
private final int lvl6SBlockSize;
private final byte[] reserved6;
private byte[] unknown;
private byte[] hash;
private final byte[] signatureSalt;
private final byte[] masterHash;
private final byte[] reservedTail;
/**
* Also known as IntegrityMetaInfo
* @param sbBytes - Chunk of data related to the IVFC Hash Data table
*/
SuperBlockIVFC(byte[] sbBytes){
this.magic = new String(Arrays.copyOfRange(sbBytes, 0, 4), StandardCharsets.US_ASCII);
this.magicNumber = getLEint(sbBytes, 0x4);
this.version = getLEint(sbBytes, 0x4);
this.masterHashSize = getLEint(sbBytes, 0x8);
this.totalNumberOfLevels = getLEint(sbBytes, 0xc);
@ -98,50 +103,13 @@ public class SuperBlockIVFC {
this.lvl6SBlockSize = getLEint(sbBytes, 0x98);
this.reserved6 = Arrays.copyOfRange(sbBytes, 0x9c, 0xa0);
this.unknown = Arrays.copyOfRange(sbBytes, 0xa0, 0xc0);
this.hash = Arrays.copyOfRange(sbBytes, 0xc0, 0xe0);
/*
System.out.println(magic);
System.out.println(magicNumber);
System.out.println(masterHashSize);
System.out.println(totalNumberOfLevels);
System.out.println(lvl1Offset);
System.out.println(lvl1Size);
System.out.println(lvl1SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved1);
System.out.println(lvl2Offset);
System.out.println(lvl2Size);
System.out.println(lvl2SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved2);
System.out.println(lvl3Offset);
System.out.println(lvl3Size);
System.out.println(lvl3SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved3);
System.out.println(lvl4Offset);
System.out.println(lvl4Size);
System.out.println(lvl4SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved4);
System.out.println(lvl5Offset);
System.out.println(lvl5Size);
System.out.println(lvl5SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved5);
System.out.println(lvl6Offset);
System.out.println(lvl6Size);
System.out.println(lvl6SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved6);
RainbowHexDump.hexDumpUTF8(unknown);
RainbowHexDump.hexDumpUTF8(hash);
// */
this.signatureSalt = Arrays.copyOfRange(sbBytes, 0xa0, 0xc0);
this.masterHash = Arrays.copyOfRange(sbBytes, 0xc0, 0xe0);
this.reservedTail = Arrays.copyOfRange(sbBytes, 0xe0, 0xf8);
}
public String getMagic() { return magic; }
public int getMagicNumber() { return magicNumber; }
public int getVersion() { return version; }
public int getMasterHashSize() { return masterHashSize; }
public int getTotalNumberOfLevels() { return totalNumberOfLevels; }
public long getLvl1Offset() { return lvl1Offset; }
@ -168,6 +136,7 @@ public class SuperBlockIVFC {
public long getLvl6Size() { return lvl6Size; }
public int getLvl6SBlockSize() { return lvl6SBlockSize; }
public byte[] getReserved6() { return reserved6; }
public byte[] getUnknown() { return unknown; }
public byte[] getHash() { return hash; }
public byte[] getSignatureSalt() { return signatureSalt; }
public byte[] getMasterHash() { return masterHash; }
public byte[] getReservedTail() { return reservedTail; }
}

View File

@ -20,33 +20,37 @@ package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;
public class SuperBlockPFS0 {
private byte[] SHA256hash;
private int blockSize;
private int unknownNumberTwo;
private long hashTableOffset;
private long hashTableSize;
private long pfs0offset;
private long pfs0size;
private byte[] zeroes;
private final byte[] SHA256hash;
private final int blockSize;
private final int layerCount;
private final long hashTableOffset;
private final long hashTableSize;
private final long pfs0offset;
private final long pfs0size;
private final byte[] zeroes;
/**
* Also known as HierarchicalSha256Data
* @param sbBytes - Chunk of data related to the PFS0 Hash Data table
*/
SuperBlockPFS0(byte[] sbBytes){
SHA256hash = Arrays.copyOfRange(sbBytes, 0, 0x20);
blockSize = getLEint(sbBytes, 0x20);
unknownNumberTwo = getLEint(sbBytes, 0x24);
layerCount = getLEint(sbBytes, 0x24);
hashTableOffset = getLElong(sbBytes, 0x28);
hashTableSize = getLElong(sbBytes, 0x30);
pfs0offset = getLElong(sbBytes, 0x38);
pfs0size = getLElong(sbBytes, 0x40);
zeroes = Arrays.copyOfRange(sbBytes, 0x48, 0xf8);
zeroes = Arrays.copyOfRange(sbBytes, 0x48, 0xf0);
}
public byte[] getSHA256hash() { return SHA256hash; }
public int getBlockSize() { return blockSize; }
public int getUnknownNumberTwo() { return unknownNumberTwo; }
public int getLayerCount() { return layerCount; }
public long getHashTableOffset() { return hashTableOffset; }
public long getHashTableSize() { return hashTableSize; }
public long getPfs0offset() { return pfs0offset; }

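For context, the SuperBlockPFS0 (HierarchicalSha256Data) fields point at a hash table and the PFS0 region it covers; each blockSize-sized chunk of the PFS0 area is expected to hash (SHA-256) to the matching 0x20-byte entry of that table. A minimal verification sketch, assuming the caller has already decrypted both regions into byte arrays (hashTable, pfs0Data and the class name are illustrative, not part of this code):

import java.security.MessageDigest;
import java.util.Arrays;

class Pfs0HashCheckSketch {
    // Hypothetical helper: checks one PFS0 block against its SHA-256 entry in the hash table.
    static boolean verifyBlock(byte[] hashTable, byte[] pfs0Data, int blockSize, int blockNumber) throws Exception {
        int from = blockNumber * blockSize;
        int to = Math.min(from + blockSize, pfs0Data.length);        // the last block may be shorter
        byte[] actual = MessageDigest.getInstance("SHA-256")
                .digest(Arrays.copyOfRange(pfs0Data, from, to));
        byte[] expected = Arrays.copyOfRange(hashTable, blockNumber * 0x20, blockNumber * 0x20 + 0x20);
        return Arrays.equals(actual, expected);
    }
}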
View File

@ -24,7 +24,7 @@ import libKonogonka.Tools.NPDM.ServiceAccessControlProvider;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.Converter.getLEint;
public class ACI0Provider {
private String magicNum;

View File

@ -18,7 +18,7 @@
*/
package libKonogonka.Tools.NPDM.ACI0;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import java.util.Arrays;
@ -39,11 +39,11 @@ public class FSAccessHeaderProvider {
public FSAccessHeaderProvider(byte[] bytes) {
version = bytes[0];
padding = Arrays.copyOfRange(bytes, 1, 0x4);
permissionsBitmask = LoperConverter.getLElong(bytes, 0x4);
dataSize = LoperConverter.getLEint(bytes, 0xC);
contentOwnIdSectionSize = LoperConverter.getLEint(bytes, 0x10);
dataNownerSizes = LoperConverter.getLEint(bytes, 0x14);
saveDataOwnSectionSize = LoperConverter.getLEint(bytes, 0x18);
permissionsBitmask = Converter.getLElong(bytes, 0x4);
dataSize = Converter.getLEint(bytes, 0xC);
contentOwnIdSectionSize = Converter.getLEint(bytes, 0x10);
dataNownerSizes = Converter.getLEint(bytes, 0x14);
saveDataOwnSectionSize = Converter.getLEint(bytes, 0x18);
unknownData = Arrays.copyOfRange(bytes, 0x1C, bytes.length);
}

View File

@ -24,7 +24,7 @@ import libKonogonka.Tools.NPDM.ServiceAccessControlProvider;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
public class ACIDProvider {

View File

@ -18,7 +18,7 @@
*/
package libKonogonka.Tools.NPDM.ACID;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import java.util.Arrays;
@ -35,7 +35,7 @@ public class FSAccessControlProvider {
public FSAccessControlProvider(byte[] bytes) {
version = bytes[0];
padding = Arrays.copyOfRange(bytes, 1, 0x4);
permissionsBitmask = LoperConverter.getLElong(bytes, 0x4);
permissionsBitmask = Converter.getLElong(bytes, 0x4);
reserved = Arrays.copyOfRange(bytes, 0xC, 0x2C);
}

View File

@ -18,7 +18,7 @@
*/
package libKonogonka.Tools.NPDM;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import java.util.LinkedHashMap;
import java.util.LinkedList;
@ -126,7 +126,7 @@ public class KernelAccessControlProvider {
int position = 0;
// Collect all blocks
for (int i = 0; i < bytes.length / 4; i++) {
int block = LoperConverter.getLEint(bytes, position);
int block = Converter.getLEint(bytes, position);
position += 4;
rawData.add(block);

View File

@ -28,7 +28,7 @@ import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
public class NPDMProvider extends ASuperInFileProvider {

View File

@ -24,27 +24,27 @@ import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
public class PFS0EncryptedProvider implements IPFS0Provider{
private long rawFileDataStart; // Always -1 @ PFS0EncryptedProvider
private String magic;
private int filesCount;
private int stringTableSize;
private byte[] padding;
private PFS0subFile[] pfs0subFiles;
private final String magic;
private final int filesCount;
private final int stringTableSize;
private final byte[] padding;
private final PFS0subFile[] pfs0subFiles;
//---------------------------------------
private long rawBlockDataStart;
private long offsetPositionInFile;
private File file;
private byte[] key;
private byte[] sectionCTR;
private long mediaStartOffset; // In 512-blocks
private long mediaEndOffset; // In 512-blocks
private final long offsetPositionInFile;
private final File file;
private final byte[] key;
private final byte[] sectionCTR;
private final long mediaStartOffset; // In 512-blocks
private final long mediaEndOffset; // In 512-blocks
public PFS0EncryptedProvider(PipedInputStream pipedInputStream,
long pfs0offsetPosition,
@ -62,7 +62,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
this.sectionCTR = sectionCTR;
this.mediaStartOffset = mediaStartOffset;
this.mediaEndOffset = mediaEndOffset;
// pfs0offsetPosition is a position relative to Media block. Lets add pfs0 'header's' bytes count and get raw data start position in media block
// pfs0offsetPosition is a position relative to Media block. Let's add pfs0 'header's' bytes count and get raw data start position in media block
rawFileDataStart = -1; // Set -1 for PFS0EncryptedProvider
// Detect raw data start position using next var
rawBlockDataStart = pfs0offsetPosition;
@ -214,7 +214,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
if (skipBytes > 0) {
encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
// If we have extra-small file that is less then a block and even more
if ((0x200 - skipBytes) > pfs0subFiles[subFileNumber].getSize()){
streamOut.write(dectyptedBlock, skipBytes, (int) pfs0subFiles[subFileNumber].getSize()); // safe cast
@ -244,7 +244,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) {
//dectyptedBlock = aesCtr.decrypt(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
// Writing decrypted data to pipe
streamOut.write(dectyptedBlock);
}
@ -259,7 +259,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
if (extraData > 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
streamOut.write(dectyptedBlock, 0, extraData);
}
else {
@ -270,7 +270,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
else if (extraData < 0){ // In case we can get more than we need
encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
streamOut.write(dectyptedBlock, 0, 0x200 + extraData); // WTF ??? THIS LOOKS INCORRECT
}
else {

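PFS0EncryptedProvider walks the section in fixed 0x200-byte blocks and hands each one to AesCtrDecryptSimple.decryptNext() (the rename from the old dectyptNext is part of this change). Conceptually, each call decrypts one 512-byte block and advances the CTR counter; a rough stand-alone equivalent with the JDK cipher, as a sketch only and not the library's implementation (the way the counter is derived from sectionCTR below is an assumption):

import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;

class AesCtrBlockSketch {
    // Decrypt a single 0x200 block located at 'blockIndex' blocks from the section start.
    static byte[] decryptBlock(byte[] key, byte[] counterBase, long blockIndex, byte[] encrypted) throws Exception {
        byte[] iv = counterBase.clone();                      // 16-byte CTR value
        long add = blockIndex * 0x200 / 0x10;                 // AES blocks consumed before this 0x200 block
        for (int i = 15; i >= 8 && add != 0; i--) {           // add into the low 8 bytes, big-endian, with carry
            add += iv[i] & 0xFF;
            iv[i] = (byte) add;
            add >>>= 8;
        }
        Cipher cipher = Cipher.getInstance("AES/CTR/NoPadding");
        cipher.init(Cipher.DECRYPT_MODE, new SecretKeySpec(key, "AES"), new IvParameterSpec(iv));
        return cipher.doFinal(encrypted);
    }
}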
View File

@ -22,18 +22,18 @@ import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
public class PFS0Provider implements IPFS0Provider{
private long rawFileDataStart; // Where data starts, excluding header, string table etc.
private final long rawFileDataStart; // Where data starts, excluding header, string table etc.
private String magic;
private int filesCount;
private int stringTableSize;
private byte[] padding;
private PFS0subFile[] pfs0subFiles;
private final String magic;
private final int filesCount;
private final int stringTableSize;
private final byte[] padding;
private final PFS0subFile[] pfs0subFiles;
private File file;
private final File file;
public PFS0Provider(File fileWithPfs0) throws Exception{ this(fileWithPfs0, 0); }

View File

@ -1,87 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FileMeta4Debug {
List<FileMeta> allFiles;
FileMeta4Debug(long fileMetadataTableLength, byte[] fileMetadataTable) {
allFiles = new ArrayList<>();
int i = 0;
while (i < fileMetadataTableLength) {
FileMeta fileMeta = new FileMeta();
fileMeta.containingDirectoryOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.nextSiblingFileOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileDataOffset = LoperConverter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.fileDataLength = LoperConverter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.nextFileOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileNameLength = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileName = new String(Arrays.copyOfRange(fileMetadataTable, i, i + fileMeta.fileNameLength), StandardCharsets.UTF_8);
;
i += getRealNameSize(fileMeta.fileNameLength);
allFiles.add(fileMeta);
}
for (FileMeta fileMeta : allFiles){
System.out.println(
"-------------------------FILE--------------------------------\n" +
"Offset of Containing Directory " + formatDecHexString(fileMeta.containingDirectoryOffset) + "\n" +
"Offset of next Sibling File " + formatDecHexString(fileMeta.nextSiblingFileOffset) + "\n" +
"Offset of File's Data " + formatDecHexString(fileMeta.fileDataOffset) + "\n" +
"Length of File's Data " + formatDecHexString(fileMeta.fileDataLength) + "\n" +
"Offset of next File in the same Hash Table bucket " + formatDecHexString(fileMeta.nextFileOffset) + "\n" +
"Name Length " + formatDecHexString(fileMeta.fileNameLength) + "\n" +
"Name Length (rounded up to multiple of 4) " + fileMeta.fileName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FileMeta{
int containingDirectoryOffset;
int nextSiblingFileOffset;
long fileDataOffset;
long fileDataLength;
int nextFileOffset;
int fileNameLength;
String fileName;
}
}

View File

@ -19,7 +19,11 @@
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import libKonogonka.Tools.NCA.NCAContent;
import libKonogonka.Tools.RomFs.view.FileSystemTreeViewMaker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
@ -28,9 +32,11 @@ import java.util.Comparator;
import java.util.List;
public class FileSystemEntry {
private final static Logger log = LogManager.getLogger(NCAContent.class);
private boolean directoryFlag;
private String name;
private List<FileSystemEntry> content;
private final List<FileSystemEntry> content;
private static byte[] dirsMetadataTable;
private static byte[] filesMetadataTable;
@ -93,7 +99,7 @@ public class FileSystemEntry {
fileSystemEntry.fileOffset = fileMetaData.fileDataRealOffset;
fileSystemEntry.fileSize = fileMetaData.fileDataRealLength;
if (fileMetaData.nextSiblingFileOffset != -1)
directoryContainer.content.add( getFile(directoryContainer, fileMetaData.nextSiblingFileOffset) );
directoryContainer.content.add(getFile(directoryContainer, fileMetaData.nextSiblingFileOffset) );
return fileSystemEntry;
}
@ -107,31 +113,44 @@ public class FileSystemEntry {
private static class DirectoryMetaData {
private int parentDirectoryOffset;
private int nextSiblingDirectoryOffset;
private int firstSubdirectoryOffset;
private int firstFileOffset;
private final int parentDirectoryOffset;
private final int nextSiblingDirectoryOffset;
private final int firstSubdirectoryOffset;
private final int firstFileOffset;
private final int nextHashTableBucketDirectoryOffset;
private String dirName;
private final String dirName;
private DirectoryMetaData(){
this(0);
}
private DirectoryMetaData(int childDirMetaPosition){
int i = childDirMetaPosition;
parentDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i);
parentDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4;
nextSiblingDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i);
nextSiblingDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4;
firstSubdirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i);
firstSubdirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4;
firstFileOffset = LoperConverter.getLEint(dirsMetadataTable, i);
firstFileOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4;
// int nextHashTableBucketDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i);
nextHashTableBucketDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
//*
if (nextHashTableBucketDirectoryOffset < 0) {
System.out.println("nextHashTableBucketDirectoryOffset: "+ nextHashTableBucketDirectoryOffset);
}
//*/
i += 4;
int dirNameLength = LoperConverter.getLEint(dirsMetadataTable, i);
i += 4;
dirName = new String(Arrays.copyOfRange(dirsMetadataTable, i, i + dirNameLength), StandardCharsets.UTF_8);
int dirNameLength = Converter.getLEint(dirsMetadataTable, i);
if (dirNameLength > 0) {
i += 4;
dirName = new String(Arrays.copyOfRange(dirsMetadataTable, i, i + dirNameLength), StandardCharsets.UTF_8);
}
else {
dirName = "";
System.out.println("dirName: "+dirNameLength);
}
//i += getRealNameSize(dirNameLength);
}
@ -142,10 +161,10 @@ public class FileSystemEntry {
}
}
private static class FileMetaData {
private int nextSiblingFileOffset;
private long fileDataRealOffset;
private long fileDataRealLength;
private final int nextSiblingFileOffset;
private final long fileDataRealOffset;
private final long fileDataRealLength;
private final int nextHashTableBucketFileOffset;
private String fileName;
@ -157,36 +176,43 @@ public class FileSystemEntry {
int i = childFileMetaPosition;
// int containingDirectoryOffset = LoperConverter.getLEint(filesMetadataTable, i); // never used
i += 4;
nextSiblingFileOffset = LoperConverter.getLEint(filesMetadataTable, i);
nextSiblingFileOffset = Converter.getLEint(filesMetadataTable, i);
i += 4;
fileDataRealOffset = LoperConverter.getLElong(filesMetadataTable, i);
fileDataRealOffset = Converter.getLElong(filesMetadataTable, i);
i += 8;
fileDataRealLength = LoperConverter.getLElong(filesMetadataTable, i);
fileDataRealLength = Converter.getLElong(filesMetadataTable, i);
i += 8;
//int nextHashTableBucketFileOffset = LoperConverter.getLEint(filesMetadataTable, i);
nextHashTableBucketFileOffset = Converter.getLEint(filesMetadataTable, i);
//*
if (nextHashTableBucketFileOffset < 0) {
System.out.println("nextHashTableBucketFileOffset: "+ nextHashTableBucketFileOffset);
}
//*/
i += 4;
int fileNameLength = LoperConverter.getLEint(filesMetadataTable, i);
i += 4;
fileName = new String(Arrays.copyOfRange(filesMetadataTable, i, i + fileNameLength), StandardCharsets.UTF_8);;
int fileNameLength = Converter.getLEint(filesMetadataTable, i);
if (fileNameLength > 0) {
i += 4;
fileName = "";
try {
fileName = new String(Arrays.copyOfRange(filesMetadataTable, i, i + fileNameLength), StandardCharsets.UTF_8);
}
catch (Exception e){
System.out.println("fileName sizes are: "+filesMetadataTable.length+"\t"+i+"\t"+i + fileNameLength+"\t\t"+nextHashTableBucketFileOffset);
}
}
else {
fileName = "";
System.out.println("fileName: "+fileNameLength);
}
//i += getRealNameSize(fileNameLength);
}
}
public void printTreeForDebug(int spacerForSizes){
log.debug(FileSystemTreeViewMaker.make(content, spacerForSizes));
}
public void printTreeForDebug(){
System.out.println("/");
for (FileSystemEntry entry: content)
printEntry(2, entry);
log.debug(FileSystemTreeViewMaker.make(content, 100));
}
private void printEntry(int cnt, FileSystemEntry entry) {
for (int i = 0; i < cnt; i++)
System.out.print(" ");
if (entry.isDirectory()){
System.out.println("|-" + entry.getName());
for (FileSystemEntry e : entry.content)
printEntry(cnt+2, e);
}
else
System.out.println("|-" + entry.getName() + String.format(" 0x%-10x 0x%-10x", entry.fileOffset, entry.fileSize));
}
}

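FileSystemEntry now reports its tree through log4j2 (LogManager.getLogger(...)), so whether printTreeForDebug() produces any output depends on the runtime logging configuration. A minimal programmatic way to surface the new log.debug()/log.trace() calls during development, assuming no log4j2.xml is on the classpath and the default console configuration of log4j-core is in effect (class name is illustrative):

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;

public class DebugLogging {
    public static void main(String[] args) {
        // Raise the level for the whole library so debug/trace output from the providers becomes visible.
        Configurator.setLevel("libKonogonka", Level.TRACE);
        // ... construct providers and call printDebug() / printTreeForDebug() here
    }
}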
View File

@ -1,84 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FolderMeta4Debug {
List<FolderMeta> allFolders;
FolderMeta4Debug(long directoryMetadataTableLength, byte[] directoryMetadataTable){
allFolders = new ArrayList<>();
int i = 0;
while (i < directoryMetadataTableLength){
FolderMeta folderMeta = new FolderMeta();
folderMeta.parentDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextSiblingDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstSubdirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstFileOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirNameLength = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirName = new String(Arrays.copyOfRange(directoryMetadataTable, i, i + folderMeta.dirNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(folderMeta.dirNameLength);
System.out.println(
"---------------------------DIRECTORY------------------------\n" +
"Offset of Parent Directory (self if Root) " + formatDecHexString(folderMeta.parentDirectoryOffset ) +"\n" +
"Offset of next Sibling Directory " + formatDecHexString(folderMeta.nextSiblingDirectoryOffset) +"\n" +
"Offset of first Child Directory (Subdirectory) " + formatDecHexString(folderMeta.firstSubdirectoryOffset ) +"\n" +
"Offset of first File (in File Metadata Table) " + formatDecHexString(folderMeta.firstFileOffset ) +"\n" +
"Offset of next Directory in the same Hash Table bucket " + formatDecHexString(folderMeta.nextDirectoryOffset ) +"\n" +
"Name Length " + formatDecHexString(folderMeta.dirNameLength ) +"\n" +
"Name Length (rounded up to multiple of 4) " + folderMeta.dirName + "\n"
);
allFolders.add(folderMeta);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FolderMeta {
int parentDirectoryOffset;
int nextSiblingDirectoryOffset;
int firstSubdirectoryOffset;
int firstFileOffset;
int nextDirectoryOffset;
int dirNameLength;
String dirName;
}
}

View File

@ -23,9 +23,10 @@ import java.io.File;
import java.io.PipedInputStream;
public interface IRomFsProvider {
File getFile();
long getLevel6Offset();
Level6Header getHeader();
FileSystemEntry getRootEntry();
PipedInputStream getContent(FileSystemEntry entry) throws Exception;
File getFile();
void printDebug();
}

View File

@ -19,25 +19,41 @@
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import libKonogonka.RainbowDump;
import java.util.Arrays;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
* This class stores information contained in Level 6 Header of the RomFS image
* ------------------------------------
* | Header Length (usually 0x50) |
* | Directory Hash Table Offset | Not used by this library | '<< 32' to get real offset: see implementation
* | Directory Hash Table Length | Not used by this library
* | Directory Metadata Table Offset |
* | Directory Metadata Table Length |
* | File Hash Table Offset | Not used by this library
* | File Hash Table Length | Not used by this library
* | File Metadata Table Offset |
* | File Metadata Table Length |
* | File Data Offset |
* ------------------------------------
* */
public class Level6Header {
private long headerLength;
private final static Logger log = LogManager.getLogger(Level6Header.class);
private final long headerLength;
private long directoryHashTableOffset;
private long directoryHashTableLength;
private long directoryMetadataTableOffset;
private long directoryMetadataTableLength;
private long fileHashTableOffset;
private long fileHashTableLength;
private long fileMetadataTableOffset;
private long fileMetadataTableLength;
private long fileDataOffset;
private final long directoryHashTableLength;
private final long directoryMetadataTableOffset;
private final long directoryMetadataTableLength;
private final long fileHashTableOffset;
private final long fileHashTableLength;
private final long fileMetadataTableOffset;
private final long fileMetadataTableLength;
private final long fileDataOffset;
private byte[] headerBytes;
private int i;
private final byte[] headerBytes;
private int _cursor;
Level6Header(byte[] headerBytes) throws Exception{
this.headerBytes = headerBytes;
@ -54,12 +70,11 @@ public class Level6Header {
fileMetadataTableOffset = getNext();
fileMetadataTableLength = getNext();
fileDataOffset = getNext();
RainbowDump.hexDumpUTF8(Arrays.copyOfRange(headerBytes, 0, 0x50));
}
private long getNext(){
final long result = LoperConverter.getLEint(headerBytes, i);
i += 0x8;
final long result = Converter.getLEint(headerBytes, _cursor);
_cursor += 0x8;
return result;
}
@ -75,8 +90,8 @@ public class Level6Header {
public long getFileDataOffset() { return fileDataOffset; }
public void printDebugInfo(){
System.out.println("== Level 6 Header ==\n" +
"Header Length (always 0x50 ?) "+ RainbowDump.formatDecHexString(headerLength)+" (size of this structure within first 0x200 block of LEVEL 6 part)\n" +
log.debug("== Level 6 Header ==\n" +
"Header Length (usually 0x50) "+ RainbowDump.formatDecHexString(headerLength)+" (size of this structure within first 0x200 block of LEVEL 6 part)\n" +
"Directory Hash Table Offset "+ RainbowDump.formatDecHexString(directoryHashTableOffset)+" (against THIS block where HEADER contains)\n" +
"Directory Hash Table Length "+ RainbowDump.formatDecHexString(directoryHashTableLength) + "\n" +
"Directory Metadata Table Offset "+ RainbowDump.formatDecHexString(directoryMetadataTableOffset) + "\n" +

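As the Javadoc above lays out, the Level 6 header is a sequence of ten 8-byte little-endian values in the listed order. An equivalent stand-alone parse with java.nio, mirroring what getNext() walks through (a sketch, not this class):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class Level6HeaderSketch {
    // Returns: headerLength, dirHashTable off/len, dirMetaTable off/len,
    //          fileHashTable off/len, fileMetaTable off/len, fileDataOffset
    static long[] parse(byte[] headerBytes) {
        ByteBuffer buffer = ByteBuffer.wrap(headerBytes, 0, 0x50).order(ByteOrder.LITTLE_ENDIAN);
        long[] fields = new long[10];
        for (int i = 0; i < 10; i++)
            fields[i] = buffer.getLong();
        return fields;
    }
}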
View File

@ -0,0 +1,121 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.Converter;
import java.io.BufferedInputStream;
import java.io.File;
import java.nio.file.Files;
/**
* Construct header for RomFs and obtain root fileSystemEntry (meta information)
* */
class RomFsDecryptedConstruct {
private Level6Header header;
private FileSystemEntry rootEntry;
private final BufferedInputStream fileBufferedInputStream;
private int headerSize;
private byte[] directoryMetadataTable;
private byte[] fileMetadataTable;
RomFsDecryptedConstruct(File decryptedFsImageFile, long level6offset) throws Exception{
if (level6offset < 0)
throw new Exception("Incorrect Level 6 Offset");
fileBufferedInputStream = new BufferedInputStream(Files.newInputStream(decryptedFsImageFile.toPath()));
fastForwardBySkippingBytes(level6offset);
detectHeaderSize();
constructHeader();
fastForwardBySkippingBytes(header.getDirectoryMetadataTableOffset() - headerSize);
directoryMetadataTableLengthCheck();
directoryMetadataTableConstruct();
fastForwardBySkippingBytes(header.getFileMetadataTableOffset() - header.getFileHashTableOffset());
fileMetadataTableLengthCheck();
fileMetadataTableConstruct();
constructRootFilesystemEntry();
fileBufferedInputStream.close();
}
private void detectHeaderSize() throws Exception{
fileBufferedInputStream.mark(0x10);
byte[] lv6HeaderSizeRaw = new byte[0x8];
if (fileBufferedInputStream.read(lv6HeaderSizeRaw) != 0x8)
throw new Exception("Failed to read header size");
headerSize = Converter.getLEint(lv6HeaderSizeRaw, 0);
fileBufferedInputStream.reset();
}
private void constructHeader() throws Exception{
byte[] rawDataChunk = new byte[headerSize];
if (fileBufferedInputStream.read(rawDataChunk) != headerSize)
throw new Exception(String.format("Failed to read header (0x%x)", headerSize));
this.header = new Level6Header(rawDataChunk);
}
private void directoryMetadataTableLengthCheck() throws Exception{
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
}
private void directoryMetadataTableConstruct() throws Exception{
directoryMetadataTable = new byte[(int) header.getDirectoryMetadataTableLength()];
if (fileBufferedInputStream.read(directoryMetadataTable) != (int) header.getDirectoryMetadataTableLength())
throw new Exception("Failed to read "+header.getDirectoryMetadataTableLength());
}
private void fileMetadataTableLengthCheck() throws Exception{
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
}
private void fileMetadataTableConstruct() throws Exception{
fileMetadataTable = new byte[(int) header.getFileMetadataTableLength()];
if (fileBufferedInputStream.read(fileMetadataTable) != (int) header.getFileMetadataTableLength())
throw new Exception("Failed to read "+header.getFileMetadataTableLength());
}
private void constructRootFilesystemEntry() throws Exception{
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
//rootEntry.printTreeForDebug();
}
private void fastForwardBySkippingBytes(long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += fileBufferedInputStream.skip(mustSkip);
mustSkip = size - skipped;
}
}
Level6Header getHeader() { return header; }
FileSystemEntry getRootEntry(){ return rootEntry; }
byte[] getDirectoryMetadataTable() { return directoryMetadataTable; }
byte[] getFileMetadataTable() { return fileMetadataTable;}
}

View File

@ -0,0 +1,93 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.PipedOutputStream;
import java.nio.file.Files;
public class RomFsDecryptedContentRetrieve implements Runnable {
private final static Logger log = LogManager.getLogger(RomFsDecryptedContentRetrieve.class);
private final File parentFile;
private final PipedOutputStream streamOut;
private final long internalFileRealPosition;
private final long internalFileSize;
RomFsDecryptedContentRetrieve(File parentFile,
PipedOutputStream streamOut,
long internalFileRealPosition,
long internalFileSize){
this.parentFile = parentFile;
this.streamOut = streamOut;
this.internalFileRealPosition = internalFileRealPosition;
this.internalFileSize = internalFileSize;
}
@Override
public void run() {
log.trace("Executing thread");
try (BufferedInputStream bis = new BufferedInputStream(Files.newInputStream(parentFile.toPath()))){
fastForwardBySkippingBytes(bis, internalFileRealPosition);
int readPice = 8388608; // 8mb NOTE: consider switching to 1mb 1048576
long readFrom = 0;
byte[] readBuffer;
while (readFrom < internalFileSize) {
if (internalFileSize - readFrom < readPice)
readPice = Math.toIntExact(internalFileSize - readFrom); // it's safe, I guarantee
readBuffer = new byte[readPice];
if (bis.read(readBuffer) != readPice) {
log.error("getContent(): Unable to read requested size from file.");
return;
}
streamOut.write(readBuffer);
readFrom += readPice;
}
} catch (Exception exception) {
log.error("RomFsDecryptedProvider -> getContent(): Unable to provide stream", exception);
}
finally {
closeStreamOut();
}
log.trace("Thread died");
}
private void fastForwardBySkippingBytes(BufferedInputStream bis, long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += bis.skip(mustSkip);
mustSkip = size - skipped;
}
}
private void closeStreamOut(){
try {
streamOut.close();
}
catch (IOException e){
log.error("RomFsDecryptedProvider -> getContent(): Unable to close 'StreamOut'");
}
}
}

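The retrieve task writes into the PipedOutputStream from its own thread, so the PipedInputStream returned by the provider's getContent() must be drained by the caller (or another thread) or the writer will block. A consumer-side sketch that copies one entry to disk (the output path and method name are illustrative):

import java.io.FileOutputStream;
import java.io.PipedInputStream;

// Assumes access to the libKonogonka.Tools.RomFs types (same package or imported).
static void extractEntry(IRomFsProvider provider, FileSystemEntry entry, String outPath) throws Exception {
    try (PipedInputStream in = provider.getContent(entry);
         FileOutputStream out = new FileOutputStream(outPath)) {
        byte[] buffer = new byte[0x200];
        int read;
        while ((read = in.read(buffer)) != -1)   // blocks until the worker thread supplies data
            out.write(buffer, 0, read);
    }
}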
View File

@ -16,151 +16,61 @@
* You should have received a copy of the GNU General Public License
* along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.Tools.RomFs.view.DirectoryMetaTablePlainView;
import libKonogonka.Tools.RomFs.view.FileMetaTablePlainView;
import java.io.*;
public class RomFsDecryptedProvider implements IRomFsProvider{
private final File file;
private final long level6Offset;
private final Level6Header level6Header;
private final FileSystemEntry rootEntry;
// Used only for debug
private final byte[] directoryMetadataTable;
private final byte[] fileMetadataTable;
private long level6Offset;
private File file;
private Level6Header header;
private FileSystemEntry rootEntry;
public RomFsDecryptedProvider(File decryptedFsImageFile, long level6Offset) throws Exception{
if (level6Offset < 0)
throw new Exception("Incorrect Level 6 Offset");
public RomFsDecryptedProvider(File decryptedFsImageFile, long level6offset) throws Exception{
RomFsDecryptedConstruct construct = new RomFsDecryptedConstruct(decryptedFsImageFile, level6offset);
this.file = decryptedFsImageFile;
this.level6Offset = level6offset;
this.level6Header = construct.getHeader();
this.rootEntry = construct.getRootEntry();
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(decryptedFsImageFile));
this.level6Offset = level6Offset;
skipBytes(bis, level6Offset);
byte[] rawDataChunk = new byte[0x50];
if (bis.read(rawDataChunk) != 0x50)
throw new Exception("Failed to read header (0x50)");
this.header = new Level6Header(rawDataChunk);
/*
// Print Dir Hash table as is:
long seekTo = header.getDirectoryHashTableOffset() - 0x50;
rawDataChunk = new byte[(int) header.getDirectoryHashTableLength()];
skipTo(bis, seekTo);
if (bis.read(rawDataChunk) != (int) header.getDirectoryHashTableLength())
throw new Exception("Failed to read Dir hash table");
RainbowDump.hexDumpUTF8(rawDataChunk);
// Print Files Hash table as is:
seekTo = header.getFileHashTableOffset() - header.getDirectoryMetadataTableOffset();
rawDataChunk = new byte[(int) header.getFileHashTableLength()];
skipTo(bis, seekTo);
if (bis.read(rawDataChunk) != (int) header.getFileHashTableLength())
throw new Exception("Failed to read Files hash table");
RainbowDump.hexDumpUTF8(rawDataChunk);
*/
// Read directories metadata
long locationInFile = header.getDirectoryMetadataTableOffset() - 0x50;
skipBytes(bis, locationInFile);
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
byte[] directoryMetadataTable = new byte[(int) header.getDirectoryMetadataTableLength()];
if (bis.read(directoryMetadataTable) != (int) header.getDirectoryMetadataTableLength())
throw new Exception("Failed to read "+header.getDirectoryMetadataTableLength());
// Read files metadata
locationInFile = header.getFileMetadataTableOffset() - header.getFileHashTableOffset(); // TODO: replace to 'CurrentPosition'?
skipBytes(bis, locationInFile);
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
byte[] fileMetadataTable = new byte[(int) header.getFileMetadataTableLength()];
if (bis.read(fileMetadataTable) != (int) header.getFileMetadataTableLength())
throw new Exception("Failed to read "+header.getFileMetadataTableLength());
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
//printDebug(directoryMetadataTable, fileMetadataTable);
bis.close();
}
private void skipBytes(BufferedInputStream bis, long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += bis.skip(mustSkip);
mustSkip = size - skipped;
}
this.directoryMetadataTable = construct.getDirectoryMetadataTable();
this.fileMetadataTable = construct.getFileMetadataTable();
}
@Override
public File getFile() { return file; }
@Override
public long getLevel6Offset() { return level6Offset; }
@Override
public Level6Header getHeader() { return header; }
public Level6Header getHeader() { return level6Header; }
@Override
public FileSystemEntry getRootEntry() { return rootEntry; }
@Override
public PipedInputStream getContent(FileSystemEntry entry) throws Exception{
if (entry.isDirectory())
throw new Exception("Request of the binary stream for the folder entry doesn't make sense.");
throw new Exception("Request of the binary stream for the folder entry is not supported (and doesn't make sense).");
PipedOutputStream streamOut = new PipedOutputStream();
Thread workerThread;
PipedInputStream streamIn = new PipedInputStream(streamOut);
long internalFileRealPosition = level6Offset + level6Header.getFileDataOffset() + entry.getFileOffset();
long internalFileSize = entry.getFileSize();
workerThread = new Thread(() -> {
System.out.println("RomFsDecryptedProvider -> getContent(): Executing thread");
try {
long subFileRealPosition = level6Offset + header.getFileDataOffset() + entry.getFileOffset();
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
skipBytes(bis, subFileRealPosition);
int readPice = 8388608; // 8mb NOTE: consider switching to 1mb 1048576
long readFrom = 0;
long realFileSize = entry.getFileSize();
byte[] readBuf;
while (readFrom < realFileSize) {
if (realFileSize - readFrom < readPice)
readPice = Math.toIntExact(realFileSize - readFrom); // it's safe, I guarantee
readBuf = new byte[readPice];
if (bis.read(readBuf) != readPice) {
System.out.println("RomFsDecryptedProvider -> getContent(): Unable to read requested size from file.");
return;
}
streamOut.write(readBuf);
readFrom += readPice;
}
bis.close();
streamOut.close();
} catch (Exception e) {
System.out.println("RomFsDecryptedProvider -> getContent(): Unable to provide stream");
e.printStackTrace();
}
System.out.println("RomFsDecryptedProvider -> getContent(): Thread is dead");
});
workerThread.start();
Thread contentRetrievingThread = new Thread(
new RomFsDecryptedContentRetrieve(file, streamOut, internalFileRealPosition, internalFileSize));
contentRetrievingThread.start();
return streamIn;
}
@Override
public File getFile() {
return file;
}
private void printDebug(byte[] directoryMetadataTable, byte[] fileMetadataTable){
new FolderMeta4Debug(header.getDirectoryMetadataTableLength(), directoryMetadataTable);
new FileMeta4Debug(header.getFileMetadataTableLength(), fileMetadataTable);
public void printDebug(){
level6Header.printDebugInfo();
new DirectoryMetaTablePlainView(level6Header.getDirectoryMetadataTableLength(), directoryMetadataTable);
new FileMetaTablePlainView(level6Header.getFileMetadataTableLength(), fileMetadataTable);
rootEntry.printTreeForDebug();
}
}

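Taken together, a typical debugging session with the refactored decrypted provider would look roughly like this; combined with a raised log level (see the Configurator sketch above), printDebug() emits the header, the plain metadata table views and the file tree. The image path and level6offset are illustrative and must match the extracted RomFS section:

import java.io.File;

public class RomFsDumpSketch {
    public static void main(String[] args) throws Exception {
        File image = new File("section0.bin");                       // hypothetical decrypted RomFS image
        RomFsDecryptedProvider provider = new RomFsDecryptedProvider(image, 0x0);
        provider.printDebug();                                        // header + metadata tables + tree via log4j2
    }
}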
View File

@ -0,0 +1,180 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import java.io.File;
import java.io.RandomAccessFile;
import java.util.Arrays;
public class RomFsEncryptedConstruct {
private final long absoluteOffsetPosition;
private final long level6Offset;
private final RandomAccessFile raf;
private final AesCtrDecryptSimple decryptor;
private Level6Header header;
private byte[] directoryMetadataTable;
private byte[] fileMetadataTable;
private FileSystemEntry rootEntry;
RomFsEncryptedConstruct(File encryptedFsImageFile,
long romFsOffset,
long level6Offset,
AesCtrDecryptSimple decryptor,
long mediaStartOffset
) throws Exception {
if (level6Offset < 0)
throw new Exception("Incorrect Level 6 Offset");
this.raf = new RandomAccessFile(encryptedFsImageFile, "r");
this.level6Offset = level6Offset;
this.absoluteOffsetPosition = romFsOffset + (mediaStartOffset * 0x200);
this.decryptor = decryptor;
raf.seek(absoluteOffsetPosition + level6Offset);
//Go to Level 6 header position
decryptor.skipNext(level6Offset / 0x200);
constructHeader();
directoryMetadataTableLengthCheck();
directoryMetadataTableConstruct();
fileMetadataTableLengthCheck();
fileMetadataTableConstruct();
constructRootFilesystemEntry();
raf.close();
}
private void constructHeader() throws Exception{
// Decrypt data
byte[] encryptedBlock = new byte[0x200];
byte[] decryptedBlock;
if (raf.read(encryptedBlock) == 0x200)
decryptedBlock = decryptor.decryptNext(encryptedBlock);
else
throw new Exception("Failed to read header (0x200 block)");
this.header = new Level6Header(decryptedBlock);
}
private void directoryMetadataTableLengthCheck() throws Exception{
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported: DirectoryMetadataTableLength < 0");
}
private void directoryMetadataTableConstruct() throws Exception{
directoryMetadataTable = readMetaTable(header.getDirectoryMetadataTableOffset(),
header.getDirectoryMetadataTableLength());
}
private void fileMetadataTableLengthCheck() throws Exception{
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported: FileMetadataTableLength < 0");
}
private void fileMetadataTableConstruct() throws Exception{
fileMetadataTable = readMetaTable(header.getFileMetadataTableOffset(),
header.getFileMetadataTableLength());
}
private void constructRootFilesystemEntry() throws Exception{
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
}
private byte[] readMetaTable(long metaOffset,
long metaSize) throws Exception{
byte[] encryptedBlock;
byte[] decryptedBlock;
byte[] metadataTable = new byte[(int) metaSize];
//0
decryptor.reset();
long startBlock = metaOffset / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
raf.seek(absoluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = metaOffset - startBlock * 0x200;
long currentPosition = 0;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
// If we have extra-small file that is less than a block and even more
if ((0x200 - ignoreBytes) > metaSize){
metadataTable = Arrays.copyOfRange(decryptedBlock, (int)ignoreBytes, 0x200);
return metadataTable;
}
else {
System.arraycopy(decryptedBlock, (int) ignoreBytes, metadataTable, 0, 0x200 - (int) ignoreBytes);
currentPosition = 0x200 - ignoreBytes;
}
}
else {
throw new Exception("Unable to get 512 bytes from 1st block for Directory/File Metadata Table");
}
startBlock++;
}
long endBlock = (metaSize + ignoreBytes) / 0x200 + startBlock; // <- pointing to place where any data related to this media-block ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (metaSize + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
System.arraycopy(decryptedBlock, 0, metadataTable, (int) currentPosition, 0x200);
}
else
throw new Exception("Unable to get 512 bytes from block for Directory/File Metadata Table");
startBlock++;
currentPosition += 0x200;
}
//4
if (extraData != 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
System.arraycopy(decryptedBlock, 0, metadataTable, (int) currentPosition, Math.abs(extraData));
}
else
throw new Exception("Unable to get 512 bytes from block for Directory/File Metadata Table");
}
return metadataTable;
}
Level6Header getHeader() { return header; }
FileSystemEntry getRootEntry(){ return rootEntry; }
byte[] getDirectoryMetadataTable() { return directoryMetadataTable; }
byte[] getFileMetadataTable() { return fileMetadataTable;}
}

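readMetaTable() aligns the requested range to 0x200-byte AES-CTR blocks; the arithmetic is easiest to follow with concrete numbers. A worked trace with hypothetical values metaOffset = 0x2F0 and metaSize = 0x450:

class ReadMetaTableTrace {
    static void trace() {
        long metaOffset = 0x2F0, metaSize = 0x450;                        // hypothetical table position and length
        long startBlock  = metaOffset / 0x200;                            // 1: first block containing the table
        long ignoreBytes = metaOffset - startBlock * 0x200;               // 0xF0: bytes dropped from that block
        long copied = 0x200 - ignoreBytes;                                // 0x110 taken from the first, partial block
        startBlock++;                                                     // 2
        long endBlock  = (metaSize + ignoreBytes) / 0x200 + startBlock;   // 0x540 / 0x200 + 2 = 4
        int  extraData = (int) ((endBlock - startBlock) * 0x200 - (metaSize + ignoreBytes)); // 0x400 - 0x540 = -0x140
        if (extraData < 0) endBlock--;                                    // 3: one full 0x200 block left for the loop
        copied += (endBlock - startBlock) * 0x200 + Math.abs(extraData);  // 0x110 + 0x200 + 0x140 = 0x450 == metaSize
        System.out.printf("copied 0x%x of 0x%x%n", copied, metaSize);
    }
}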
View File

@ -0,0 +1,138 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.io.PipedOutputStream;
import java.io.RandomAccessFile;
public class RomFsEncryptedContentRetrieve implements Runnable{
private final static Logger log = LogManager.getLogger(RomFsEncryptedContentRetrieve.class);
private final File parentFile;
private final PipedOutputStream streamOut;
private final long absoluteOffsetPosition;
private final AesCtrDecryptSimple decryptor;
private final long internalFileOffset;
private final long internalFileSize;
private final long level6Offset;
private final long headersFileDataOffset;
RomFsEncryptedContentRetrieve(File parentFile,
PipedOutputStream streamOut,
long absoluteOffsetPosition,
AesCtrDecryptSimple decryptor,
long internalFileOffset,
long internalFileSize,
long level6Offset,
long headersFileDataOffset
){
this.parentFile = parentFile;
this.absoluteOffsetPosition = absoluteOffsetPosition;
this.streamOut = streamOut;
this.decryptor = decryptor;
this.internalFileOffset = internalFileOffset;
this.internalFileSize = internalFileSize;
this.level6Offset = level6Offset;
this.headersFileDataOffset = headersFileDataOffset;
}
@Override
public void run() {
log.trace("Executing thread");
try {
byte[] encryptedBlock;
byte[] decryptedBlock;
RandomAccessFile raf = new RandomAccessFile(parentFile, "r");
//0
long startBlock = (internalFileOffset + headersFileDataOffset) / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
// long absoluteOffsetPosition = romFsOffsetPosition + (mediaStartOffset * 0x200); // calculated in constructor
raf.seek(absoluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = (internalFileOffset + headersFileDataOffset) - startBlock * 0x200;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
// If we have extra-small file that is less than a block and even more
if ((0x200 - ignoreBytes) > internalFileSize){
streamOut.write(decryptedBlock, (int)ignoreBytes, (int) internalFileSize); // safe cast
raf.close();
streamOut.close();
return;
}
else {
streamOut.write(decryptedBlock, (int) ignoreBytes, 0x200 - (int) ignoreBytes);
}
}
else {
throw new Exception("Unable to get 512 bytes from 1st block");
}
startBlock++;
}
long endBlock = (internalFileSize + ignoreBytes) / 0x200 + startBlock; // <- pointing to place where any data related to this media-block ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (internalFileSize + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
streamOut.write(decryptedBlock);
}
else
throw new Exception("Unable to get 512 bytes from block");
startBlock++;
}
//4
if (extraData != 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
streamOut.write(decryptedBlock, 0, Math.abs(extraData));
}
else
throw new Exception("Unable to get 512 bytes from block");
}
raf.close();
streamOut.close();
} catch (Exception exception) {
log.error("Unable to provide stream", exception);
}
log.trace("Thread died");
}
}

View File

@ -19,274 +19,107 @@
package libKonogonka.Tools.RomFs;
import libKonogonka.Tools.RomFs.view.DirectoryMetaTablePlainView;
import libKonogonka.Tools.RomFs.view.FileMetaTablePlainView;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import java.io.File;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.RandomAccessFile;
import java.util.Arrays;
public class RomFsEncryptedProvider implements IRomFsProvider{
private final File file;
private final long level6Offset;
private final Level6Header level6Header;
private final FileSystemEntry rootEntry;
private long level6Offset;
private final byte[] key; // Used @ createDecryptor only
private final byte[] sectionCTR; // Used @ createDecryptor only
private final long mediaStartOffset; // Used @ createDecryptor only
private final long absoluteOffsetPosition;
private File file;
private Level6Header header;
//private long mediaEndOffset; // We know this, but actually never use
private FileSystemEntry rootEntry;
// Used only for debug
private final byte[] directoryMetadataTable;
private final byte[] fileMetadataTable;
//--------------------------------
public RomFsEncryptedProvider(long level6Offset,
File encryptedFsImageFile,
long romFsOffsetPosition,
byte[] key,
byte[] sectionCTR,
long mediaStartOffset
) throws Exception{
this(level6Offset, encryptedFsImageFile, romFsOffsetPosition, key, sectionCTR, mediaStartOffset, -1);
}
private long romFSoffsetPosition;
private byte[] key;
private byte[] sectionCTR;
private long mediaStartOffset;
private long mediaEndOffset;
public RomFsEncryptedProvider(long romFSoffsetPosition,
long level6Offset,
File fileWithEncPFS0,
public RomFsEncryptedProvider(long level6Offset,
File encryptedFsImageFile,
long romFsOffsetPosition,
byte[] key,
byte[] sectionCTR,
long mediaStartOffset,
long mediaEndOffset
) throws Exception{
this.file = fileWithEncPFS0;
this.level6Offset = level6Offset;
this.romFSoffsetPosition = romFSoffsetPosition;
this.key = key;
this.sectionCTR = sectionCTR;
this.mediaStartOffset = mediaStartOffset;
this.mediaEndOffset = mediaEndOffset;
RandomAccessFile raf = new RandomAccessFile(file, "r");
long abosluteOffsetPosition = romFSoffsetPosition + (mediaStartOffset * 0x200);
raf.seek(abosluteOffsetPosition + level6Offset);
RomFsEncryptedConstruct construct = new RomFsEncryptedConstruct(encryptedFsImageFile,
romFsOffsetPosition,
level6Offset,
createDecryptor(),
mediaStartOffset);
this.file = encryptedFsImageFile;
this.level6Offset = level6Offset;
this.level6Header = construct.getHeader();
this.rootEntry = construct.getRootEntry();
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
//Go to Level 6 header
decryptor.skipNext(level6Offset / 0x200);
this.absoluteOffsetPosition = romFsOffsetPosition + (mediaStartOffset * 0x200);
// Decrypt data
byte[] encryptedBlock = new byte[0x200];
byte[] dectyptedBlock;
if (raf.read(encryptedBlock) == 0x200)
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
else
throw new Exception("Failed to read header header (0x200 - block)");
this.header = new Level6Header(dectyptedBlock);
header.printDebugInfo();
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported: DirectoryMetadataTableLength < 0");
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported: FileMetadataTableLength < 0");
/*---------------------------------*/
// Read directories metadata
byte[] directoryMetadataTable = readMetaTable(abosluteOffsetPosition,
header.getDirectoryMetadataTableOffset(),
header.getDirectoryMetadataTableLength(),
raf);
// Read files metadata
byte[] fileMetadataTable = readMetaTable(abosluteOffsetPosition,
header.getFileMetadataTableOffset(),
header.getFileMetadataTableLength(),
raf);
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
raf.close();
this.directoryMetadataTable = construct.getDirectoryMetadataTable();
this.fileMetadataTable = construct.getFileMetadataTable();
}
private byte[] readMetaTable(long abosluteOffsetPosition,
long metaOffset,
long metaSize,
RandomAccessFile raf) throws Exception{
byte[] encryptedBlock;
byte[] dectyptedBlock;
byte[] metadataTable = new byte[(int) metaSize];
//0
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
long startBlock = metaOffset / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
raf.seek(abosluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = metaOffset - startBlock * 0x200;
long currentPosition = 0;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
// If we have extra-small file that is less then a block and even more
if ((0x200 - ignoreBytes) > metaSize){
metadataTable = Arrays.copyOfRange(dectyptedBlock, (int)ignoreBytes, 0x200);
return metadataTable;
}
else {
System.arraycopy(dectyptedBlock, (int) ignoreBytes, metadataTable, 0, 0x200 - (int) ignoreBytes);
currentPosition = 0x200 - ignoreBytes;
}
}
else {
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from 1st bock for Directory Metadata Table");
}
startBlock++;
}
long endBlock = (metaSize + ignoreBytes) / 0x200 + startBlock; // <- points to the block where the data for this table ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (metaSize + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
System.arraycopy(dectyptedBlock, 0, metadataTable, (int) currentPosition, 0x200);
}
else
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from block for Directory Metadata Table");
startBlock++;
currentPosition += 0x200;
}
//4
if (extraData != 0){ // Read the remaining tail that does not fill a whole block
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
System.arraycopy(dectyptedBlock, 0, metadataTable, (int) currentPosition, Math.abs(extraData));
}
else
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from block for Directory Metadata Table");
}
return metadataTable;
}
private AesCtrDecryptSimple createDecryptor() throws Exception{
return new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
}
@Override
public File getFile() { return file; }
@Override
public long getLevel6Offset() { return level6Offset; }
@Override
public Level6Header getHeader() { return header; }
public Level6Header getHeader() {return level6Header;}
@Override
public FileSystemEntry getRootEntry() { return rootEntry; }
@Override
public PipedInputStream getContent(FileSystemEntry entry) throws Exception{
if (entry.isDirectory())
throw new Exception("Request of the binary stream for the folder entry doesn't make sense.");
throw new Exception("Request of the binary stream for the folder entry is not supported (and doesn't make sense).");
PipedOutputStream streamOut = new PipedOutputStream();
Thread workerThread;
PipedInputStream streamIn = new PipedInputStream(streamOut);
workerThread = new Thread(() -> {
System.out.println("RomFsDecryptedProvider -> getContent(): Executing thread");
try {
long internalFileOffset = entry.getFileOffset();
long internalFileSize = entry.getFileSize();
byte[] encryptedBlock;
byte[] dectyptedBlock;
RandomAccessFile raf = new RandomAccessFile(file, "r");
//0
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
long startBlock = (entry.getFileOffset() + header.getFileDataOffset()) / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
long absoluteOffsetPosition = romFSoffsetPosition + (mediaStartOffset * 0x200);
raf.seek(absoluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = (entry.getFileOffset() + header.getFileDataOffset()) - startBlock * 0x200;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
// If the whole file fits into the remainder of this first block
if ((0x200 - ignoreBytes) > entry.getFileSize()){
streamOut.write(dectyptedBlock, (int)ignoreBytes, (int) entry.getFileSize()); // safe cast
raf.close();
streamOut.close();
return;
}
else {
streamOut.write(dectyptedBlock, (int) ignoreBytes, 0x200 - (int) ignoreBytes);
}
}
else {
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from 1st bock for Directory Metadata Table");
}
startBlock++;
}
long endBlock = (entry.getFileSize() + ignoreBytes) / 0x200 + startBlock; // <- points to the block where the file's data ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (entry.getFileSize() + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
streamOut.write(dectyptedBlock);
}
else
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from block for Directory Metadata Table");
startBlock++;
}
//4
if (extraData != 0){ // Read the remaining tail that does not fill a whole block
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
dectyptedBlock = decryptor.dectyptNext(encryptedBlock);
streamOut.write(dectyptedBlock, 0, Math.abs(extraData));
}
else
throw new Exception("RomFsEncryptedProvider(): Unable to get 512 bytes from block for Directory Metadata Table");
}
raf.close();
streamOut.close();
} catch (Exception e) {
System.out.println("RomFsDecryptedProvider -> getContent(): Unable to provide stream");
e.printStackTrace();
}
System.out.println("RomFsDecryptedProvider -> getContent(): Thread is dead");
});
workerThread.start();
Thread contentRetrievingThread = new Thread(new RomFsEncryptedContentRetrieve(
file,
streamOut,
absoluteOffsetPosition,
createDecryptor(),
internalFileOffset,
internalFileSize,
level6Offset,
level6Header.getFileDataOffset()
));
contentRetrievingThread.start();
return streamIn;
}
@Override
public File getFile() {
return file;
}
private void printDebug(byte[] directoryMetadataTable, byte[] fileMetadataTable){
new FolderMeta4Debug(header.getDirectoryMetadataTableLength(), directoryMetadataTable);
new FileMeta4Debug(header.getFileMetadataTableLength(), fileMetadataTable);
}
public void printDebug(){
level6Header.printDebugInfo();
new DirectoryMetaTablePlainView(level6Header.getDirectoryMetadataTableLength(), directoryMetadataTable);
new FileMetaTablePlainView(level6Header.getFileMetadataTableLength(), fileMetadataTable);
rootEntry.printTreeForDebug();
}
}
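For context, a minimal consumer sketch (not part of the commit) for the stream API above: getContent() hands back a PipedInputStream that the provider's worker thread keeps filling, so the caller only has to drain it. The package of RomFsEncryptedProvider is assumed to match RomFsDecryptedProvider, and the target file is an illustration only.

import libKonogonka.Tools.RomFs.FileSystemEntry;
import libKonogonka.Tools.RomFs.RomFsEncryptedProvider; // assumed package, as for RomFsDecryptedProvider

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;

class RomFsExtractSketch {
    static void extract(RomFsEncryptedProvider provider, FileSystemEntry entry, File target) throws Exception {
        if (entry.isDirectory())
            return; // getContent() refuses directory entries
        try (InputStream in = provider.getContent(entry);
             BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(target))) {
            byte[] chunk = new byte[0x200]; // matches the provider's media-block size
            int read;
            while ((read = in.read(chunk)) != -1)
                out.write(chunk, 0, read);
        }
    }
}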

View File

@ -0,0 +1,82 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Converter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.RainbowDump.formatDecHexString;
public class DirectoryMetaTablePlainView {
private final static Logger log = LogManager.getLogger(DirectoryMetaTablePlainView.class);
// directoryMetadataTableLength must be passed explicitly: directoryMetadataTable may be larger than the declared size when it was assembled from encrypted 0x200-byte blocks
public DirectoryMetaTablePlainView(long directoryMetadataTableLength, byte[] directoryMetadataTable){
int i = 0;
while (i < directoryMetadataTableLength){
FolderMeta folderMeta = new FolderMeta();
folderMeta.parentDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextSiblingDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstSubdirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstFileOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirNameLength = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirName = new String(Arrays.copyOfRange(directoryMetadataTable, i, i + folderMeta.dirNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(folderMeta.dirNameLength);
log.debug(
"- DIRECTORY -\n" +
"Offset of Parent Directory (self if Root) " + formatDecHexString(folderMeta.parentDirectoryOffset ) +"\n" +
"Offset of next Sibling Directory " + formatDecHexString(folderMeta.nextSiblingDirectoryOffset) +"\n" +
"Offset of first Child Directory (Subdirectory) " + formatDecHexString(folderMeta.firstSubdirectoryOffset ) +"\n" +
"Offset of first File (in File Metadata Table) " + formatDecHexString(folderMeta.firstFileOffset ) +"\n" +
"Offset of next Directory in the same Hash Table bucket " + formatDecHexString(folderMeta.nextDirectoryOffset ) +"\n" +
"Name Length " + formatDecHexString(folderMeta.dirNameLength ) +"\n" +
"Name Length (rounded up to multiple of 4) " + folderMeta.dirName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FolderMeta {
int parentDirectoryOffset;
int nextSiblingDirectoryOffset;
int firstSubdirectoryOffset;
int firstFileOffset;
int nextDirectoryOffset;
int dirNameLength;
String dirName;
}
}

View File

@ -0,0 +1,81 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Converter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FileMetaTablePlainView {
private final static Logger log = LogManager.getLogger(FileMetaTablePlainView.class);
public FileMetaTablePlainView(long fileMetadataTableLength, byte[] fileMetadataTable) {
int i = 0;
while (i < fileMetadataTableLength) {
FileMeta fileMeta = new FileMeta();
fileMeta.containingDirectoryOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.nextSiblingFileOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileDataOffset = Converter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.fileDataLength = Converter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.nextFileOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileNameLength = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileName = new String(Arrays.copyOfRange(fileMetadataTable, i, i + fileMeta.fileNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(fileMeta.fileNameLength);
log.debug(
"- FILE -\n" +
"Offset of Containing Directory " + formatDecHexString(fileMeta.containingDirectoryOffset) + "\n" +
"Offset of next Sibling File " + formatDecHexString(fileMeta.nextSiblingFileOffset) + "\n" +
"Offset of File's Data " + formatDecHexString(fileMeta.fileDataOffset) + "\n" +
"Length of File's Data " + formatDecHexString(fileMeta.fileDataLength) + "\n" +
"Offset of next File in the same Hash Table bucket " + formatDecHexString(fileMeta.nextFileOffset) + "\n" +
"Name Length " + formatDecHexString(fileMeta.fileNameLength) + "\n" +
"Name Length (rounded up to multiple of 4) " + fileMeta.fileName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FileMeta{
int containingDirectoryOffset;
int nextSiblingFileOffset;
long fileDataOffset;
long fileDataLength;
int nextFileOffset;
int fileNameLength;
String fileName;
}
}

View File

@ -0,0 +1,65 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Tools.RomFs.FileSystemEntry;
import java.util.List;
/**
* Used in pair with FileSystemEntry
* */
public class FileSystemTreeViewMaker {
private StringBuilder tree;
private int spacerForSizes;
private FileSystemTreeViewMaker(){}
private void init(List<FileSystemEntry> content){
tree = new StringBuilder("/\n");
for (FileSystemEntry entry: content)
printEntry(2, entry);
}
private void printEntry(int count, FileSystemEntry entry) {
int i;
for (i = 0; i < count; i++)
tree.append(" ");
if (entry.isDirectory()) {
tree.append("|-");
tree.append(entry.getName());
tree.append("\n");
for (FileSystemEntry e : entry.getContent())
printEntry(count + 2, e);
return;
}
tree.append("|-");
tree.append(entry.getName());
tree.append(String.format("%"+(spacerForSizes-entry.getName().length()-i)+"s0x%-10x 0x%-10x", "", entry.getFileOffset(), entry.getFileSize()));
tree.append("\n");
}
public static String make(List<FileSystemEntry> content, int spacerForSizes){
FileSystemTreeViewMaker maker = new FileSystemTreeViewMaker();
maker.spacerForSizes = spacerForSizes;
maker.init(content);
return maker.tree.toString();
}
}
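A minimal usage sketch for the tree view builder above, assuming a root FileSystemEntry obtained from one of the RomFs providers; the second argument only controls the column where file offsets and sizes start.

import libKonogonka.Tools.RomFs.FileSystemEntry;
import libKonogonka.Tools.RomFs.view.FileSystemTreeViewMaker;

class TreeViewSketch {
    static String render(FileSystemEntry rootEntry) {
        // rootEntry.getContent() lists the direct children of "/"
        return FileSystemTreeViewMaker.make(rootEntry.getContent(), 100);
    }
}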

View File

@ -20,11 +20,11 @@ package libKonogonka.Tools.TIK;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
/*
DON'T TRUST THE WIKI. Ticket size is always (?) equal to 0x02c0 (704 bytes)
@ -67,41 +67,41 @@ Next:
* */
public class TIKProvider {
// Signature-related
private byte[] sigType;
private byte[] signature;
private final byte[] sigType;
private final byte[] signature;
// Ticket
private String Issuer;
private byte[] TitleKeyBlockStartingBytes; // Actually 32 bytes.
private byte[] TitleKeyBlockEndingBytes; // Anything else
private byte Unknown1;
private byte TitleKeyType;
private byte[] Unknown2;
private byte MasterKeyRevision;
private byte[] Unknown3;
private byte[] TicketId;
private byte[] DeviceId;
private byte[] RightsId;
private byte[] RightsIdEndingBytes;
private byte[] AccountId;
private byte[] Unknown4;
private final String Issuer;
private final byte[] TitleKeyBlockStartingBytes; // Actually 32 bytes.
private final byte[] TitleKeyBlockEndingBytes; // Everything left
private final byte Unknown1;
private final byte TitleKeyType;
private final byte[] Unknown2;
private final byte MasterKeyRevision;
private final byte[] Unknown3;
private final byte[] TicketId;
private final byte[] DeviceId;
private final byte[] RightsId;
//private byte[] RightsIdEndingBytes;
private final byte[] AccountId;
private final byte[] Unknown4;
public TIKProvider(File file) throws Exception{ this(file, 0); }
public TIKProvider(File file, long offset) throws Exception {
if (file.length() - offset < 0x02c0)
throw new Exception("TIKProvider: File is too small.");
throw new Exception("File is too small.");
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file));
BufferedInputStream bis = new BufferedInputStream(Files.newInputStream(file.toPath()));
if (bis.skip(offset) != offset) {
bis.close();
throw new Exception("TIKProvider: Unable to skip requested range - " + offset);
throw new Exception("Unable to skip requested range - " + offset);
}
sigType = new byte[0x4];
if (bis.read(sigType) != 4) {
bis.close();
throw new Exception("TIKProvider: Unable to read requested range - " + offset);
throw new Exception("Unable to read requested range - " + offset);
}
byte[] readChunk;
@ -112,7 +112,7 @@ public class TIKProvider {
readChunk = new byte[0x23c];
if (bis.read(readChunk) != 0x23c) {
bis.close();
throw new Exception("TIKProvider: Unable to read requested range - 0x23c");
throw new Exception("Unable to read requested range - 0x23c");
}
signature = Arrays.copyOfRange(readChunk, 0, 0x200);
break;
@ -121,7 +121,7 @@ public class TIKProvider {
readChunk = new byte[0x13c];
if (bis.read(readChunk) != 0x13c) {
bis.close();
throw new Exception("TIKProvider: Unable to read requested range - 0x13c");
throw new Exception("Unable to read requested range - 0x13c");
}
signature = Arrays.copyOfRange(readChunk, 0, 0x100);
break;
@ -130,20 +130,20 @@ public class TIKProvider {
readChunk = new byte[0x7c];
if (bis.read(readChunk) != 0x7c) {
bis.close();
throw new Exception("TIKProvider: Unable to read requested range - 0x7c");
throw new Exception("Unable to read requested range - 0x7c");
}
signature = Arrays.copyOfRange(readChunk, 0, 0x3c);
break;
default:
bis.close();
throw new Exception("TIKProvider: Unknown ticket (Signature) type. Aborting.");
throw new Exception("Unknown ticket (Signature) type. Aborting.");
}
// Let's read ticket body itself
readChunk = new byte[0x180];
if (bis.read(readChunk) != 0x180) {
bis.close();
throw new Exception("TIKProvider: Unable to read requested range - Ticket data");
throw new Exception("Unable to read requested range - Ticket data");
}
bis.close();

View File

@ -24,7 +24,7 @@ import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.*;
import static libKonogonka.Converter.*;
/**
* HFS0

View File

@ -22,8 +22,8 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;
/**
* Header information
* */

View File

@ -23,8 +23,8 @@ import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;
/**
* Gamecard Info

View File

@ -18,7 +18,9 @@
*/
package libKonogonka.ctraes;
import libKonogonka.LoperConverter;
import libKonogonka.Converter;
import libKonogonka.RainbowDump;
/**
* Simplify decryption of the CTR
*/
@ -28,13 +30,15 @@ public class AesCtrDecryptSimple {
private byte[] IVarray;
private AesCtr aesCtr;
private final byte[] initialKey;
private final byte[] initialSectionCTR;
private final long initialRealMediaOffset;
public AesCtrDecryptSimple(byte[] key, byte[] sectionCTR, long realMediaOffset) throws Exception{
this.realMediaOffset = realMediaOffset;
aesCtr = new AesCtr(key);
// IV for CTR == 16 bytes
IVarray = new byte[0x10];
// Populate first 8 bytes taken from Header's section Block CTR
System.arraycopy(LoperConverter.flip(sectionCTR), 0x0, IVarray, 0x0, 0x8);
this.initialKey = key;
this.initialSectionCTR = sectionCTR;
this.initialRealMediaOffset = realMediaOffset;
reset();
}
public void skipNext(){
@ -42,22 +46,30 @@ public class AesCtrDecryptSimple {
}
public void skipNext(long blocksNum){
if (blocksNum > 0)
realMediaOffset += blocksNum * 0x200;
realMediaOffset += blocksNum * 0x200;
}
public byte[] dectyptNext(byte[] enctyptedBlock) throws Exception{
updateIV(realMediaOffset);
byte[] decryptedBlock = aesCtr.decrypt(enctyptedBlock, IVarray);
public byte[] decryptNext(byte[] encryptedBlock) throws Exception{
updateIV();
byte[] decryptedBlock = aesCtr.decrypt(encryptedBlock, IVarray);
realMediaOffset += 0x200;
return decryptedBlock;
}
// Populate the last 8 bytes with the calculated offset. Thanks, hactool project!
private void updateIV(long offset){
offset >>= 4;
private void updateIV(){
long offset = realMediaOffset >> 4;
for (int i = 0; i < 0x8; i++){
IVarray[0x10-i-1] = (byte)(offset & 0xff); // Note: issues could be here
offset >>= 8;
}
}
public void reset() throws Exception{
realMediaOffset = initialRealMediaOffset;
aesCtr = new AesCtr(initialKey);
// IV for CTR == 16 bytes
IVarray = new byte[0x10];
// Populate first 4 bytes taken from Header's section Block CTR (aka SecureValue)
System.arraycopy(Converter.flip(initialSectionCTR), 0x0, IVarray, 0x0, 0x4);
}
}
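A minimal driving sketch for the refactored decryptor, using only the signatures visible in the diff above; the key, section CTR and offsets are assumed to come from the corresponding NCA section header.

import libKonogonka.ctraes.AesCtrDecryptSimple;

class DecryptSketch {
    static byte[] decryptOneBlock(byte[] key, byte[] sectionCTR, long mediaStartOffset,
                                  long level6Offset, byte[] encryptedBlock) throws Exception {
        AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
        decryptor.skipNext(level6Offset / 0x200);                  // advance the counter without decrypting
        byte[] decrypted = decryptor.decryptNext(encryptedBlock);  // one 0x200-byte block per call
        decryptor.reset();                                         // rewind IV and offset for reuse
        return decrypted;
    }
}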

View File

@ -0,0 +1,84 @@
## LEVELS ARE:
# * ALL
# * TRACE
# * DEBUG
# * INFO
# * WARN
# * ERROR
# * FATAL
# * OFF
#############
# Extra logging related to initialization of Log4j
# Set to debug or trace if log4j initialization is failing
status = error
# Name of the configuration
name = DebugConfigDevelopmentOnlyVerbose
# Configure root logger level
rootLogger.level = TRACE
# Root logger referring to console appender
rootLogger.appenderRef.stdout.ref = consoleLogger
# Console appender configuration
appender.console.type = Console
appender.console.name = consoleLogger
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
##################################################
# # Enable log to files
# rootLogger.appenderRef.rolling.ref = fileLogger
# # Log files location
# property.basePath = /tmp
# property.filename = libKonogonka
# # RollingFileAppender name, pattern, path and rollover policy
# appender.rolling.type = RollingFile
# appender.rolling.name = fileLogger
# appender.rolling.fileName= ${basePath}/${filename}.log
# appender.rolling.filePattern= ${basePath}/${filename}_%d{yyyyMMdd}.log.gz
# appender.rolling.layout.type = PatternLayout
# appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} %level [%t] [%l] - %msg%n
# appender.rolling.policies.type = Policies
#
# # RollingFileAppender rotation policy
# appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
# appender.rolling.policies.size.size = 10MB
# # Setting for time-based policies
# #appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
# #appender.rolling.policies.time.interval = 1
# #appender.rolling.policies.time.modulate = true
# appender.rolling.strategy.type = DefaultRolloverStrategy
# appender.rolling.strategy.delete.type = Delete
# appender.rolling.strategy.delete.basePath = ${basePath}
# appender.rolling.strategy.delete.maxDepth = 10
# appender.rolling.strategy.delete.ifLastModified.type = IfLastModified
#
# # Delete all files older than 30 days
# appender.rolling.strategy.delete.ifLastModified.age = 30d
#
##################################################
#
# # Redirect log messages to a log file, support file rolling.
# appender.file.type = RollingFile
# appender.file.name = File
# appender.file.fileName = /opt/IBM/configurator_logs/${filename}
# appender.file.filePattern = /opt/IBM/configurator_logs/${filename}.%i
# appender.file.layout.type = PatternLayout
# appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1} - %m%n
# appender.file.policies.type = Policies
# appender.file.policies.size.type = SizeBasedTriggeringPolicy
# appender.file.policies.size.size=5MB
# appender.file.strategy.type = DefaultRolloverStrategy
# appender.file.strategy.max = 10
#
# rootLogger.appenderRefs = file, console
# rootLogger.appenderRef.console.ref = STDOUT
# rootLogger.appenderRef.file.ref = File
#
# loggers = file
#
# logger.file.name = com.comergent.apps.configurator.initializer.FunctionLoader
# logger.file.level = debug
# logger.file.additivity = false
# logger.file.appenderRef.file.ref = File
#
##################################################
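A minimal bootstrap sketch for pointing a run at a specific copy of this configuration; the path below is an assumption, and when nothing is set log4j2 picks up log4j2.properties from the classpath on its own.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class Log4jBootstrapSketch {
    public static void main(String[] args) {
        // Must be set before the first LogManager call, otherwise the default lookup applies.
        System.setProperty("log4j2.configurationFile", "src/main/resources/log4j2.properties"); // assumed path
        Logger log = LogManager.getLogger(Log4jBootstrapSketch.class);
        log.debug("log4j2 configuration loaded");
    }
}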

View File

@ -0,0 +1,85 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import libKonogonka.KeyChainHolder;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Map;
public class KeyChainHolderTest {
private static final String keysFileLocation = "./FilesForTests/prod.keys";
private static final String xci_header_keyFileLocation = "./FilesForTests/xci_header_key.txt";
private KeyChainHolder keyChainHolder;
@Disabled
@DisplayName("Key Chain Holder Test")
@Test
void keysChain() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(xci_header_keyFileLocation));
String keyValue = br.readLine();
br.close();
if (keyValue == null)
throw new Exception("Unable to retrieve xci_header_key");
keyValue = keyValue.trim();
keyChainHolder = new KeyChainHolder(keysFileLocation, keyValue);
}
void printXciHeaderKey(){
System.out.println("-=== xci_header ===-");
System.out.println(keyChainHolder.getXci_header_key());
}
void printKAKApplication(){
System.out.println("-=== key_area_key_application test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_application().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKOcean(){
System.out.println("-=== key_area_key_ocean test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_ocean().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKSystem(){
System.out.println("-=== key_area_key_system test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_system().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKTitleKek(){
System.out.println("-=== titlekek test ===-");
for (Map.Entry entry : keyChainHolder.getTitlekek().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printRawKeySet(){
System.out.println("-=== Raw Key Set (everything) test ===-");
for (Map.Entry entry : keyChainHolder.getRawKeySet().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
}

View File

@ -0,0 +1,69 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import java.io.File;
import java.nio.file.Path;
import libKonogonka.Tools.RomFs.RomFsDecryptedProvider;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
// log.fatal("Configuration File Defined To Be :: "+System.getProperty("log4j.configurationFile"));
public class RomFsDecryptedTest {
@TempDir
Path mainLogsDir;
private static final String decryptedFileAbsolutePath = "./FilesForTests/NCAContent_0 [lv6 147456].bin";
private File decryptedFile;
long lv6offset;
RomFsDecryptedProvider provider;
@Disabled
@DisplayName("RomFsDecryptedProvider: tests")
@Test
void romFsValidation() throws Exception{
makeFile();
parseLv6offsetFromFileName();
makeProvider();
provider.printDebug();
}
void makeFile(){
decryptedFile = new File(decryptedFileAbsolutePath);
}
void parseLv6offsetFromFileName(){
lv6offset = Long.parseLong(decryptedFile.getName().replaceAll("(^.*lv6\\s)|(]\\.bin)", ""));
}
void makeProvider() throws Exception{
provider = new RomFsDecryptedProvider(decryptedFile, lv6offset);
}
/*
void checkFilesWorkers(){
assertTrue(fw1 instanceof WorkerFiles);
assertTrue(fw2 instanceof WorkerFiles);
assertTrue(fw3 instanceof WorkerFiles);
}
*/
}
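For reference, a standalone check of the lv6-offset parsing used in the test above; the file name is taken from the test's decryptedFileAbsolutePath constant.

class Lv6OffsetParseSketch {
    public static void main(String[] args) {
        String name = "NCAContent_0 [lv6 147456].bin";
        long lv6offset = Long.parseLong(name.replaceAll("(^.*lv6\\s)|(]\\.bin)", ""));
        System.out.println(lv6offset); // prints 147456
    }
}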

View File

@ -0,0 +1,96 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import libKonogonka.KeyChainHolder;
import libKonogonka.Tools.NCA.NCAHeaderTableEntry;
import libKonogonka.Tools.NCA.NCAProvider;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import org.junit.jupiter.api.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class RomFsEncryptedTest {
private static final String keysFileLocation = "./FilesForTests/prod.keys";
private static final String xci_header_keyFileLocation = "./FilesForTests/xci_header_key.txt";
private static final String ncaFileLocation = "./FilesForTests/PFS_RomFS.nca";
private static KeyChainHolder keyChainHolder;
private static NCAProvider ncaProvider;
@Disabled
@Order(1)
@DisplayName("KeyChain test")
@Test
void keysChain() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(xci_header_keyFileLocation));
String keyValue = br.readLine();
br.close();
if (keyValue == null)
throw new Exception("Unable to retrieve xci_header_key");
keyValue = keyValue.trim();
keyChainHolder = new KeyChainHolder(keysFileLocation, keyValue);
}
@Disabled
@Order(2)
@DisplayName("RomFsEncryptedProvider: NCA provider quick test")
@Test
void ncaProvider() throws Exception{
ncaProvider = new NCAProvider(new File(ncaFileLocation), keyChainHolder.getRawKeySet());
}
@Disabled
@Order(3)
@DisplayName("RomFsEncryptedProvider: RomFs test")
@Test
void romFsValidation() throws Exception{
for (byte i = 0; i < 4; i++){
if (ncaProvider.getSectionBlock(i).getFsType() == 0 && ncaProvider.getSectionBlock(i).getCryptoType() != 0){
ncaProvider.getNCAContentProvider(i).getRomfs().printDebug();
ncaProvider.getSectionBlock(i).printDebug();
return;
}
}
}
@Disabled
@Order(4)
@DisplayName("RomFsEncryptedProvider: NCA Header Table Entries test")
@Test
void NcaHeaderTableEntryValidation() throws Exception{
for (byte i = 0; i < 4; i++){
NcaFsHeader header = ncaProvider.getSectionBlock(i);
if (header != null)
header.printDebug();
}
}
@Disabled
@Order(5)
@DisplayName("RomFsEncryptedProvider: PFS test")
@Test
void pfsValidation(){
//1 PFS and cryptotype != 0
}
}