Refactoring. Start moving to log4j2
Some checks failed
continuous-integration/drone/push Build is failing

This commit is contained in:
Dmitry Isaenko 2022-09-05 00:39:48 +03:00
parent 55fb58f25b
commit 220d14e2c1
46 changed files with 2242 additions and 1048 deletions
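The change this commit begins is largely mechanical: System.out.println(...) calls and e.printStackTrace() are replaced with a per-class log4j2 logger. A minimal sketch of the pattern that repeats throughout the diff below (the class name here is only a placeholder):

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class SomeProvider {
    private final static Logger log = LogManager.getLogger(SomeProvider.class);

    void parse() {
        log.trace("Thread started");                     // start/stop notices go to trace
        log.debug("Detailed state dumps go to debug");
        try {
            throw new IllegalStateException("example failure");
        }
        catch (Exception e) {
            log.error("Parsing failed", e);              // passing the exception lets log4j2 print the stack trace
        }
    }
}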

View file

@ -1,4 +1,4 @@
# konogonka # libKonogonka
[![Build Status](https://ci.redrise.ru/api/badges/desu/libKonogonka/status.svg)](https://ci.redrise.ru/desu/libKonogonka) [![Build Status](https://ci.redrise.ru/api/badges/desu/libKonogonka/status.svg)](https://ci.redrise.ru/desu/libKonogonka)

40
pom.xml
View file

@ -6,7 +6,7 @@
<groupId>ru.redrise</groupId> <groupId>ru.redrise</groupId>
<artifactId>libKonogonka</artifactId> <artifactId>libKonogonka</artifactId>
<version>0.1</version> <version>0.1-SNAPSHOT</version>
<url>https://git.redrise.ru/desu/${project.name}}/</url> <url>https://git.redrise.ru/desu/${project.name}}/</url>
<description> <description>
@ -62,8 +62,34 @@
<version>1.0</version> <version>1.0</version>
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
</dependencies>
<!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.18.0</version>
<scope>compile</scope>
</dependency>
<!-- testing -->
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-params</artifactId>
<version>5.5.2</version>
<scope>test</scope>
</dependency>
</dependencies>
<build> <build>
<finalName>${project.artifactId}-${project.version}-${maven.build.timestamp}</finalName> <finalName>${project.artifactId}-${project.version}-${maven.build.timestamp}</finalName>
<plugins> <plugins>
@ -100,6 +126,7 @@
</executions> </executions>
</plugin> </plugin>
--> -->
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId> <artifactId>maven-compiler-plugin</artifactId>
@ -109,7 +136,7 @@
<target>8</target> <target>8</target>
</configuration> </configuration>
</plugin> </plugin>
<!-- Generate JAR with dependencies --> <!-- Generate JAR with dependencies
<plugin> <plugin>
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId> <artifactId>maven-assembly-plugin</artifactId>
@ -123,18 +150,19 @@
<descriptorRefs> <descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef> <descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs> </descriptorRefs>
<!-- <appendAssemblyId>false</appendAssemblyId> --> <!- <appendAssemblyId>false</appendAssemblyId> -
</configuration> </configuration>
<executions> <executions>
<execution> <execution>
<id>make-assembly</id> <!-- this is used for inheritance merges --> <id>make-assembly</id>
<phase>package</phase> <!-- bind to the packaging phase --> <phase>package</phase>
<goals> <goals>
<goal>single</goal> <goal>single</goal>
</goals> </goals>
</execution> </execution>
</executions> </executions>
</plugin> </plugin>
-->
</plugins> </plugins>
</build> </build>
</project> </project>
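Besides log4j-core, the pom now pulls in the three junit-jupiter artifacts with test scope. No tests appear in this excerpt, so the following is only a hypothetical sketch of what a JUnit 5 test against the renamed Converter class could look like:

import libKonogonka.Converter;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

class ConverterTest {
    @Test
    void readsLittleEndianInt() {
        // bytes 78 56 34 12 read as a little-endian int give 0x12345678
        byte[] bytes = {0x78, 0x56, 0x34, 0x12};
        Assertions.assertEquals(0x12345678, Converter.getLEint(bytes, 0));
    }
}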

View file

@ -21,7 +21,7 @@ package libKonogonka;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
public class LoperConverter { public class Converter {
public static int getLEint(byte[] bytes, int fromOffset){ public static int getLEint(byte[] bytes, int fromOffset){
return ByteBuffer.wrap(bytes, fromOffset, 0x4).order(ByteOrder.LITTLE_ENDIAN).getInt(); return ByteBuffer.wrap(bytes, fromOffset, 0x4).order(ByteOrder.LITTLE_ENDIAN).getInt();
} }
@ -30,7 +30,7 @@ public class LoperConverter {
return ByteBuffer.wrap(bytes, fromOffset, 0x8).order(ByteOrder.LITTLE_ENDIAN).getLong(); return ByteBuffer.wrap(bytes, fromOffset, 0x8).order(ByteOrder.LITTLE_ENDIAN).getLong();
} }
/** /**
* Convert int to long. Workaround to store unsigned int * Convert (usually unsigned) int to long. Workaround to store unsigned int
* @param bytes original array * @param bytes original array
* @param fromOffset start position of the 4-bytes value * @param fromOffset start position of the 4-bytes value
* */ * */
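The method this javadoc belongs to (getLElongOfInt, judging by its callers in NCAHeaderTableEntry further down) exists because Java has no unsigned 32-bit type: a raw value such as 0xFFFFFFFF has to be widened into a long to stay positive. Its body is cut off in this excerpt, so the sketch below only assumes it mirrors the convertUnsignedIntBytesToLong helper that this commit removes:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class UnsignedIntExample {
    // read 4 little-endian bytes and keep the value positive by widening it into a long
    static long getLElongOfInt(byte[] bytes, int fromOffset) {
        int signed = ByteBuffer.wrap(bytes, fromOffset, 0x4).order(ByteOrder.LITTLE_ENDIAN).getInt();
        return signed & 0xFFFFFFFFL;   // 0xFFFFFFFF becomes 4294967295 instead of -1
    }

    public static void main(String[] args) {
        byte[] raw = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
        System.out.println(getLElongOfInt(raw, 0));   // prints 4294967295
    }
}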

View file

@ -0,0 +1,103 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.HashMap;
public class KeyChainHolder {
private final File keysFile;
private final String xci_header_key;
private HashMap<String, String> rawKeySet;
private HashMap<String, String> key_area_key_application,
key_area_key_ocean,
key_area_key_system,
titlekek;
public KeyChainHolder(String pathToKeysFile, String xci_header_key) throws Exception{
this(new File(pathToKeysFile), xci_header_key);
}
public KeyChainHolder(File keysFile, String xci_header_key) throws Exception{
this.keysFile = keysFile;
this.xci_header_key = xci_header_key;
collectEverything();
}
private void collectEverything() throws Exception{
rawKeySet = new HashMap<>();
BufferedReader br = new BufferedReader(new FileReader(keysFile));
String fileLine;
String[] keyValue;
while ((fileLine = br.readLine()) != null){
keyValue = fileLine.trim().split("\\s+?=\\s+?", 2);
if (keyValue.length == 2)
rawKeySet.put(keyValue[0], keyValue[1]);
}
key_area_key_application = collectKeysByType("key_area_key_application");
key_area_key_ocean = collectKeysByType("key_area_key_ocean");
key_area_key_system = collectKeysByType("key_area_key_system");
titlekek = collectKeysByType("titlekek");
}
private HashMap<String, String> collectKeysByType(String keyName){
HashMap<String, String> tempKeySet = new HashMap<>();
String keyNamePattern = keyName+"_%02x";
String keyParsed;
int counter = 0;
// look the numbered keys (e.g. key_area_key_application_00, _01, ...) up in the parsed key set
while ((keyParsed = rawKeySet.get(String.format(keyNamePattern, counter))) != null){
tempKeySet.put(String.format(keyNamePattern, counter), keyParsed);
counter++;
}
return tempKeySet;
}
public String getXci_header_key() {
return xci_header_key;
}
public String getHeader_key() {
return rawKeySet.get("header_key");
}
public HashMap<String, String> getRawKeySet() {
return rawKeySet;
}
public HashMap<String, String> getKey_area_key_application() {
return key_area_key_application;
}
public HashMap<String, String> getKey_area_key_ocean() {
return key_area_key_ocean;
}
public HashMap<String, String> getKey_area_key_system() {
return key_area_key_system;
}
public HashMap<String, String> getTitlekek() {
return titlekek;
}
}
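KeyChainHolder is new in this commit: it reads a plain "key = value" text file and groups the numbered key_area_key_* and titlekek entries. A minimal usage sketch (both constructor arguments are placeholders):

import libKonogonka.KeyChainHolder;

public class KeysExample {
    public static void main(String[] args) throws Exception {
        // placeholder path to a prod.keys-style file and a placeholder XCI header key string
        KeyChainHolder keyChain = new KeyChainHolder("/home/user/.switch/prod.keys", "00112233445566778899aabbccddeeff");
        String headerKey = keyChain.getHeader_key();    // raw value of 'header_key'
        String kaek00 = keyChain.getKey_area_key_application().get("key_area_key_application_00");
        System.out.println(headerKey + " / " + kaek00);
    }
}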

View file

@ -18,8 +18,9 @@
*/ */
package libKonogonka.Tools.NCA; package libKonogonka.Tools.NCA;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NCASectionBlock; import libKonogonka.RainbowDump;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import libKonogonka.Tools.PFS0.IPFS0Provider; import libKonogonka.Tools.PFS0.IPFS0Provider;
import libKonogonka.Tools.PFS0.PFS0EncryptedProvider; import libKonogonka.Tools.PFS0.PFS0EncryptedProvider;
import libKonogonka.Tools.PFS0.PFS0Provider; import libKonogonka.Tools.PFS0.PFS0Provider;
@ -27,6 +28,8 @@ import libKonogonka.Tools.RomFs.IRomFsProvider;
import libKonogonka.Tools.RomFs.RomFsEncryptedProvider; import libKonogonka.Tools.RomFs.RomFsEncryptedProvider;
import libKonogonka.ctraes.AesCtrDecryptSimple; import libKonogonka.ctraes.AesCtrDecryptSimple;
import libKonogonka.exceptions.EmptySectionException; import libKonogonka.exceptions.EmptySectionException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*; import java.io.*;
import java.util.LinkedList; import java.util.LinkedList;
@ -35,26 +38,28 @@ import java.util.LinkedList;
* TODO: MAKE SOME DECOMPOSITION * TODO: MAKE SOME DECOMPOSITION
* */ * */
public class NCAContent { public class NCAContent {
private File file; private final static Logger log = LogManager.getLogger(NCAContent.class);
private long offsetPosition;
private NCASectionBlock ncaSectionBlock;
private NCAHeaderTableEntry ncaHeaderTableEntry;
private byte[] decryptedKey;
private LinkedList<byte[]> Pfs0SHA256hashes; private final File file;
private final long offsetPosition;
private final NcaFsHeader ncaFsHeader;
private final NCAHeaderTableEntry ncaHeaderTableEntry;
private final byte[] decryptedKey;
private final LinkedList<byte[]> Pfs0SHA256hashes;
private IPFS0Provider pfs0; private IPFS0Provider pfs0;
private IRomFsProvider romfs; private IRomFsProvider romfs;
// TODO: if decryptedKey is empty, throw exception ?? // TODO: if decryptedKey is empty, throw exception ??
public NCAContent(File file, public NCAContent(File file,
long offsetPosition, long offsetPosition,
NCASectionBlock ncaSectionBlock, NcaFsHeader ncaFsHeader,
NCAHeaderTableEntry ncaHeaderTableEntry, NCAHeaderTableEntry ncaHeaderTableEntry,
byte[] decryptedKey) throws Exception byte[] decryptedKey) throws Exception
{ {
this.file = file; this.file = file;
this.offsetPosition = offsetPosition; this.offsetPosition = offsetPosition;
this.ncaSectionBlock = ncaSectionBlock; this.ncaFsHeader = ncaFsHeader;
this.ncaHeaderTableEntry = ncaHeaderTableEntry; this.ncaHeaderTableEntry = ncaHeaderTableEntry;
this.decryptedKey = decryptedKey; this.decryptedKey = decryptedKey;
@ -63,16 +68,16 @@ public class NCAContent {
if (ncaHeaderTableEntry.getMediaEndOffset() == 0) if (ncaHeaderTableEntry.getMediaEndOffset() == 0)
throw new EmptySectionException("Empty section"); throw new EmptySectionException("Empty section");
// If it's PFS0Provider // If it's PFS0Provider
if (ncaSectionBlock.getSuperBlockPFS0() != null) if (ncaFsHeader.getSuperBlockPFS0() != null)
this.proceedPFS0(); this.proceedPFS0();
else if (ncaSectionBlock.getSuperBlockIVFC() != null) else if (ncaFsHeader.getSuperBlockIVFC() != null)
this.proceedRomFs(); this.proceedRomFs();
else else
throw new Exception("NCAContent(): Not supported. PFS0 or RomFS supported only."); throw new Exception("NCAContent(): Not supported. PFS0 or RomFS supported only.");
} }
private void proceedPFS0() throws Exception { private void proceedPFS0() throws Exception {
switch (ncaSectionBlock.getCryptoType()){ switch (ncaFsHeader.getCryptoType()){
case 0x01: case 0x01:
proceedPFS0NotEncrypted(); // IF NO ENCRYPTION proceedPFS0NotEncrypted(); // IF NO ENCRYPTION
break; break;
@ -86,13 +91,13 @@ public class NCAContent {
private void proceedPFS0NotEncrypted() throws Exception{ private void proceedPFS0NotEncrypted() throws Exception{
RandomAccessFile raf = new RandomAccessFile(file, "r"); RandomAccessFile raf = new RandomAccessFile(file, "r");
long thisMediaLocation = offsetPosition + (ncaHeaderTableEntry.getMediaStartOffset() * 0x200); long thisMediaLocation = offsetPosition + (ncaHeaderTableEntry.getMediaStartOffset() * 0x200);
long hashTableLocation = thisMediaLocation + ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset(); long hashTableLocation = thisMediaLocation + ncaFsHeader.getSuperBlockPFS0().getHashTableOffset();
long pfs0Location = thisMediaLocation + ncaSectionBlock.getSuperBlockPFS0().getPfs0offset(); long pfs0Location = thisMediaLocation + ncaFsHeader.getSuperBlockPFS0().getPfs0offset();
raf.seek(hashTableLocation); raf.seek(hashTableLocation);
byte[] rawData; byte[] rawData;
long sha256recordsNumber = ncaSectionBlock.getSuperBlockPFS0().getHashTableSize() / 0x20; long sha256recordsNumber = ncaFsHeader.getSuperBlockPFS0().getHashTableSize() / 0x20;
// Collect hashes // Collect hashes
for (int i = 0; i < sha256recordsNumber; i++){ for (int i = 0; i < sha256recordsNumber; i++){
rawData = new byte[0x20]; // 32 bytes - size of SHA256 hash rawData = new byte[0x20]; // 32 bytes - size of SHA256 hash
@ -111,13 +116,13 @@ public class NCAContent {
new CryptoSection03Pfs0(file, new CryptoSection03Pfs0(file,
offsetPosition, offsetPosition,
decryptedKey, decryptedKey,
ncaSectionBlock, ncaFsHeader,
ncaHeaderTableEntry.getMediaStartOffset(), ncaHeaderTableEntry.getMediaStartOffset(),
ncaHeaderTableEntry.getMediaEndOffset()); ncaHeaderTableEntry.getMediaEndOffset());
} }
private void proceedRomFs() throws Exception{ private void proceedRomFs() throws Exception{
switch (ncaSectionBlock.getCryptoType()){ switch (ncaFsHeader.getCryptoType()){
case 0x01: case 0x01:
proceedRomFsNotEncrypted(); // IF NO ENCRYPTION proceedRomFsNotEncrypted(); // IF NO ENCRYPTION
break; break;
@ -125,23 +130,22 @@ public class NCAContent {
proceedRomFsEncrypted(); // If encrypted regular [ 0x03 ] proceedRomFsEncrypted(); // If encrypted regular [ 0x03 ]
break; break;
default: default:
throw new Exception("NCAContent() -> proceedRomFs(): Non-supported 'Crypto type'"); throw new Exception("Non-supported 'Crypto type'");
} }
} }
private void proceedRomFsNotEncrypted(){ private void proceedRomFsNotEncrypted(){ // TODO: Clarify, implement if needed
// TODO: Clarify, implement if needed log.error("proceedRomFs() -> proceedRomFsNotEncrypted() is not implemented :(");
System.out.println("proceedRomFs() -> proceedRomFsNotEncrypted() is not implemented :(");
} }
private void proceedRomFsEncrypted() throws Exception{ private void proceedRomFsEncrypted() throws Exception{
if (decryptedKey == null) if (decryptedKey == null)
throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided."); throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided.");
this.romfs = new RomFsEncryptedProvider( this.romfs = new RomFsEncryptedProvider(
offsetPosition, ncaFsHeader.getSuperBlockIVFC().getLvl6Offset(),
ncaSectionBlock.getSuperBlockIVFC().getLvl6Offset(),
file, file,
offsetPosition,
decryptedKey, decryptedKey,
ncaSectionBlock.getSectionCTR(), ncaFsHeader.getSectionCTR(),
ncaHeaderTableEntry.getMediaStartOffset(), ncaHeaderTableEntry.getMediaStartOffset(),
ncaHeaderTableEntry.getMediaEndOffset()); ncaHeaderTableEntry.getMediaEndOffset());
} }
@ -151,27 +155,24 @@ public class NCAContent {
public IRomFsProvider getRomfs() { return romfs; } public IRomFsProvider getRomfs() { return romfs; }
private class CryptoSection03Pfs0 { private class CryptoSection03Pfs0 {
CryptoSection03Pfs0(File file, CryptoSection03Pfs0(File file,
long offsetPosition, long offsetPosition,
byte[] decryptedKey, byte[] decryptedKey,
NCASectionBlock ncaSectionBlock, NcaFsHeader ncaFsHeader,
long mediaStartBlocksOffset, long mediaStartBlocksOffset,
long mediaEndBlocksOffset) throws Exception long mediaEndBlocksOffset) throws Exception
{ {
/*//-------------------------------------------------------------------------------------------------- log.debug( "-== Crypto Section 03 PFS0 ==-\n" +
System.out.println("Media start location: " + mediaStartBlocksOffset); "Media start location: " + RainbowDump.formatDecHexString(mediaStartBlocksOffset) + "\n" +
System.out.println("Media end location: " + mediaEndBlocksOffset); "Media end location: " + RainbowDump.formatDecHexString(mediaEndBlocksOffset) + "\n" +
System.out.println("Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset)); "Media size: " + RainbowDump.formatDecHexString((mediaEndBlocksOffset-mediaStartBlocksOffset)) + "\n" +
System.out.println("Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200))); "Media actual location: " + RainbowDump.formatDecHexString((offsetPosition + (mediaStartBlocksOffset * 0x200))) + "\n" +
System.out.println("SHA256 hash tbl size: " + ncaSectionBlock.getSuperBlockPFS0().getHashTableSize()); "SHA256 hash table size: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getHashTableSize()) + "\n" +
System.out.println("SHA256 hash tbl offs: " + ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset()); "SHA256 hash table offs: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getHashTableOffset()) + "\n" +
System.out.println("PFS0 Offs: " + ncaSectionBlock.getSuperBlockPFS0().getPfs0offset()); "PFS0 Offset: " + RainbowDump.formatDecHexString(ncaFsHeader.getSuperBlockPFS0().getPfs0offset()) + "\n" +
System.out.println("SHA256 records: " + (ncaSectionBlock.getSuperBlockPFS0().getHashTableSize() / 0x20)); "SHA256 records: " + RainbowDump.formatDecHexString((ncaFsHeader.getSuperBlockPFS0().getHashTableSize() / 0x20)) + "\n" +
System.out.println("KEY: " + LoperConverter.byteArrToHexString(decryptedKey)); "KEY (decrypted): " + Converter.byteArrToHexString(decryptedKey) + "\n" +
System.out.println("CTR: " + LoperConverter.byteArrToHexString(ncaSectionBlock.getSectionCTR())); "CTR: " + Converter.byteArrToHexString(ncaFsHeader.getSectionCTR()) + "\n");
System.out.println();
//--------------------------------------------------------------------------------------------------*/
if (decryptedKey == null) if (decryptedKey == null)
throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided."); throw new Exception("CryptoSection03: unable to proceed. No decrypted key provided.");
@ -179,7 +180,7 @@ public class NCAContent {
long abosluteOffsetPosition = offsetPosition + (mediaStartBlocksOffset * 0x200); long abosluteOffsetPosition = offsetPosition + (mediaStartBlocksOffset * 0x200);
raf.seek(abosluteOffsetPosition); raf.seek(abosluteOffsetPosition);
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey, ncaSectionBlock.getSectionCTR(), mediaStartBlocksOffset * 0x200); AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey, ncaFsHeader.getSectionCTR(), mediaStartBlocksOffset * 0x200);
byte[] encryptedBlock; byte[] encryptedBlock;
byte[] dectyptedBlock; byte[] dectyptedBlock;
@ -190,13 +191,13 @@ public class NCAContent {
Thread pThread = new Thread(new ParseThread( Thread pThread = new Thread(new ParseThread(
streamInp, streamInp,
ncaSectionBlock.getSuperBlockPFS0().getPfs0offset(), ncaFsHeader.getSuperBlockPFS0().getPfs0offset(),
ncaSectionBlock.getSuperBlockPFS0().getHashTableOffset(), ncaFsHeader.getSuperBlockPFS0().getHashTableOffset(),
ncaSectionBlock.getSuperBlockPFS0().getHashTableSize(), ncaFsHeader.getSuperBlockPFS0().getHashTableSize(),
offsetPosition, offsetPosition,
file, file,
decryptedKey, decryptedKey,
ncaSectionBlock.getSectionCTR(), ncaFsHeader.getSectionCTR(),
mediaStartBlocksOffset, mediaStartBlocksOffset,
mediaEndBlocksOffset mediaEndBlocksOffset
)); ));
@ -206,7 +207,7 @@ public class NCAContent {
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) != -1){ if (raf.read(encryptedBlock) != -1){
//dectyptedBlock = aesCtr.decrypt(encryptedBlock); //dectyptedBlock = aesCtr.decrypt(encryptedBlock);
dectyptedBlock = decryptor.dectyptNext(encryptedBlock); dectyptedBlock = decryptor.decryptNext(encryptedBlock);
// Writing decrypted data to pipe // Writing decrypted data to pipe
try { try {
streamOut.write(dectyptedBlock); streamOut.write(dectyptedBlock);
@ -220,8 +221,8 @@ public class NCAContent {
streamOut.close(); streamOut.close();
raf.close(); raf.close();
} }
/* /**
 * Since we representing decrypted data as stream (it's easier to look on it this way), * Since we're representing decrypted data as a stream (it's easier to look at it this way),
* this thread will be parsing it. * this thread will be parsing it.
* */ * */
private class ParseThread implements Runnable{ private class ParseThread implements Runnable{
@ -233,12 +234,12 @@ public class NCAContent {
long hashTableRecordsCount; long hashTableRecordsCount;
long pfs0offset; long pfs0offset;
private long MetaOffsetPositionInFile; private final long MetaOffsetPositionInFile;
private File MetaFileWithEncPFS0; private final File MetaFileWithEncPFS0;
private byte[] MetaKey; private final byte[] MetaKey;
private byte[] MetaSectionCTR; private final byte[] MetaSectionCTR;
private long MetaMediaStartOffset; private final long MetaMediaStartOffset;
private long MetaMediaEndOffset; private final long MetaMediaEndOffset;
ParseThread(PipedInputStream pipedInputStream, ParseThread(PipedInputStream pipedInputStream,
@ -313,12 +314,9 @@ public class NCAContent {
pipedInputStream.close(); pipedInputStream.close();
} }
catch (Exception e){ catch (Exception e){
System.out.println("'ParseThread' thread exception"); log.debug("NCA Content parsing thread exception: ", e);
e.printStackTrace();
}
finally {
System.out.println("Thread dies");
} }
//finally { System.out.println("NCA Content thread dies");}
} }
} }
} }
@ -334,18 +332,17 @@ public class NCAContent {
RandomAccessFile raf = new RandomAccessFile(file, "r"); RandomAccessFile raf = new RandomAccessFile(file, "r");
///-------------------------------------------------------------------------------------------------- ///--------------------------------------------------------------------------------------------------
System.out.println("NCAContent() -> exportEncryptedSectionType03() Debug information"); log.debug("NCAContent() -> exportEncryptedSectionType03() information" + "\n" +
System.out.println("Media start location: " + mediaStartBlocksOffset); "Media start location: " + mediaStartBlocksOffset + "\n" +
System.out.println("Media end location: " + mediaEndBlocksOffset); "Media end location: " + mediaEndBlocksOffset + "\n" +
System.out.println("Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset)); "Media size : " + (mediaEndBlocksOffset-mediaStartBlocksOffset) + "\n" +
System.out.println("Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200))); "Media act. location: " + (offsetPosition + (mediaStartBlocksOffset * 0x200)) + "\n" +
System.out.println("KEY: " + LoperConverter.byteArrToHexString(decryptedKey)); "KEY: " + Converter.byteArrToHexString(decryptedKey) + "\n" +
System.out.println("CTR: " + LoperConverter.byteArrToHexString(ncaSectionBlock.getSectionCTR())); "CTR: " + Converter.byteArrToHexString(ncaFsHeader.getSectionCTR()) + "\n");
System.out.println();
//---------------------------------------------------------------------------------------------------/ //---------------------------------------------------------------------------------------------------/
if (ncaSectionBlock.getCryptoType() == 0x01){ if (ncaFsHeader.getCryptoType() == 0x01){
System.out.println("NCAContent -> getRawDataContentPipedInpStream (Zero encryption section type 01): Thread started"); log.trace("NCAContent -> getRawDataContentPipedInpStream (Zero encryption section type 01): Thread started");
Thread workerThread; Thread workerThread;
PipedOutputStream streamOut = new PipedOutputStream(); PipedOutputStream streamOut = new PipedOutputStream();
@ -363,8 +360,7 @@ public class NCAContent {
} }
} }
catch (Exception e){ catch (Exception e){
System.out.println("NCAContent -> exportRawData(): "+e.getMessage()); log.error("NCAContent -> exportRawData() failure", e);
e.printStackTrace();
} }
finally { finally {
try { try {
@ -374,13 +370,13 @@ public class NCAContent {
streamOut.close(); streamOut.close();
}catch (Exception ignored) {} }catch (Exception ignored) {}
} }
System.out.println("NCAContent -> exportRawData(): Thread died"); log.trace("NCAContent -> exportRawData(): Thread died");
}); });
workerThread.start(); workerThread.start();
return streamIn; return streamIn;
} }
else if (ncaSectionBlock.getCryptoType() == 0x03){ else if (ncaFsHeader.getCryptoType() == 0x03){
System.out.println("NCAContent -> getRawDataContentPipedInpStream (Encrypted Section Type 03): Thread started"); log.trace("NCAContent -> getRawDataContentPipedInpStream (Encrypted Section Type 03): Thread started");
if (decryptedKey == null) if (decryptedKey == null)
throw new Exception("NCAContent -> exportRawData(): unable to proceed. No decrypted key provided."); throw new Exception("NCAContent -> exportRawData(): unable to proceed. No decrypted key provided.");
@ -396,7 +392,7 @@ public class NCAContent {
raf.seek(abosluteOffsetPosition); raf.seek(abosluteOffsetPosition);
AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey, AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(decryptedKey,
ncaSectionBlock.getSectionCTR(), ncaFsHeader.getSectionCTR(),
mediaStartBlocksOffset * 0x200); mediaStartBlocksOffset * 0x200);
byte[] encryptedBlock; byte[] encryptedBlock;
@ -406,7 +402,7 @@ public class NCAContent {
for (int i = 0; i < mediaBlocksSize; i++){ for (int i = 0; i < mediaBlocksSize; i++){
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) != -1){ if (raf.read(encryptedBlock) != -1){
dectyptedBlock = decryptor.dectyptNext(encryptedBlock); dectyptedBlock = decryptor.decryptNext(encryptedBlock);
// Writing decrypted data to pipe // Writing decrypted data to pipe
streamOut.write(dectyptedBlock); streamOut.write(dectyptedBlock);
} }
@ -415,8 +411,7 @@ public class NCAContent {
} }
} }
catch (Exception e){ catch (Exception e){
System.out.println("NCAContent -> exportRawData(): "+e.getMessage()); log.error("NCAContent -> exportRawData(): ", e);
e.printStackTrace();
} }
finally { finally {
try { try {
@ -426,7 +421,7 @@ public class NCAContent {
streamOut.close(); streamOut.close();
}catch (Exception ignored) {} }catch (Exception ignored) {}
} }
System.out.println("NCAContent -> exportRawData(): Thread died"); log.trace("NCAContent -> exportRawData(): Thread died");
}); });
workerThread.start(); workerThread.start();
return streamIn; return streamIn;
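NCAContent decrypts sections block by block and exposes the result as a stream: a worker thread AES-CTR-decrypts 0x200-byte blocks into a PipedOutputStream while the caller reads from the connected PipedInputStream. A stripped-down sketch of that producer/consumer shape (decryptBlock below is only a stand-in for AesCtrDecryptSimple.decryptNext):

import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipedDecryptExample {
    public static PipedInputStream openDecryptedStream(byte[][] encryptedBlocks) throws Exception {
        PipedOutputStream streamOut = new PipedOutputStream();
        PipedInputStream streamIn = new PipedInputStream(streamOut);
        Thread worker = new Thread(() -> {
            try {
                for (byte[] block : encryptedBlocks)
                    streamOut.write(decryptBlock(block));   // stand-in for decryptor.decryptNext(block)
            }
            catch (Exception e) {
                // NCAContent reports such failures through log.error(...)
            }
            finally {
                try { streamOut.close(); } catch (Exception ignored) {}
            }
        });
        worker.start();
        return streamIn;    // the caller parses decrypted data from here
    }

    private static byte[] decryptBlock(byte[] encrypted) {
        return encrypted;   // placeholder: no real decryption in this sketch
    }
}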

View file

@ -18,34 +18,26 @@
*/ */
package libKonogonka.Tools.NCA; package libKonogonka.Tools.NCA;
import java.nio.ByteBuffer; import libKonogonka.Converter;
import java.nio.ByteOrder;
import java.util.Arrays; import java.util.Arrays;
public class NCAHeaderTableEntry { public class NCAHeaderTableEntry {
private final long mediaStartOffset;
private long mediaStartOffset; private final long mediaEndOffset;
private long mediaEndOffset; private final byte[] unknwn1;
private byte[] unknwn1; private final byte[] unknwn2;
private byte[] unknwn2;
public NCAHeaderTableEntry(byte[] table) throws Exception{ public NCAHeaderTableEntry(byte[] table) throws Exception{
if (table.length < 0x10) if (table.length < 0x10)
throw new Exception("Section Table size is too small."); throw new Exception("Section Table size is too small.");
this.mediaStartOffset = convertUnsignedIntBytesToLong(Arrays.copyOfRange(table, 0x0, 0x4)); this.mediaStartOffset = Converter.getLElongOfInt(table, 0);
this.mediaEndOffset = convertUnsignedIntBytesToLong(Arrays.copyOfRange(table, 0x4, 0x8)); this.mediaEndOffset = Converter.getLElongOfInt(table, 0x4);
this.unknwn1 = Arrays.copyOfRange(table, 0x8, 0xC); this.unknwn1 = Arrays.copyOfRange(table, 0x8, 0xC);
this.unknwn2 = Arrays.copyOfRange(table, 0xC, 0x10); this.unknwn2 = Arrays.copyOfRange(table, 0xC, 0x10);
} }
private long convertUnsignedIntBytesToLong(byte[] intBytes){
if (intBytes.length == 4)
return ByteBuffer.wrap(Arrays.copyOf(intBytes, 8)).order(ByteOrder.LITTLE_ENDIAN).getLong();
else
return -1;
}
public long getMediaStartOffset() { return mediaStartOffset; } public long getMediaStartOffset() { return mediaStartOffset; }
public long getMediaEndOffset() { return mediaEndOffset; } public long getMediaEndOffset() { return mediaEndOffset; }
public byte[] getUnknwn1() { return unknwn1; } public byte[] getUnknwn1() { return unknwn1; }
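NCAHeaderTableEntry now reads its two offsets through Converter.getLElongOfInt. As the callers in NCAContent show, these values are counted in 0x200-byte media blocks, so turning them into byte positions looks roughly like this small sketch:

import libKonogonka.Tools.NCA.NCAHeaderTableEntry;

public class MediaOffsetExample {
    // media offsets are counted in 0x200-byte blocks; ncaOffsetInFile is where the NCA starts inside its container
    static long sectionStartInFile(NCAHeaderTableEntry entry, long ncaOffsetInFile) {
        return ncaOffsetInFile + entry.getMediaStartOffset() * 0x200;
    }

    static long sectionSizeInBytes(NCAHeaderTableEntry entry) {
        return (entry.getMediaEndOffset() - entry.getMediaStartOffset()) * 0x200;
    }
}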

View file

@ -18,27 +18,32 @@
*/ */
package libKonogonka.Tools.NCA; package libKonogonka.Tools.NCA;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NCASectionBlock; import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import libKonogonka.exceptions.EmptySectionException; import libKonogonka.exceptions.EmptySectionException;
import libKonogonka.xtsaes.XTSAESCipher; import libKonogonka.xtsaes.XTSAESCipher;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.bouncycastle.crypto.params.KeyParameter; import org.bouncycastle.crypto.params.KeyParameter;
import javax.crypto.Cipher; import javax.crypto.Cipher;
import javax.crypto.spec.SecretKeySpec; import javax.crypto.spec.SecretKeySpec;
import java.io.File; import java.io.*;
import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import static libKonogonka.LoperConverter.byteArrToHexString; import static libKonogonka.Converter.byteArrToHexString;
import static libKonogonka.LoperConverter.getLElong; import static libKonogonka.Converter.getLElong;
// TODO: check file size // TODO: check file size
public class NCAProvider { public class NCAProvider {
private File file; // File that contains NCA private final static Logger log = LogManager.getLogger(NCAProvider.class);
private long offset; // Offset where NCA actually located
private HashMap<String, String> keys; // hashmap with keys using _0x naming (where x number 0-N) private final File file; // File that contains NCA
private final long offset; // Offset where NCA actually located
private final HashMap<String, String> keys; // hashmap with keys using _0x naming (where x number 0-N)
// Header // Header
private byte[] rsa2048one; private byte[] rsa2048one;
private byte[] rsa2048two; private byte[] rsa2048two;
@ -78,10 +83,10 @@ public class NCAProvider {
private NCAHeaderTableEntry tableEntry2; private NCAHeaderTableEntry tableEntry2;
private NCAHeaderTableEntry tableEntry3; private NCAHeaderTableEntry tableEntry3;
private NCASectionBlock sectionBlock0; private NcaFsHeader sectionBlock0;
private NCASectionBlock sectionBlock1; private NcaFsHeader sectionBlock1;
private NCASectionBlock sectionBlock2; private NcaFsHeader sectionBlock2;
private NCASectionBlock sectionBlock3; private NcaFsHeader sectionBlock3;
private NCAContent ncaContent0; private NCAContent ncaContent0;
private NCAContent ncaContent1; private NCAContent ncaContent1;
@ -93,6 +98,7 @@ public class NCAProvider {
} }
public NCAProvider (File file, HashMap<String, String> keys, long offsetPosition) throws Exception{ public NCAProvider (File file, HashMap<String, String> keys, long offsetPosition) throws Exception{
this.file = file;
this.keys = keys; this.keys = keys;
String header_key = keys.get("header_key"); String header_key = keys.get("header_key");
if (header_key == null ) if (header_key == null )
@ -100,15 +106,10 @@ public class NCAProvider {
if (header_key.length() != 64) if (header_key.length() != 64)
throw new Exception("header_key is too small or too big. Must be 64 symbols."); throw new Exception("header_key is too small or too big. Must be 64 symbols.");
this.file = file;
this.offset = offsetPosition; this.offset = offsetPosition;
KeyParameter key1 = new KeyParameter( KeyParameter key1 = new KeyParameter(hexStrToByteArray(header_key.substring(0, 32)));
hexStrToByteArray(header_key.substring(0, 32)) KeyParameter key2 = new KeyParameter(hexStrToByteArray(header_key.substring(32, 64)));
);
KeyParameter key2 = new KeyParameter(
hexStrToByteArray(header_key.substring(32, 64))
);
XTSAESCipher xtsaesCipher = new XTSAESCipher(false); XTSAESCipher xtsaesCipher = new XTSAESCipher(false);
xtsaesCipher.init(false, key1, key2); xtsaesCipher.init(false, key1, key2);
@ -134,10 +135,9 @@ public class NCAProvider {
raf.close(); raf.close();
getNCAContent(); getNCAContent();
/* /*//---------------------------------------------------------------------
//---------------------------------------------------------------------
FileInputStream fis = new FileInputStream(file); FileInputStream fis = new FileInputStream(file);
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream("/tmp/decrypted.nca")); try (BufferedOutputStream bos = new BufferedOutputStream(Files.newOutputStream(Paths.get("/tmp/decrypted.nca")))){
int i = 0; int i = 0;
byte[] block = new byte[0x200]; byte[] block = new byte[0x200];
while (fis.read(block) != -1){ while (fis.read(block) != -1){
@ -145,7 +145,10 @@ public class NCAProvider {
xtsaesCipher.processDataUnit(block, 0, 0x200, decryptedSequence, 0, i++); xtsaesCipher.processDataUnit(block, 0, 0x200, decryptedSequence, 0, i++);
bos.write(decryptedSequence); bos.write(decryptedSequence);
} }
bos.close(); }
catch (Exception e){
throw new Exception("Failed to export decrypted AES-XTS", e);
}
//---------------------------------------------------------------------*/ //---------------------------------------------------------------------*/
} }
@ -196,7 +199,9 @@ public class NCAProvider {
if (cryptoTypeReal > 0) // TODO: CLARIFY WHY THE FUCK IS IT FAIR???? if (cryptoTypeReal > 0) // TODO: CLARIFY WHY THE FUCK IS IT FAIR????
cryptoTypeReal -= 1; cryptoTypeReal -= 1;
//todo: if nca3 proceed //If nca3 proceed
if (! magicnum.equalsIgnoreCase("NCA3"))
throw new Exception("Not supported data type: "+magicnum+". Only NCA3 supported");
// Decrypt keys if encrypted // Decrypt keys if encrypted
if (Arrays.equals(rightsId, new byte[0x10])) { if (Arrays.equals(rightsId, new byte[0x10])) {
String keyAreaKey; String keyAreaKey;
@ -232,10 +237,10 @@ public class NCAProvider {
tableEntry2 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x20, 0x30)); tableEntry2 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x20, 0x30));
tableEntry3 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x30, 0x40)); tableEntry3 = new NCAHeaderTableEntry(Arrays.copyOfRange(tableBytes, 0x30, 0x40));
sectionBlock0 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x400, 0x600)); sectionBlock0 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x400, 0x600));
sectionBlock1 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x600, 0x800)); sectionBlock1 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x600, 0x800));
sectionBlock2 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0x800, 0xa00)); sectionBlock2 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0x800, 0xa00));
sectionBlock3 = new NCASectionBlock(Arrays.copyOfRange(decryptedData, 0xa00, 0xc00)); sectionBlock3 = new NcaFsHeader(Arrays.copyOfRange(decryptedData, 0xa00, 0xc00));
} }
private void keyAreaKeyNotSupportedOrFound() throws Exception{ private void keyAreaKeyNotSupportedOrFound() throws Exception{
@ -259,12 +264,12 @@ public class NCAProvider {
throw new Exception(exceptionStringBuilder.toString()); throw new Exception(exceptionStringBuilder.toString());
} }
private void getNCAContent(){ private void getNCAContent() throws Exception{
byte[] key; byte[] key;
// If empty Rights ID // If empty Rights ID
if (Arrays.equals(rightsId, new byte[0x10])) { if (Arrays.equals(rightsId, new byte[0x10])) {
key = decryptedKey2; // TODO: Just remember this dumb hack key = decryptedKey2; // NOTE: Just remember this dumb hack
} }
else { else {
try { try {
@ -278,42 +283,35 @@ public class NCAProvider {
key = cipher.doFinal(rightsIDkey); key = cipher.doFinal(rightsIDkey);
} }
catch (Exception e){ catch (Exception e){
e.printStackTrace(); throw new Exception("No title.keys loaded?", e);
System.out.println("No title.keys loaded?");
return;
} }
} }
getNcaContentByNumber(0, key);
getNcaContentByNumber(1, key);
getNcaContentByNumber(2, key);
getNcaContentByNumber(3, key);
}
private void getNcaContentByNumber(int number, byte[] key){
try { try {
switch (number) {
case 0:
this.ncaContent0 = new NCAContent(file, offset, sectionBlock0, tableEntry0, key); this.ncaContent0 = new NCAContent(file, offset, sectionBlock0, tableEntry0, key);
} break;
catch (EmptySectionException ignored){} case 1:
catch (Exception e){
this.ncaContent0 = null;
e.printStackTrace();
}
try{
this.ncaContent1 = new NCAContent(file, offset, sectionBlock1, tableEntry1, key); this.ncaContent1 = new NCAContent(file, offset, sectionBlock1, tableEntry1, key);
} break;
catch (EmptySectionException ignored){} case 2:
catch (Exception e){
this.ncaContent1 = null;
e.printStackTrace();
}
try{
this.ncaContent2 = new NCAContent(file, offset, sectionBlock2, tableEntry2, key); this.ncaContent2 = new NCAContent(file, offset, sectionBlock2, tableEntry2, key);
} break;
catch (EmptySectionException ignored){} case 3:
catch (Exception e){
this.ncaContent2 = null;
e.printStackTrace();
}
try{
this.ncaContent3 = new NCAContent(file, offset, sectionBlock3, tableEntry3, key); this.ncaContent3 = new NCAContent(file, offset, sectionBlock3, tableEntry3, key);
break;
}
} }
catch (EmptySectionException ignored){} catch (EmptySectionException ignored){}
catch (Exception e){ catch (Exception e){
this.ncaContent3 = null; this.ncaContent3 = null;
e.printStackTrace(); log.debug("Unable to get NCA Content "+number, e);
} }
} }
@ -347,20 +345,54 @@ public class NCAProvider {
public byte[] getDecryptedKey1() { return decryptedKey1; } public byte[] getDecryptedKey1() { return decryptedKey1; }
public byte[] getDecryptedKey2() { return decryptedKey2; } public byte[] getDecryptedKey2() { return decryptedKey2; }
public byte[] getDecryptedKey3() { return decryptedKey3; } public byte[] getDecryptedKey3() { return decryptedKey3; }
/**
 * Get NCA Header Table Entry for selected id
* @param id should be 0-3
* */
public NCAHeaderTableEntry getTableEntry(int id) throws Exception{
switch (id) {
case 0:
return getTableEntry0();
case 1:
return getTableEntry1();
case 2:
return getTableEntry2();
case 3:
return getTableEntry3();
default:
throw new Exception("NCA Table Entry must be defined in range 0-3 while '"+id+"' requested");
}
}
public NCAHeaderTableEntry getTableEntry0() { return tableEntry0; } public NCAHeaderTableEntry getTableEntry0() { return tableEntry0; }
public NCAHeaderTableEntry getTableEntry1() { return tableEntry1; } public NCAHeaderTableEntry getTableEntry1() { return tableEntry1; }
public NCAHeaderTableEntry getTableEntry2() { return tableEntry2; } public NCAHeaderTableEntry getTableEntry2() { return tableEntry2; }
public NCAHeaderTableEntry getTableEntry3() { return tableEntry3; } public NCAHeaderTableEntry getTableEntry3() { return tableEntry3; }
/**
* Get NCA Section Block for selected section
* @param id should be 0-3
* */
public NcaFsHeader getSectionBlock(int id) throws Exception{
switch (id) {
case 0:
return getSectionBlock0();
case 1:
return getSectionBlock1();
case 2:
return getSectionBlock2();
case 3:
return getSectionBlock3();
default:
throw new Exception("NCA Section Block must be defined in range 0-3 while '"+id+"' requested");
}
}
public NcaFsHeader getSectionBlock0() { return sectionBlock0; }
public NcaFsHeader getSectionBlock1() { return sectionBlock1; }
public NcaFsHeader getSectionBlock2() { return sectionBlock2; }
public NcaFsHeader getSectionBlock3() { return sectionBlock3; }
public NCASectionBlock getSectionBlock0() { return sectionBlock0; } public boolean isKeyAvailable(){ // NOTE: never used
public NCASectionBlock getSectionBlock1() { return sectionBlock1; }
public NCASectionBlock getSectionBlock2() { return sectionBlock2; }
public NCASectionBlock getSectionBlock3() { return sectionBlock3; }
public boolean isKeyAvailable(){ // TODO: USE
if (Arrays.equals(rightsId, new byte[0x10])) if (Arrays.equals(rightsId, new byte[0x10]))
return true; return false;
else else
return keys.containsKey(byteArrToHexString(rightsId)); return keys.containsKey(byteArrToHexString(rightsId));
} }
@ -368,7 +400,7 @@ public class NCAProvider {
* Get content for the selected section * Get content for the selected section
* @param sectionNumber should be 0-3 * @param sectionNumber should be 0-3
* */ * */
public NCAContent getNCAContentProvider(int sectionNumber){ public NCAContent getNCAContentProvider(int sectionNumber) throws Exception{
switch (sectionNumber) { switch (sectionNumber) {
case 0: case 0:
return ncaContent0; return ncaContent0;
@ -379,7 +411,7 @@ public class NCAProvider {
case 3: case 3:
return ncaContent3; return ncaContent3;
default: default:
return null; throw new Exception("NCA Content must be requested in range of 0-3, while 'Section Number "+sectionNumber+"' requested");
} }
} }
} }
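Taken together with the new getTableEntry(int) and getSectionBlock(int) accessors, opening an NCA now looks roughly like the sketch below (file paths are placeholders; the keys file is assumed to contain header_key and the key area keys):

import libKonogonka.KeyChainHolder;
import libKonogonka.Tools.NCA.NCAProvider;
import java.io.File;

public class NcaExample {
    public static void main(String[] args) throws Exception {
        KeyChainHolder keyChain = new KeyChainHolder("/home/user/.switch/prod.keys", null);          // placeholder path
        NCAProvider nca = new NCAProvider(new File("/tmp/example.nca"), keyChain.getRawKeySet(), 0); // placeholder path, NCA at offset 0
        for (int i = 0; i < 4; i++) {
            nca.getSectionBlock(i).printDebug();    // dumps the parsed FS header through log4j2
            System.out.println("Section " + i + " media end offset: " + nca.getTableEntry(i).getMediaEndOffset());
        }
    }
}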

View file

@ -0,0 +1,43 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.Converter.getLEint;
public class BucketTreeHeader {
private final String magic;
private final int version;
private final int entryCount;
private final byte[] unknown;
BucketTreeHeader(byte[] rawBytes){
magic = new String(Arrays.copyOfRange(rawBytes, 0x0, 0x4), StandardCharsets.US_ASCII);
version = getLEint(rawBytes, 0x4);
entryCount = getLEint(rawBytes, 0x8);
unknown = Arrays.copyOfRange(rawBytes, 0xc, 0x10);
}
public String getMagic() {return magic;}
public int getVersion() {return version;}
public int getEntryCount() {return entryCount;}
public byte[] getUnknown() {return unknown;}
}
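BucketTreeHeader covers a fixed 0x10-byte layout: a 4-byte ASCII magic ('BKTR' in the PatchInfo case), a little-endian version, a little-endian entry count and four unused bytes. A sketch feeding it a hand-built buffer (the constructor is package-private, so this only compiles from inside the same package):

package libKonogonka.Tools.NCA.NCASectionTableBlock;

public class BucketTreeHeaderExample {
    public static void main(String[] args) {
        byte[] raw = new byte[0x10];
        raw[0] = 'B'; raw[1] = 'K'; raw[2] = 'T'; raw[3] = 'R';   // 4-byte ASCII magic
        raw[4] = 1;                                               // version, little-endian
        raw[8] = 2;                                               // entryCount, little-endian
        BucketTreeHeader header = new BucketTreeHeader(raw);
        System.out.println(header.getMagic() + " v" + header.getVersion() + ", entries: " + header.getEntryCount());
    }
}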

View file

@ -0,0 +1,45 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class CompressionInfo {
private final long offset;
private final long size;
private final BucketTreeHeader bktr;
private final byte[] unknown;
CompressionInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
bktr = new BucketTreeHeader(Arrays.copyOfRange(rawTable, 0x10, 0x20));
unknown = Arrays.copyOfRange(rawTable, 0x20, 0x28);
}
public long getOffset() {return offset;}
public long getSize() {return size;}
public String getBktrMagic() { return bktr.getMagic(); }
public int getBktrVersion() { return bktr.getVersion(); }
public int getBktrEntryCount() { return bktr.getEntryCount(); }
public byte[] getBktrUnknown() { return bktr.getUnknown(); }
public byte[] getUnknown() {return unknown;}
}

View file

@ -0,0 +1,38 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class MetaDataHashDataInfo {
private final long offset;
private final long size;
private final byte[] tableHash;
MetaDataHashDataInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
tableHash = Arrays.copyOfRange(rawTable, 0x10, 0x20);
}
public long getOffset() {return offset;}
public long getSize() {return size;}
public byte[] getTableHash() {return tableHash;}
}

View file

@ -1,113 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint;
import static libKonogonka.LoperConverter.getLElong;
public class NCASectionBlock {
private byte[] version;
private byte fsType;
private byte hashType;
private byte cryptoType;
private byte[] padding;
private SuperBlockIVFC superBlockIVFC;
private SuperBlockPFS0 superBlockPFS0;
private byte[] BKTRfullHeader;
// BKTR extended
private long BKTRoffsetSection1;
private long BKTRsizeSection1;
private String BKTRmagicSection1;
private int BKTRu32Section1;
private int BKTRs32Section1;
private byte[] BKTRunknownSection1;
private long BKTRoffsetSection2;
private long BKTRsizeSection2;
private String BKTRmagicSection2;
private int BKTRu32Section2;
private int BKTRs32Section2;
private byte[] BKTRunknownSection2;
private byte[] sectionCTR;
private byte[] unknownEndPadding;
public NCASectionBlock(byte[] tableBlockBytes) throws Exception{
if (tableBlockBytes.length != 0x200)
throw new Exception("Table Block Section size is incorrect.");
version = Arrays.copyOfRange(tableBlockBytes, 0, 0x2);
fsType = tableBlockBytes[0x2];
hashType = tableBlockBytes[0x3];
cryptoType = tableBlockBytes[0x4];
padding = Arrays.copyOfRange(tableBlockBytes, 0x5, 0x8);
byte[] superBlockBytes = Arrays.copyOfRange(tableBlockBytes, 0x8, 0xf8);
if ((fsType == 0) && (hashType == 0x3))
superBlockIVFC = new SuperBlockIVFC(superBlockBytes);
else if ((fsType == 0x1) && (hashType == 0x2))
superBlockPFS0 = new SuperBlockPFS0(superBlockBytes);
BKTRfullHeader = Arrays.copyOfRange(tableBlockBytes, 0x100, 0x140);
BKTRoffsetSection1 = getLElong(BKTRfullHeader, 0);
BKTRsizeSection1 = getLElong(BKTRfullHeader, 0x8);
BKTRmagicSection1 = new String(Arrays.copyOfRange(BKTRfullHeader, 0x10, 0x14), StandardCharsets.US_ASCII);
BKTRu32Section1 = getLEint(BKTRfullHeader, 0x14);
BKTRs32Section1 = getLEint(BKTRfullHeader, 0x18);
BKTRunknownSection1 = Arrays.copyOfRange(tableBlockBytes, 0x1c, 0x20);
BKTRoffsetSection2 = getLElong(BKTRfullHeader, 0x20);
BKTRsizeSection2 = getLElong(BKTRfullHeader, 0x28);
BKTRmagicSection2 = new String(Arrays.copyOfRange(BKTRfullHeader, 0x30, 0x34), StandardCharsets.US_ASCII);
BKTRu32Section2 = getLEint(BKTRfullHeader, 0x34);
BKTRs32Section2 = getLEint(BKTRfullHeader, 0x38);
BKTRunknownSection2 = Arrays.copyOfRange(BKTRfullHeader, 0x3c, 0x40);
sectionCTR = Arrays.copyOfRange(tableBlockBytes, 0x140, 0x148);
unknownEndPadding = Arrays.copyOfRange(tableBlockBytes, 0x148, 0x200);
}
public byte[] getVersion() { return version; }
public byte getFsType() { return fsType; }
public byte getHashType() { return hashType; }
public byte getCryptoType() { return cryptoType; }
public byte[] getPadding() { return padding; }
public SuperBlockIVFC getSuperBlockIVFC() { return superBlockIVFC; }
public SuperBlockPFS0 getSuperBlockPFS0() { return superBlockPFS0; }
public byte[] getBKTRfullHeader() { return BKTRfullHeader; }
public long getBKTRoffsetSection1() { return BKTRoffsetSection1; }
public long getBKTRsizeSection1() { return BKTRsizeSection1; }
public String getBKTRmagicSection1() { return BKTRmagicSection1; }
public int getBKTRu32Section1() { return BKTRu32Section1; }
public int getBKTRs32Section1() { return BKTRs32Section1; }
public byte[] getBKTRunknownSection1() { return BKTRunknownSection1; }
public long getBKTRoffsetSection2() { return BKTRoffsetSection2; }
public long getBKTRsizeSection2() { return BKTRsizeSection2; }
public String getBKTRmagicSection2() { return BKTRmagicSection2; }
public int getBKTRu32Section2() { return BKTRu32Section2; }
public int getBKTRs32Section2() { return BKTRs32Section2; }
public byte[] getBKTRunknownSection2() { return BKTRunknownSection2; }
public byte[] getSectionCTR() { return sectionCTR; }
public byte[] getUnknownEndPadding() { return unknownEndPadding; }
}

View file

@ -0,0 +1,287 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import libKonogonka.RainbowDump;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Arrays;
import static libKonogonka.Converter.byteArrToHexString;
import static libKonogonka.Converter.getLElong;
public class NcaFsHeader {
private final static Logger log = LogManager.getLogger(NcaFsHeader.class);
private final byte[] version;
private final byte fsType;
private final byte hashType;
private final byte cryptoType;
private final byte metaDataHashType;
private final byte[] padding;
private SuperBlockIVFC superBlockIVFC;
private SuperBlockPFS0 superBlockPFS0;
// BKTR extended
private final long PatchInfoOffsetSection1;
private final long PatchInfoSizeSection1;
private final BucketTreeHeader BktrSection1;
private final long PatchInfoOffsetSection2;
private final long PatchInfoSizeSection2;
private final BucketTreeHeader BktrSection2;
private final byte[] generation;
private final byte[] sectionCTR;
private final SparseInfo sparseInfo;
private final CompressionInfo compressionInfo;
private final MetaDataHashDataInfo metaDataHashDataInfo;
private final byte[] unknownEndPadding;
public NcaFsHeader(byte[] tableBlockBytes) throws Exception{
if (tableBlockBytes.length != 0x200)
throw new Exception("Table Block Section size is incorrect.");
version = Arrays.copyOfRange(tableBlockBytes, 0, 0x2);
fsType = tableBlockBytes[0x2];
hashType = tableBlockBytes[0x3];
cryptoType = tableBlockBytes[0x4];
metaDataHashType = tableBlockBytes[0x5];
padding = Arrays.copyOfRange(tableBlockBytes, 0x6, 0x8);
byte[] superBlockBytes = Arrays.copyOfRange(tableBlockBytes, 0x8, 0xf8);
if ((fsType == 0) && (hashType == 0x3))
superBlockIVFC = new SuperBlockIVFC(superBlockBytes);
else if ((fsType == 0x1) && (hashType == 0x2))
superBlockPFS0 = new SuperBlockPFS0(superBlockBytes);
PatchInfoOffsetSection1 = getLElong(tableBlockBytes, 0x100);
PatchInfoSizeSection1 = getLElong(tableBlockBytes, 0x108);
BktrSection1 = new BucketTreeHeader(Arrays.copyOfRange(tableBlockBytes, 0x110, 0x120));
PatchInfoOffsetSection2 = getLElong(tableBlockBytes, 0x120);
PatchInfoSizeSection2 = getLElong(tableBlockBytes, 0x128);
BktrSection2 = new BucketTreeHeader(Arrays.copyOfRange(tableBlockBytes, 0x130, 0x140));
generation = Arrays.copyOfRange(tableBlockBytes, 0x140, 0x144);
sectionCTR = Arrays.copyOfRange(tableBlockBytes, 0x144, 0x148);
sparseInfo = new SparseInfo(Arrays.copyOfRange(tableBlockBytes, 0x148, 0x178));
compressionInfo = new CompressionInfo(Arrays.copyOfRange(tableBlockBytes, 0x178, 0x1a0));
metaDataHashDataInfo = new MetaDataHashDataInfo(Arrays.copyOfRange(tableBlockBytes, 0x1a0, 0x1d0));
unknownEndPadding = Arrays.copyOfRange(tableBlockBytes, 0x1d0, 0x200);
}
public byte[] getVersion() { return version; }
public byte getFsType() { return fsType; }
public byte getHashType() { return hashType; }
public byte getCryptoType() { return cryptoType; }
public byte getMetaDataHashType() { return metaDataHashType; }
public byte[] getPadding() { return padding; }
public SuperBlockIVFC getSuperBlockIVFC() { return superBlockIVFC; }
public SuperBlockPFS0 getSuperBlockPFS0() { return superBlockPFS0; }
public long getPatchInfoOffsetSection1() { return PatchInfoOffsetSection1; }
public long getPatchInfoSizeSection1() { return PatchInfoSizeSection1; }
public String getPatchInfoMagicSection1() { return BktrSection1.getMagic(); }
public int getPatchInfoVersionSection1() { return BktrSection1.getVersion(); }
public int getEntryCountSection1() { return BktrSection1.getEntryCount(); }
public byte[] getPatchInfoUnknownSection1() { return BktrSection1.getUnknown(); }
public long getPatchInfoOffsetSection2() { return PatchInfoOffsetSection2; }
public long getPatchInfoSizeSection2() { return PatchInfoSizeSection2; }
public String getPatchInfoMagicSection2() { return BktrSection2.getMagic(); }
public int getPatchInfoVersionSection2() { return BktrSection2.getVersion(); }
public int getEntryCountSection2() { return BktrSection2.getEntryCount(); }
public byte[] getPatchInfoUnknownSection2() { return BktrSection2.getUnknown(); }
public byte[] getGeneration() {return generation;}
public byte[] getSectionCTR() { return sectionCTR; }
public SparseInfo getSparseInfo() {return sparseInfo;}
public CompressionInfo getCompressionInfo() {return compressionInfo;}
public MetaDataHashDataInfo getMetaDataHashDataInfo() {return metaDataHashDataInfo;}
public byte[] getUnknownEndPadding() { return unknownEndPadding; }
public void printDebug(){
String hashTypeDescription;
switch (hashType){
case 0 :
hashTypeDescription = "Auto";
break;
case 1 :
hashTypeDescription = "None";
break;
case 2 :
hashTypeDescription = "HierarchicalSha256Hash";
break;
case 3 :
hashTypeDescription = "HierarchicalIntegrityHash";
break;
case 4 :
hashTypeDescription = "AutoSha3";
break;
case 5 :
hashTypeDescription = "HierarchicalSha3256Hash";
break;
case 6 :
hashTypeDescription = "HierarchicalIntegritySha3Hash";
break;
default:
hashTypeDescription = "???";
}
String cryptoTypeDescription;
switch (cryptoType){
case 0 :
cryptoTypeDescription = "Auto";
break;
case 1 :
cryptoTypeDescription = "None";
break;
case 2 :
cryptoTypeDescription = "AesXts";
break;
case 3 :
cryptoTypeDescription = "AesCtr";
break;
case 4 :
cryptoTypeDescription = "AesCtrEx";
break;
case 5 :
cryptoTypeDescription = "AesCtrSkipLayerHash";
break;
case 6 :
cryptoTypeDescription = "AesCtrExSkipLayerHash";
break;
default:
cryptoTypeDescription = "???";
}
log.debug("NCASectionBlock:\n" +
"Version : " + byteArrToHexString(version) + "\n" +
"FS Type : " + fsType +(fsType == 0?" (RomFS)":fsType == 1?" (PartitionFS)":" (Unknown)")+ "\n" +
"Hash Type : " + hashType +" ("+ hashTypeDescription + ")\n" +
"Crypto Type : " + cryptoType + " (" + cryptoTypeDescription + ")\n" +
"Meta Data Hash Type : " + metaDataHashType + "\n" +
"Padding : " + byteArrToHexString(padding) + "\n" +
"Super Block IVFC : " + superBlockIVFC + "\n" +
"Super Block PFS0 : " + superBlockPFS0 + "\n" +
"================================================================================================\n" +
(((fsType == 0) && (hashType == 0x3))?
("| Hash Data - RomFS\n" +
"| Magic : " + superBlockIVFC.getMagic() + "\n" +
"| Version : " + superBlockIVFC.getVersion() + "\n" +
"| Master Hash Size : " + superBlockIVFC.getMasterHashSize() + "\n" +
"| Total Number of Levels : " + superBlockIVFC.getTotalNumberOfLevels() + "\n\n" +
"| Level 1 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1Offset()) + "\n" +
"| Level 1 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1Size()) + "\n" +
"| Level 1 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl1SBlockSize()) + "\n" +
"| Level 1 reserved : " + byteArrToHexString(superBlockIVFC.getReserved1()) + "\n\n" +
"| Level 2 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2Offset()) + "\n" +
"| Level 2 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2Size()) + "\n" +
"| Level 2 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl2SBlockSize()) + "\n" +
"| Level 2 reserved : " + byteArrToHexString(superBlockIVFC.getReserved2()) + "\n\n" +
"| Level 3 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3Offset()) + "\n" +
"| Level 3 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3Size()) + "\n" +
"| Level 3 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl3SBlockSize()) + "\n" +
"| Level 3 reserved : " + byteArrToHexString(superBlockIVFC.getReserved3()) + "\n\n" +
"| Level 4 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4Offset()) + "\n" +
"| Level 4 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4Size()) + "\n" +
"| Level 4 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl4SBlockSize()) + "\n" +
"| Level 4 reserved : " + byteArrToHexString(superBlockIVFC.getReserved4()) + "\n\n" +
"| Level 5 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5Offset()) + "\n" +
"| Level 5 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5Size()) + "\n" +
"| Level 5 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl5SBlockSize()) + "\n" +
"| Level 5 reserved : " + byteArrToHexString(superBlockIVFC.getReserved5()) + "\n\n" +
"| Level 6 Offset : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6Offset()) + "\n" +
"| Level 6 Size : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6Size()) + "\n" +
"| Level 6 Block Size (log2) : " + RainbowDump.formatDecHexString(superBlockIVFC.getLvl6SBlockSize()) + "\n" +
"| Level 6 reserved : " + byteArrToHexString(superBlockIVFC.getReserved6()) + "\n\n" +
"| SignatureSalt : " + byteArrToHexString(superBlockIVFC.getSignatureSalt()) + "\n" +
"| Master Hash : " + byteArrToHexString(superBlockIVFC.getMasterHash()) + "\n" +
"| Reserved (tail) : " + byteArrToHexString(superBlockIVFC.getReservedTail()) + "\n"
)
:(((fsType == 0x1) && (hashType == 0x2))?
("| Hash Data - PFS0\n" +
"| SHA256 hash : " + byteArrToHexString(superBlockPFS0.getSHA256hash()) + "\n" +
"| Block Size (bytes) : " + superBlockPFS0.getBlockSize() + "\n" +
"| Layer Count (2) : " + superBlockPFS0.getLayerCount() + "\n" +
"| Hash table offset : " + RainbowDump.formatDecHexString(superBlockPFS0.getHashTableOffset()) + "\n" +
"| Hash table size : " + RainbowDump.formatDecHexString(superBlockPFS0.getHashTableSize()) + "\n" +
"| PFS0 header offset : " + RainbowDump.formatDecHexString(superBlockPFS0.getPfs0offset()) + "\n" +
"| PFS0 header size : " + RainbowDump.formatDecHexString(superBlockPFS0.getPfs0size()) + "\n" +
"| Unknown (reserved) : " + byteArrToHexString(superBlockPFS0.getZeroes()) + "\n"
)
:
" // Hash Data - EMPTY \\\\ \n"
)) +
"================================================================================================\n" +
" PatchInfo\n" +
"================================================================================================\n" +
"Indirect Offset : " + PatchInfoOffsetSection1 + "\n" +
"Indirect Size : " + PatchInfoSizeSection1 + "\n" +
"Magic ('BKTR') : " + BktrSection1.getMagic() + "\n" +
"Version : " + BktrSection1.getVersion() + "\n" +
"EntryCount : " + BktrSection1.getEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(BktrSection1.getUnknown()) + "\n" +
"------------------------------------------------------------------------------------------------\n" +
"AesCtrEx Offset : " + PatchInfoOffsetSection2 + "\n" +
"AesCtrEx Size : " + PatchInfoSizeSection2 + "\n" +
"Magic ('BKTR') : " + BktrSection2.getMagic() + "\n" +
"Version : " + BktrSection2.getVersion() + "\n" +
"EntryCount : " + BktrSection2.getEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(BktrSection2.getUnknown()) + "\n" +
"================================================================================================\n" +
"Generation : " + byteArrToHexString(generation) + "\n" +
"Section CTR : " + byteArrToHexString(sectionCTR) + "\n" +
"================================================================================================\n" +
" Sparse Info\n" +
"Table Offset : " + sparseInfo.getOffset() + "\n" +
"Table Size : " + sparseInfo.getSize() + "\n" +
"Magic ('BKTR') : " + sparseInfo.getBktrMagic() + "\n" +
"Version : " + sparseInfo.getBktrVersion() + "\n" +
"EntryCount : " + sparseInfo.getBktrEntryCount() + "\n" +
"Unknown (BKTR) : " + byteArrToHexString(sparseInfo.getBktrUnknown()) + "\n" +
"PhysicalOffset : " + sparseInfo.getPhysicalOffset() + "\n" +
"Generation : " + byteArrToHexString(sparseInfo.getGeneration()) + "\n" +
"Unknown (reserved) : " + byteArrToHexString(sparseInfo.getUnknown()) + "\n" +
"================================================================================================\n" +
" Compression Info\n" +
"Table Offset : " + compressionInfo.getOffset() + "\n" +
"Table Size : " + compressionInfo.getSize() + "\n" +
"Magic ('BKTR') : " + compressionInfo.getBktrMagic() + "\n" +
"Version : " + compressionInfo.getBktrVersion() + "\n" +
"EntryCount : " + compressionInfo.getBktrEntryCount() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(compressionInfo.getBktrUnknown()) + "\n" +
"Reserved : " + byteArrToHexString(compressionInfo.getUnknown()) + "\n" +
"================================================================================================\n" +
" Meta Data Hash Data Info\n" +
"Table Offset : " + metaDataHashDataInfo.getOffset() + "\n" +
"Table Size : " + metaDataHashDataInfo.getSize() + "\n" +
"Unknown (reserved) : " + byteArrToHexString(metaDataHashDataInfo.getTableHash()) + "\n" +
"================================================================================================\n" +
"Unknown End Padding : " + byteArrToHexString(unknownEndPadding) + "\n" +
"################################################################################################\n"
);
}
}
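The two switch blocks in printDebug() above map the raw hashType and cryptoType values to readable names. As a hedged illustration only (this enum is not part of libKonogonka; the names and numeric values are copied from the switch above), the same lookup could be expressed as:

enum HashTypeDescription {
    AUTO(0), NONE(1), HIERARCHICAL_SHA256_HASH(2), HIERARCHICAL_INTEGRITY_HASH(3),
    AUTO_SHA3(4), HIERARCHICAL_SHA3_256_HASH(5), HIERARCHICAL_INTEGRITY_SHA3_HASH(6);

    private final int value;
    HashTypeDescription(int value){ this.value = value; }

    // Same behaviour as the switch above, including the "???" fallback for unknown values.
    static String describe(int hashType){
        for (HashTypeDescription candidate : values())
            if (candidate.value == hashType)
                return candidate.name();
        return "???";
    }
}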

View file

@ -0,0 +1,51 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays;
import static libKonogonka.Converter.getLElong;
public class SparseInfo {
private final long offset;
private final long size;
private final BucketTreeHeader bktr;
private final long physicalOffset;
private final byte[] generation;
private final byte[] unknown;
SparseInfo(byte[] rawTable){
offset = getLElong(rawTable, 0);
size = getLElong(rawTable, 0x8);
bktr = new BucketTreeHeader(Arrays.copyOfRange(rawTable, 0x10, 0x20));
physicalOffset = getLElong(rawTable, 0x20);
generation = Arrays.copyOfRange(rawTable, 0x28, 0x2a);
unknown = Arrays.copyOfRange(rawTable, 0x2a, 0x30);
}
public long getOffset() { return offset; }
public long getSize() { return size; }
public String getBktrMagic() { return bktr.getMagic(); }
public int getBktrVersion() { return bktr.getVersion(); }
public int getBktrEntryCount() { return bktr.getEntryCount(); }
public byte[] getBktrUnknown() { return bktr.getUnknown(); }
public long getPhysicalOffset() {return physicalOffset;}
public byte[] getGeneration() {return generation;}
public byte[] getUnknown() {return unknown;}
}
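SparseInfo above parses a 0x30-byte table at fixed little-endian offsets. A minimal sketch of the same layout written against java.nio.ByteBuffer (illustration only, not library code; the offsets are the ones used by the constructor above):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

class SparseInfoLayoutSketch {
    static void dump(byte[] rawTable){
        ByteBuffer buffer = ByteBuffer.wrap(rawTable).order(ByteOrder.LITTLE_ENDIAN);
        long offset             = buffer.getLong(0x0);                          // table offset
        long size               = buffer.getLong(0x8);                          // table size
        byte[] bucketTreeHeader = Arrays.copyOfRange(rawTable, 0x10, 0x20);     // 'BKTR' magic, version, entry count, unknown
        long physicalOffset     = buffer.getLong(0x20);
        byte[] generation       = Arrays.copyOfRange(rawTable, 0x28, 0x2a);
        byte[] unknown          = Arrays.copyOfRange(rawTable, 0x2a, 0x30);
        System.out.printf("offset=0x%x size=0x%x physicalOffset=0x%x%n", offset, size, physicalOffset);
    }
}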

View file

@ -21,50 +21,55 @@ package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint; import static libKonogonka.Converter.getLEint;
import static libKonogonka.LoperConverter.getLElong; import static libKonogonka.Converter.getLElong;
public class SuperBlockIVFC { public class SuperBlockIVFC {
private String magic; private final String magic;
private int magicNumber; private final int version;
private int masterHashSize; private final int masterHashSize;
private int totalNumberOfLevels; private final int totalNumberOfLevels;
private long lvl1Offset; private final long lvl1Offset;
private long lvl1Size; private final long lvl1Size;
private int lvl1SBlockSize; private final int lvl1SBlockSize;
private byte[] reserved1; private final byte[] reserved1;
private long lvl2Offset; private final long lvl2Offset;
private long lvl2Size; private final long lvl2Size;
private int lvl2SBlockSize; private final int lvl2SBlockSize;
private byte[] reserved2; private final byte[] reserved2;
private long lvl3Offset; private final long lvl3Offset;
private long lvl3Size; private final long lvl3Size;
private int lvl3SBlockSize; private final int lvl3SBlockSize;
private byte[] reserved3; private final byte[] reserved3;
private long lvl4Offset; private final long lvl4Offset;
private long lvl4Size; private final long lvl4Size;
private int lvl4SBlockSize; private final int lvl4SBlockSize;
private byte[] reserved4; private final byte[] reserved4;
private long lvl5Offset; private final long lvl5Offset;
private long lvl5Size; private final long lvl5Size;
private int lvl5SBlockSize; private final int lvl5SBlockSize;
private byte[] reserved5; private final byte[] reserved5;
private long lvl6Offset; private final long lvl6Offset;
private long lvl6Size; private final long lvl6Size;
private int lvl6SBlockSize; private final int lvl6SBlockSize;
private byte[] reserved6; private final byte[] reserved6;
private byte[] unknown; private final byte[] signatureSalt;
private byte[] hash; private final byte[] masterHash;
private final byte[] reservedTail;
/**
* Also known as IntegrityMetaInfo
* @param sbBytes - Chunk of data related to the IVFC Hash Data table
* @param sbBytes - Chunk of data related to the IVFC Hash Data table
*/
SuperBlockIVFC(byte[] sbBytes){ SuperBlockIVFC(byte[] sbBytes){
this.magic = new String(Arrays.copyOfRange(sbBytes, 0, 4), StandardCharsets.US_ASCII); this.magic = new String(Arrays.copyOfRange(sbBytes, 0, 4), StandardCharsets.US_ASCII);
this.magicNumber = getLEint(sbBytes, 0x4); this.version = getLEint(sbBytes, 0x4);
this.masterHashSize = getLEint(sbBytes, 0x8); this.masterHashSize = getLEint(sbBytes, 0x8);
this.totalNumberOfLevels = getLEint(sbBytes, 0xc); this.totalNumberOfLevels = getLEint(sbBytes, 0xc);
@ -98,50 +103,13 @@ public class SuperBlockIVFC {
this.lvl6SBlockSize = getLEint(sbBytes, 0x98); this.lvl6SBlockSize = getLEint(sbBytes, 0x98);
this.reserved6 = Arrays.copyOfRange(sbBytes, 0x9c, 0xa0); this.reserved6 = Arrays.copyOfRange(sbBytes, 0x9c, 0xa0);
this.unknown = Arrays.copyOfRange(sbBytes, 0xa0, 0xc0); this.signatureSalt = Arrays.copyOfRange(sbBytes, 0xa0, 0xc0);
this.hash = Arrays.copyOfRange(sbBytes, 0xc0, 0xe0); this.masterHash = Arrays.copyOfRange(sbBytes, 0xc0, 0xe0);
/* this.reservedTail = Arrays.copyOfRange(sbBytes, 0xe0, 0xf8);
System.out.println(magic);
System.out.println(magicNumber);
System.out.println(masterHashSize);
System.out.println(totalNumberOfLevels);
System.out.println(lvl1Offset);
System.out.println(lvl1Size);
System.out.println(lvl1SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved1);
System.out.println(lvl2Offset);
System.out.println(lvl2Size);
System.out.println(lvl2SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved2);
System.out.println(lvl3Offset);
System.out.println(lvl3Size);
System.out.println(lvl3SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved3);
System.out.println(lvl4Offset);
System.out.println(lvl4Size);
System.out.println(lvl4SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved4);
System.out.println(lvl5Offset);
System.out.println(lvl5Size);
System.out.println(lvl5SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved5);
System.out.println(lvl6Offset);
System.out.println(lvl6Size);
System.out.println(lvl6SBlockSize);
RainbowHexDump.hexDumpUTF8(reserved6);
RainbowHexDump.hexDumpUTF8(unknown);
RainbowHexDump.hexDumpUTF8(hash);
// */
} }
public String getMagic() { return magic; } public String getMagic() { return magic; }
public int getMagicNumber() { return magicNumber; } public int getVersion() { return version; }
public int getMasterHashSize() { return masterHashSize; } public int getMasterHashSize() { return masterHashSize; }
public int getTotalNumberOfLevels() { return totalNumberOfLevels; } public int getTotalNumberOfLevels() { return totalNumberOfLevels; }
public long getLvl1Offset() { return lvl1Offset; } public long getLvl1Offset() { return lvl1Offset; }
@ -168,6 +136,7 @@ public class SuperBlockIVFC {
public long getLvl6Size() { return lvl6Size; } public long getLvl6Size() { return lvl6Size; }
public int getLvl6SBlockSize() { return lvl6SBlockSize; } public int getLvl6SBlockSize() { return lvl6SBlockSize; }
public byte[] getReserved6() { return reserved6; } public byte[] getReserved6() { return reserved6; }
public byte[] getUnknown() { return unknown; } public byte[] getSignatureSalt() { return signatureSalt; }
public byte[] getHash() { return hash; } public byte[] getMasterHash() { return masterHash; }
public byte[] getReservedTail() { return reservedTail; }
} }

View file

@ -20,33 +20,37 @@ package libKonogonka.Tools.NCA.NCASectionTableBlock;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint; import static libKonogonka.Converter.getLEint;
import static libKonogonka.LoperConverter.getLElong; import static libKonogonka.Converter.getLElong;
public class SuperBlockPFS0 { public class SuperBlockPFS0 {
private byte[] SHA256hash; private final byte[] SHA256hash;
private int blockSize; private final int blockSize;
private int unknownNumberTwo; private final int layerCount;
private long hashTableOffset; private final long hashTableOffset;
private long hashTableSize; private final long hashTableSize;
private long pfs0offset; private final long pfs0offset;
private long pfs0size; private final long pfs0size;
private byte[] zeroes; private final byte[] zeroes;
/**
* Also known as HierarchicalSha256Data
* @param sbBytes - Chunk of data related to the PFS0 Hash Data table
*/
SuperBlockPFS0(byte[] sbBytes){ SuperBlockPFS0(byte[] sbBytes){
SHA256hash = Arrays.copyOfRange(sbBytes, 0, 0x20); SHA256hash = Arrays.copyOfRange(sbBytes, 0, 0x20);
blockSize = getLEint(sbBytes, 0x20); blockSize = getLEint(sbBytes, 0x20);
unknownNumberTwo = getLEint(sbBytes, 0x24); layerCount = getLEint(sbBytes, 0x24);
hashTableOffset = getLElong(sbBytes, 0x28); hashTableOffset = getLElong(sbBytes, 0x28);
hashTableSize = getLElong(sbBytes, 0x30); hashTableSize = getLElong(sbBytes, 0x30);
pfs0offset = getLElong(sbBytes, 0x38); pfs0offset = getLElong(sbBytes, 0x38);
pfs0size = getLElong(sbBytes, 0x40); pfs0size = getLElong(sbBytes, 0x40);
zeroes = Arrays.copyOfRange(sbBytes, 0x48, 0xf8); zeroes = Arrays.copyOfRange(sbBytes, 0x48, 0xf0);
} }
public byte[] getSHA256hash() { return SHA256hash; } public byte[] getSHA256hash() { return SHA256hash; }
public int getBlockSize() { return blockSize; } public int getBlockSize() { return blockSize; }
public int getUnknownNumberTwo() { return unknownNumberTwo; } public int getLayerCount() { return layerCount; }
public long getHashTableOffset() { return hashTableOffset; } public long getHashTableOffset() { return hashTableOffset; }
public long getHashTableSize() { return hashTableSize; } public long getHashTableSize() { return hashTableSize; }
public long getPfs0offset() { return pfs0offset; } public long getPfs0offset() { return pfs0offset; }

View file

@ -24,7 +24,7 @@ import libKonogonka.Tools.NPDM.ServiceAccessControlProvider;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.getLEint; import static libKonogonka.Converter.getLEint;
public class ACI0Provider { public class ACI0Provider {
private String magicNum; private String magicNum;

View file

@ -18,7 +18,7 @@
*/ */
package libKonogonka.Tools.NPDM.ACI0; package libKonogonka.Tools.NPDM.ACI0;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import java.util.Arrays; import java.util.Arrays;
@ -39,11 +39,11 @@ public class FSAccessHeaderProvider {
public FSAccessHeaderProvider(byte[] bytes) { public FSAccessHeaderProvider(byte[] bytes) {
version = bytes[0]; version = bytes[0];
padding = Arrays.copyOfRange(bytes, 1, 0x4); padding = Arrays.copyOfRange(bytes, 1, 0x4);
permissionsBitmask = LoperConverter.getLElong(bytes, 0x4); permissionsBitmask = Converter.getLElong(bytes, 0x4);
dataSize = LoperConverter.getLEint(bytes, 0xC); dataSize = Converter.getLEint(bytes, 0xC);
contentOwnIdSectionSize = LoperConverter.getLEint(bytes, 0x10); contentOwnIdSectionSize = Converter.getLEint(bytes, 0x10);
dataNownerSizes = LoperConverter.getLEint(bytes, 0x14); dataNownerSizes = Converter.getLEint(bytes, 0x14);
saveDataOwnSectionSize = LoperConverter.getLEint(bytes, 0x18); saveDataOwnSectionSize = Converter.getLEint(bytes, 0x18);
unknownData = Arrays.copyOfRange(bytes, 0x1C, bytes.length); unknownData = Arrays.copyOfRange(bytes, 0x1C, bytes.length);
} }

View file

@ -24,7 +24,7 @@ import libKonogonka.Tools.NPDM.ServiceAccessControlProvider;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.*; import static libKonogonka.Converter.*;
public class ACIDProvider { public class ACIDProvider {

View file

@ -18,7 +18,7 @@
*/ */
package libKonogonka.Tools.NPDM.ACID; package libKonogonka.Tools.NPDM.ACID;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import java.util.Arrays; import java.util.Arrays;
@ -35,7 +35,7 @@ public class FSAccessControlProvider {
public FSAccessControlProvider(byte[] bytes) { public FSAccessControlProvider(byte[] bytes) {
version = bytes[0]; version = bytes[0];
padding = Arrays.copyOfRange(bytes, 1, 0x4); padding = Arrays.copyOfRange(bytes, 1, 0x4);
permissionsBitmask = LoperConverter.getLElong(bytes, 0x4); permissionsBitmask = Converter.getLElong(bytes, 0x4);
reserved = Arrays.copyOfRange(bytes, 0xC, 0x2C); reserved = Arrays.copyOfRange(bytes, 0xC, 0x2C);
} }

View file

@ -18,7 +18,7 @@
*/ */
package libKonogonka.Tools.NPDM; package libKonogonka.Tools.NPDM;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.LinkedList; import java.util.LinkedList;
@ -126,7 +126,7 @@ public class KernelAccessControlProvider {
int position = 0; int position = 0;
// Collect all blocks // Collect all blocks
for (int i = 0; i < bytes.length / 4; i++) { for (int i = 0; i < bytes.length / 4; i++) {
int block = LoperConverter.getLEint(bytes, position); int block = Converter.getLEint(bytes, position);
position += 4; position += 4;
rawData.add(block); rawData.add(block);

View file

@ -28,7 +28,7 @@ import java.io.RandomAccessFile;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.*; import static libKonogonka.Converter.*;
public class NPDMProvider extends ASuperInFileProvider { public class NPDMProvider extends ASuperInFileProvider {

View file

@ -24,27 +24,27 @@ import java.io.*;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.*; import static libKonogonka.Converter.*;
public class PFS0EncryptedProvider implements IPFS0Provider{ public class PFS0EncryptedProvider implements IPFS0Provider{
private long rawFileDataStart; // Always -1 @ PFS0EncryptedProvider private long rawFileDataStart; // Always -1 @ PFS0EncryptedProvider
private String magic; private final String magic;
private int filesCount; private final int filesCount;
private int stringTableSize; private final int stringTableSize;
private byte[] padding; private final byte[] padding;
private PFS0subFile[] pfs0subFiles; private final PFS0subFile[] pfs0subFiles;
//--------------------------------------- //---------------------------------------
private long rawBlockDataStart; private long rawBlockDataStart;
private long offsetPositionInFile; private final long offsetPositionInFile;
private File file; private final File file;
private byte[] key; private final byte[] key;
private byte[] sectionCTR; private final byte[] sectionCTR;
private long mediaStartOffset; // In 512-blocks private final long mediaStartOffset; // In 512-blocks
private long mediaEndOffset; // In 512-blocks private final long mediaEndOffset; // In 512-blocks
public PFS0EncryptedProvider(PipedInputStream pipedInputStream, public PFS0EncryptedProvider(PipedInputStream pipedInputStream,
long pfs0offsetPosition, long pfs0offsetPosition,
@ -62,7 +62,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
this.sectionCTR = sectionCTR; this.sectionCTR = sectionCTR;
this.mediaStartOffset = mediaStartOffset; this.mediaStartOffset = mediaStartOffset;
this.mediaEndOffset = mediaEndOffset; this.mediaEndOffset = mediaEndOffset;
// pfs0offsetPosition is a position relative to Media block. Lets add pfs0 'header's' bytes count and get raw data start position in media block // pfs0offsetPosition is a position relative to Media block. Let's add pfs0 'header's' bytes count and get raw data start position in media block
rawFileDataStart = -1; // Set -1 for PFS0EncryptedProvider rawFileDataStart = -1; // Set -1 for PFS0EncryptedProvider
// Detect raw data start position using next var // Detect raw data start position using next var
rawBlockDataStart = pfs0offsetPosition; rawBlockDataStart = pfs0offsetPosition;
@ -214,7 +214,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
if (skipBytes > 0) { if (skipBytes > 0) {
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) { if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock); dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
// If we have extra-small file that is less then a block and even more // If we have extra-small file that is less then a block and even more
if ((0x200 - skipBytes) > pfs0subFiles[subFileNumber].getSize()){ if ((0x200 - skipBytes) > pfs0subFiles[subFileNumber].getSize()){
streamOut.write(dectyptedBlock, skipBytes, (int) pfs0subFiles[subFileNumber].getSize()); // safe cast streamOut.write(dectyptedBlock, skipBytes, (int) pfs0subFiles[subFileNumber].getSize()); // safe cast
@ -244,7 +244,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) { if (bis.read(encryptedBlock) == 0x200) {
//dectyptedBlock = aesCtr.decrypt(encryptedBlock); //dectyptedBlock = aesCtr.decrypt(encryptedBlock);
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock); dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
// Writing decrypted data to pipe // Writing decrypted data to pipe
streamOut.write(dectyptedBlock); streamOut.write(dectyptedBlock);
} }
@ -259,7 +259,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
if (extraData > 0){ // In case we didn't get what we want if (extraData > 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) { if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock); dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
streamOut.write(dectyptedBlock, 0, extraData); streamOut.write(dectyptedBlock, 0, extraData);
} }
else { else {
@ -270,7 +270,7 @@ public class PFS0EncryptedProvider implements IPFS0Provider{
else if (extraData < 0){ // In case we can get more than we need else if (extraData < 0){ // In case we can get more than we need
encryptedBlock = new byte[0x200]; encryptedBlock = new byte[0x200];
if (bis.read(encryptedBlock) == 0x200) { if (bis.read(encryptedBlock) == 0x200) {
dectyptedBlock = aesCtrDecryptSimple.dectyptNext(encryptedBlock); dectyptedBlock = aesCtrDecryptSimple.decryptNext(encryptedBlock);
streamOut.write(dectyptedBlock, 0, 0x200 + extraData); // WTF ??? THIS LOOKS INCORRECT streamOut.write(dectyptedBlock, 0, 0x200 + extraData); // WTF ??? THIS LOOKS INCORRECT
} }
else { else {
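The provider above always reads and decrypts whole 0x200-byte AES-CTR blocks and then trims skipBytes at the head and extraData at the tail of the requested sub-file. A simplified, hypothetical sketch of that alignment arithmetic (illustrative names only, not the provider's exact bookkeeping):

class CtrBlockAlignmentSketch {
    // Work out which 0x200 block a sub-file starts in and how many bytes of the
    // first/last decrypted blocks actually belong to it.
    static void align(long subFileOffset, long subFileSize){
        final int BLOCK = 0x200;
        long firstBlock = subFileOffset / BLOCK;          // CTR counter must be advanced by this many blocks
        int headSkip    = (int) (subFileOffset % BLOCK);  // leading bytes of the first decrypted block to discard
        int tailKeep    = (int) ((subFileOffset + subFileSize) % BLOCK); // bytes of the last block still owned by the file (0 = aligned end)
        System.out.printf("firstBlock=%d headSkip=%d tailKeep=%d%n", firstBlock, headSkip, tailKeep);
    }
}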

View file

@ -22,18 +22,18 @@ import java.io.*;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Arrays; import java.util.Arrays;
import static libKonogonka.LoperConverter.*; import static libKonogonka.Converter.*;
public class PFS0Provider implements IPFS0Provider{ public class PFS0Provider implements IPFS0Provider{
private long rawFileDataStart; // Where data starts, excluding header, string table etc. private final long rawFileDataStart; // Where data starts, excluding header, string table etc.
private String magic; private final String magic;
private int filesCount; private final int filesCount;
private int stringTableSize; private final int stringTableSize;
private byte[] padding; private final byte[] padding;
private PFS0subFile[] pfs0subFiles; private final PFS0subFile[] pfs0subFiles;
private File file; private final File file;
public PFS0Provider(File fileWithPfs0) throws Exception{ this(fileWithPfs0, 0); } public PFS0Provider(File fileWithPfs0) throws Exception{ this(fileWithPfs0, 0); }

View file

@ -1,87 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FileMeta4Debug {
List<FileMeta> allFiles;
FileMeta4Debug(long fileMetadataTableLength, byte[] fileMetadataTable) {
allFiles = new ArrayList<>();
int i = 0;
while (i < fileMetadataTableLength) {
FileMeta fileMeta = new FileMeta();
fileMeta.containingDirectoryOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.nextSiblingFileOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileDataOffset = LoperConverter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.fileDataLength = LoperConverter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.nextFileOffset = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileNameLength = LoperConverter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileName = new String(Arrays.copyOfRange(fileMetadataTable, i, i + fileMeta.fileNameLength), StandardCharsets.UTF_8);
;
i += getRealNameSize(fileMeta.fileNameLength);
allFiles.add(fileMeta);
}
for (FileMeta fileMeta : allFiles){
System.out.println(
"-------------------------FILE--------------------------------\n" +
"Offset of Containing Directory " + formatDecHexString(fileMeta.containingDirectoryOffset) + "\n" +
"Offset of next Sibling File " + formatDecHexString(fileMeta.nextSiblingFileOffset) + "\n" +
"Offset of File's Data " + formatDecHexString(fileMeta.fileDataOffset) + "\n" +
"Length of File's Data " + formatDecHexString(fileMeta.fileDataLength) + "\n" +
"Offset of next File in the same Hash Table bucket " + formatDecHexString(fileMeta.nextFileOffset) + "\n" +
"Name Length " + formatDecHexString(fileMeta.fileNameLength) + "\n" +
"Name Length (rounded up to multiple of 4) " + fileMeta.fileName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FileMeta{
int containingDirectoryOffset;
int nextSiblingFileOffset;
long fileDataOffset;
long fileDataLength;
int nextFileOffset;
int fileNameLength;
String fileName;
}
}

View file

@ -19,7 +19,11 @@
package libKonogonka.Tools.RomFs; package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import libKonogonka.Tools.NCA.NCAContent;
import libKonogonka.Tools.RomFs.view.FileSystemTreeViewMaker;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.ArrayList; import java.util.ArrayList;
@ -28,9 +32,11 @@ import java.util.Comparator;
import java.util.List; import java.util.List;
public class FileSystemEntry { public class FileSystemEntry {
private final static Logger log = LogManager.getLogger(NCAContent.class);
private boolean directoryFlag; private boolean directoryFlag;
private String name; private String name;
private List<FileSystemEntry> content; private final List<FileSystemEntry> content;
private static byte[] dirsMetadataTable; private static byte[] dirsMetadataTable;
private static byte[] filesMetadataTable; private static byte[] filesMetadataTable;
@ -107,31 +113,44 @@ public class FileSystemEntry {
private static class DirectoryMetaData { private static class DirectoryMetaData {
private int parentDirectoryOffset; private final int parentDirectoryOffset;
private int nextSiblingDirectoryOffset; private final int nextSiblingDirectoryOffset;
private int firstSubdirectoryOffset; private final int firstSubdirectoryOffset;
private int firstFileOffset; private final int firstFileOffset;
private final int nextHashTableBucketDirectoryOffset;
private String dirName; private final String dirName;
private DirectoryMetaData(){ private DirectoryMetaData(){
this(0); this(0);
} }
private DirectoryMetaData(int childDirMetaPosition){ private DirectoryMetaData(int childDirMetaPosition){
int i = childDirMetaPosition; int i = childDirMetaPosition;
parentDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i); parentDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4; i += 4;
nextSiblingDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i); nextSiblingDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4; i += 4;
firstSubdirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i); firstSubdirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4; i += 4;
firstFileOffset = LoperConverter.getLEint(dirsMetadataTable, i); firstFileOffset = Converter.getLEint(dirsMetadataTable, i);
i += 4; i += 4;
// int nextHashTableBucketDirectoryOffset = LoperConverter.getLEint(dirsMetadataTable, i); nextHashTableBucketDirectoryOffset = Converter.getLEint(dirsMetadataTable, i);
//*
if (nextHashTableBucketDirectoryOffset < 0) {
System.out.println("nextHashTableBucketDirectoryOffset: "+ nextHashTableBucketDirectoryOffset);
}
//*/
i += 4; i += 4;
int dirNameLength = LoperConverter.getLEint(dirsMetadataTable, i); int dirNameLength = Converter.getLEint(dirsMetadataTable, i);
if (dirNameLength > 0) {
i += 4; i += 4;
dirName = new String(Arrays.copyOfRange(dirsMetadataTable, i, i + dirNameLength), StandardCharsets.UTF_8); dirName = new String(Arrays.copyOfRange(dirsMetadataTable, i, i + dirNameLength), StandardCharsets.UTF_8);
}
else {
dirName = "";
System.out.println("dirName: "+dirNameLength);
}
//i += getRealNameSize(dirNameLength); //i += getRealNameSize(dirNameLength);
} }
@ -142,10 +161,10 @@ public class FileSystemEntry {
} }
} }
private static class FileMetaData { private static class FileMetaData {
private final int nextSiblingFileOffset;
private int nextSiblingFileOffset; private final long fileDataRealOffset;
private long fileDataRealOffset; private final long fileDataRealLength;
private long fileDataRealLength; private final int nextHashTableBucketFileOffset;
private String fileName; private String fileName;
@ -157,36 +176,43 @@ public class FileSystemEntry {
int i = childFileMetaPosition; int i = childFileMetaPosition;
// int containingDirectoryOffset = LoperConverter.getLEint(filesMetadataTable, i); // never used // int containingDirectoryOffset = LoperConverter.getLEint(filesMetadataTable, i); // never used
i += 4; i += 4;
nextSiblingFileOffset = LoperConverter.getLEint(filesMetadataTable, i); nextSiblingFileOffset = Converter.getLEint(filesMetadataTable, i);
i += 4; i += 4;
fileDataRealOffset = LoperConverter.getLElong(filesMetadataTable, i); fileDataRealOffset = Converter.getLElong(filesMetadataTable, i);
i += 8; i += 8;
fileDataRealLength = LoperConverter.getLElong(filesMetadataTable, i); fileDataRealLength = Converter.getLElong(filesMetadataTable, i);
i += 8; i += 8;
//int nextHashTableBucketFileOffset = LoperConverter.getLEint(filesMetadataTable, i); nextHashTableBucketFileOffset = Converter.getLEint(filesMetadataTable, i);
//*
if (nextHashTableBucketFileOffset < 0) {
System.out.println("nextHashTableBucketFileOffset: "+ nextHashTableBucketFileOffset);
}
//*/
i += 4; i += 4;
int fileNameLength = LoperConverter.getLEint(filesMetadataTable, i); int fileNameLength = Converter.getLEint(filesMetadataTable, i);
if (fileNameLength > 0) {
i += 4; i += 4;
fileName = new String(Arrays.copyOfRange(filesMetadataTable, i, i + fileNameLength), StandardCharsets.UTF_8);; fileName = "";
try {
fileName = new String(Arrays.copyOfRange(filesMetadataTable, i, i + fileNameLength), StandardCharsets.UTF_8);
}
catch (Exception e){
System.out.println("fileName sizes are: "+filesMetadataTable.length+"\t"+i+"\t"+i + fileNameLength+"\t\t"+nextHashTableBucketFileOffset);
}
}
else {
fileName = "";
System.out.println("fileName: "+fileNameLength);
}
//i += getRealNameSize(fileNameLength); //i += getRealNameSize(fileNameLength);
} }
} }
public void printTreeForDebug(int spacerForSizes){
log.debug(FileSystemTreeViewMaker.make(content, spacerForSizes));
}
public void printTreeForDebug(){ public void printTreeForDebug(){
System.out.println("/"); log.debug(FileSystemTreeViewMaker.make(content, 100));
for (FileSystemEntry entry: content)
printEntry(2, entry);
} }
private void printEntry(int cnt, FileSystemEntry entry) {
for (int i = 0; i < cnt; i++)
System.out.print(" ");
if (entry.isDirectory()){
System.out.println("|-" + entry.getName());
for (FileSystemEntry e : entry.content)
printEntry(cnt+2, e);
}
else
System.out.println("|-" + entry.getName() + String.format(" 0x%-10x 0x%-10x", entry.fileOffset, entry.fileSize));
}
} }

View file

@ -1,84 +0,0 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FolderMeta4Debug {
List<FolderMeta> allFolders;
FolderMeta4Debug(long directoryMetadataTableLength, byte[] directoryMetadataTable){
allFolders = new ArrayList<>();
int i = 0;
while (i < directoryMetadataTableLength){
FolderMeta folderMeta = new FolderMeta();
folderMeta.parentDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextSiblingDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstSubdirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstFileOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextDirectoryOffset = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirNameLength = LoperConverter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirName = new String(Arrays.copyOfRange(directoryMetadataTable, i, i + folderMeta.dirNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(folderMeta.dirNameLength);
System.out.println(
"---------------------------DIRECTORY------------------------\n" +
"Offset of Parent Directory (self if Root) " + formatDecHexString(folderMeta.parentDirectoryOffset ) +"\n" +
"Offset of next Sibling Directory " + formatDecHexString(folderMeta.nextSiblingDirectoryOffset) +"\n" +
"Offset of first Child Directory (Subdirectory) " + formatDecHexString(folderMeta.firstSubdirectoryOffset ) +"\n" +
"Offset of first File (in File Metadata Table) " + formatDecHexString(folderMeta.firstFileOffset ) +"\n" +
"Offset of next Directory in the same Hash Table bucket " + formatDecHexString(folderMeta.nextDirectoryOffset ) +"\n" +
"Name Length " + formatDecHexString(folderMeta.dirNameLength ) +"\n" +
"Name Length (rounded up to multiple of 4) " + folderMeta.dirName + "\n"
);
allFolders.add(folderMeta);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FolderMeta {
int parentDirectoryOffset;
int nextSiblingDirectoryOffset;
int firstSubdirectoryOffset;
int firstFileOffset;
int nextDirectoryOffset;
int dirNameLength;
String dirName;
}
}

View file

@ -23,9 +23,10 @@ import java.io.File;
import java.io.PipedInputStream; import java.io.PipedInputStream;
public interface IRomFsProvider { public interface IRomFsProvider {
File getFile();
long getLevel6Offset(); long getLevel6Offset();
Level6Header getHeader(); Level6Header getHeader();
FileSystemEntry getRootEntry(); FileSystemEntry getRootEntry();
PipedInputStream getContent(FileSystemEntry entry) throws Exception; PipedInputStream getContent(FileSystemEntry entry) throws Exception;
File getFile(); void printDebug();
} }

View file

@ -19,25 +19,41 @@
package libKonogonka.Tools.RomFs; package libKonogonka.Tools.RomFs;
import libKonogonka.LoperConverter; import libKonogonka.Converter;
import libKonogonka.RainbowDump; import libKonogonka.RainbowDump;
import org.apache.logging.log4j.LogManager;
import java.util.Arrays; import org.apache.logging.log4j.Logger;
/**
* This class stores information contained in Level 6 Header of the RomFS image
* ------------------------------------
* | Header Length (usually 0x50) |
* | Directory Hash Table Offset | Not used by this library | '<< 32' to get real offset: see implementation
* | Directory Hash Table Length | Not used by this library
* | Directory Metadata Table Offset |
* | Directory Metadata Table Length |
* | File Hash Table Offset | Not used by this library
* | File Hash Table Length | Not used by this library
* | File Metadata Table Offset |
* | File Metadata Table Length |
* | File Data Offset |
* ------------------------------------
* */
public class Level6Header { public class Level6Header {
private long headerLength; private final static Logger log = LogManager.getLogger(Level6Header.class);
private long directoryHashTableOffset;
private long directoryHashTableLength;
private long directoryMetadataTableOffset;
private long directoryMetadataTableLength;
private long fileHashTableOffset;
private long fileHashTableLength;
private long fileMetadataTableOffset;
private long fileMetadataTableLength;
private long fileDataOffset;
private byte[] headerBytes; private final long headerLength;
private int i; private long directoryHashTableOffset;
private final long directoryHashTableLength;
private final long directoryMetadataTableOffset;
private final long directoryMetadataTableLength;
private final long fileHashTableOffset;
private final long fileHashTableLength;
private final long fileMetadataTableOffset;
private final long fileMetadataTableLength;
private final long fileDataOffset;
private final byte[] headerBytes;
private int _cursor;
Level6Header(byte[] headerBytes) throws Exception{ Level6Header(byte[] headerBytes) throws Exception{
this.headerBytes = headerBytes; this.headerBytes = headerBytes;
@ -54,12 +70,11 @@ public class Level6Header {
fileMetadataTableOffset = getNext(); fileMetadataTableOffset = getNext();
fileMetadataTableLength = getNext(); fileMetadataTableLength = getNext();
fileDataOffset = getNext(); fileDataOffset = getNext();
RainbowDump.hexDumpUTF8(Arrays.copyOfRange(headerBytes, 0, 0x50));
} }
private long getNext(){ private long getNext(){
final long result = LoperConverter.getLEint(headerBytes, i); final long result = Converter.getLEint(headerBytes, _cursor);
i += 0x8; _cursor += 0x8;
return result; return result;
} }
@ -75,8 +90,8 @@ public class Level6Header {
public long getFileDataOffset() { return fileDataOffset; } public long getFileDataOffset() { return fileDataOffset; }
public void printDebugInfo(){ public void printDebugInfo(){
System.out.println("== Level 6 Header ==\n" + log.debug("== Level 6 Header ==\n" +
"Header Length (always 0x50 ?) "+ RainbowDump.formatDecHexString(headerLength)+" (size of this structure within first 0x200 block of LEVEL 6 part)\n" + "Header Length (usually 0x50) "+ RainbowDump.formatDecHexString(headerLength)+" (size of this structure within first 0x200 block of LEVEL 6 part)\n" +
"Directory Hash Table Offset "+ RainbowDump.formatDecHexString(directoryHashTableOffset)+" (against THIS block where HEADER contains)\n" + "Directory Hash Table Offset "+ RainbowDump.formatDecHexString(directoryHashTableOffset)+" (against THIS block where HEADER contains)\n" +
"Directory Hash Table Length "+ RainbowDump.formatDecHexString(directoryHashTableLength) + "\n" + "Directory Hash Table Length "+ RainbowDump.formatDecHexString(directoryHashTableLength) + "\n" +
"Directory Metadata Table Offset "+ RainbowDump.formatDecHexString(directoryMetadataTableOffset) + "\n" + "Directory Metadata Table Offset "+ RainbowDump.formatDecHexString(directoryMetadataTableOffset) + "\n" +

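The comment block above documents the Level 6 header as ten consecutive 8-byte little-endian fields, which is exactly what getNext() walks through with its 0x8 cursor step. A minimal sketch of reading the same fields with a ByteBuffer (illustration only, not library code; the field order follows the comment above):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class Level6HeaderLayoutSketch {
    // Order: headerLength, dirHashTable offset/length, dirMetadataTable offset/length,
    //        fileHashTable offset/length, fileMetadataTable offset/length, fileDataOffset.
    static long[] readFields(byte[] headerBytes){
        ByteBuffer buffer = ByteBuffer.wrap(headerBytes).order(ByteOrder.LITTLE_ENDIAN);
        long[] fields = new long[10];
        for (int i = 0; i < fields.length; i++)
            fields[i] = buffer.getLong(i * 0x8);
        return fields;
    }
}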
View file

@ -0,0 +1,121 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.Converter;
import java.io.BufferedInputStream;
import java.io.File;
import java.nio.file.Files;
/**
* Constructs the RomFS Level 6 header and obtains the root FileSystemEntry (meta information)
* */
class RomFsDecryptedConstruct {
private Level6Header header;
private FileSystemEntry rootEntry;
private final BufferedInputStream fileBufferedInputStream;
private int headerSize;
private byte[] directoryMetadataTable;
private byte[] fileMetadataTable;
RomFsDecryptedConstruct(File decryptedFsImageFile, long level6offset) throws Exception{
if (level6offset < 0)
throw new Exception("Incorrect Level 6 Offset");
fileBufferedInputStream = new BufferedInputStream(Files.newInputStream(decryptedFsImageFile.toPath()));
fastForwardBySkippingBytes(level6offset);
detectHeaderSize();
constructHeader();
fastForwardBySkippingBytes(header.getDirectoryMetadataTableOffset() - headerSize);
directoryMetadataTableLengthCheck();
directoryMetadataTableConstruct();
fastForwardBySkippingBytes(header.getFileMetadataTableOffset() - header.getFileHashTableOffset());
fileMetadataTableLengthCheck();
fileMetadataTableConstruct();
constructRootFilesystemEntry();
fileBufferedInputStream.close();
}
private void detectHeaderSize() throws Exception{
fileBufferedInputStream.mark(0x10);
byte[] lv6HeaderSizeRaw = new byte[0x8];
if (fileBufferedInputStream.read(lv6HeaderSizeRaw) != 0x8)
throw new Exception("Failed to read header size");
headerSize = Converter.getLEint(lv6HeaderSizeRaw, 0);
fileBufferedInputStream.reset();
}
private void constructHeader() throws Exception{
byte[] rawDataChunk = new byte[headerSize];
if (fileBufferedInputStream.read(rawDataChunk) != headerSize)
throw new Exception(String.format("Failed to read header (0x%x)", headerSize));
this.header = new Level6Header(rawDataChunk);
}
private void directoryMetadataTableLengthCheck() throws Exception{
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
}
private void directoryMetadataTableConstruct() throws Exception{
directoryMetadataTable = new byte[(int) header.getDirectoryMetadataTableLength()];
if (fileBufferedInputStream.read(directoryMetadataTable) != (int) header.getDirectoryMetadataTableLength())
throw new Exception("Failed to read "+header.getDirectoryMetadataTableLength());
}
private void fileMetadataTableLengthCheck() throws Exception{
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
}
private void fileMetadataTableConstruct() throws Exception{
fileMetadataTable = new byte[(int) header.getFileMetadataTableLength()];
if (fileBufferedInputStream.read(fileMetadataTable) != (int) header.getFileMetadataTableLength())
throw new Exception("Failed to read "+header.getFileMetadataTableLength());
}
private void constructRootFilesystemEntry() throws Exception{
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
//rootEntry.printTreeForDebug();
}
private void fastForwardBySkippingBytes(long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += fileBufferedInputStream.skip(mustSkip);
mustSkip = size - skipped;
}
}
Level6Header getHeader() { return header; }
FileSystemEntry getRootEntry(){ return rootEntry; }
byte[] getDirectoryMetadataTable() { return directoryMetadataTable; }
byte[] getFileMetadataTable() { return fileMetadataTable;}
}
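fastForwardBySkippingBytes() above loops because InputStream.skip() may skip fewer bytes than requested in a single call. The same idea as a stand-alone sketch (illustrative helper, not library code; a production version would also guard against skip() returning 0 at end of stream):

import java.io.IOException;
import java.io.InputStream;

class SkipFullySketch {
    // Keep calling skip() until the requested number of bytes has actually been skipped.
    static void skipFully(InputStream stream, long size) throws IOException {
        long skipped = 0;
        while (skipped < size)
            skipped += stream.skip(size - skipped);
    }
}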

View file

@ -0,0 +1,93 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.io.PipedOutputStream;
import java.nio.file.Files;
public class RomFsDecryptedContentRetrieve implements Runnable {
private final static Logger log = LogManager.getLogger(RomFsDecryptedContentRetrieve.class);
private final File parentFile;
private final PipedOutputStream streamOut;
private final long internalFileRealPosition;
private final long internalFileSize;
RomFsDecryptedContentRetrieve(File parentFile,
PipedOutputStream streamOut,
long internalFileRealPosition,
long internalFileSize){
this.parentFile = parentFile;
this.streamOut = streamOut;
this.internalFileRealPosition = internalFileRealPosition;
this.internalFileSize = internalFileSize;
}
@Override
public void run() {
log.trace("Executing thread");
try (BufferedInputStream bis = new BufferedInputStream(Files.newInputStream(parentFile.toPath()))){
fastForwardBySkippingBytes(bis, internalFileRealPosition);
int readPice = 8388608; // 8mb NOTE: consider switching to 1mb 1048576
long readFrom = 0;
byte[] readBuffer;
while (readFrom < internalFileSize) {
if (internalFileSize - readFrom < readPice)
readPice = Math.toIntExact(internalFileSize - readFrom); // it's safe, I guarantee
readBuffer = new byte[readPice];
if (bis.read(readBuffer) != readPice) {
log.error("getContent(): Unable to read requested size from file.");
return;
}
streamOut.write(readBuffer);
readFrom += readPice;
}
} catch (Exception exception) {
log.error("RomFsDecryptedProvider -> getContent(): Unable to provide stream", exception);
}
finally {
closeStreamOut();
}
log.trace("Thread died");
}
private void fastForwardBySkippingBytes(BufferedInputStream bis, long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += bis.skip(mustSkip);
mustSkip = size - skipped;
}
}
private void closeStreamOut(){
try {
streamOut.close();
}
catch (IOException e){
log.error("RomFsDecryptedProvider -> getContent(): Unable to close 'StreamOut'");
}
}
}

View file

@ -16,151 +16,61 @@
* You should have received a copy of the GNU General Public License * You should have received a copy of the GNU General Public License
* along with libKonogonka. If not, see <https://www.gnu.org/licenses/>. * along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/ */
package libKonogonka.Tools.RomFs; package libKonogonka.Tools.RomFs;
import libKonogonka.Tools.RomFs.view.DirectoryMetaTablePlainView;
import libKonogonka.Tools.RomFs.view.FileMetaTablePlainView;
import java.io.*; import java.io.*;
public class RomFsDecryptedProvider implements IRomFsProvider{ public class RomFsDecryptedProvider implements IRomFsProvider{
private final File file;
private final long level6Offset;
private final Level6Header level6Header;
private final FileSystemEntry rootEntry;
// Used only for debug
private final byte[] directoryMetadataTable;
private final byte[] fileMetadataTable;
private long level6Offset; public RomFsDecryptedProvider(File decryptedFsImageFile, long level6offset) throws Exception{
RomFsDecryptedConstruct construct = new RomFsDecryptedConstruct(decryptedFsImageFile, level6offset);
private File file;
private Level6Header header;
private FileSystemEntry rootEntry;
public RomFsDecryptedProvider(File decryptedFsImageFile, long level6Offset) throws Exception{
if (level6Offset < 0)
throw new Exception("Incorrect Level 6 Offset");
this.file = decryptedFsImageFile; this.file = decryptedFsImageFile;
this.level6Offset = level6offset;
this.level6Header = construct.getHeader();
this.rootEntry = construct.getRootEntry();
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(decryptedFsImageFile)); this.directoryMetadataTable = construct.getDirectoryMetadataTable();
this.fileMetadataTable = construct.getFileMetadataTable();
this.level6Offset = level6Offset;
skipBytes(bis, level6Offset);
byte[] rawDataChunk = new byte[0x50];
if (bis.read(rawDataChunk) != 0x50)
throw new Exception("Failed to read header (0x50)");
this.header = new Level6Header(rawDataChunk);
/*
// Print Dir Hash table as is:
long seekTo = header.getDirectoryHashTableOffset() - 0x50;
rawDataChunk = new byte[(int) header.getDirectoryHashTableLength()];
skipTo(bis, seekTo);
if (bis.read(rawDataChunk) != (int) header.getDirectoryHashTableLength())
throw new Exception("Failed to read Dir hash table");
RainbowDump.hexDumpUTF8(rawDataChunk);
// Print Files Hash table as is:
seekTo = header.getFileHashTableOffset() - header.getDirectoryMetadataTableOffset();
rawDataChunk = new byte[(int) header.getFileHashTableLength()];
skipTo(bis, seekTo);
if (bis.read(rawDataChunk) != (int) header.getFileHashTableLength())
throw new Exception("Failed to read Files hash table");
RainbowDump.hexDumpUTF8(rawDataChunk);
*/
// Read directories metadata
long locationInFile = header.getDirectoryMetadataTableOffset() - 0x50;
skipBytes(bis, locationInFile);
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
byte[] directoryMetadataTable = new byte[(int) header.getDirectoryMetadataTableLength()];
if (bis.read(directoryMetadataTable) != (int) header.getDirectoryMetadataTableLength())
throw new Exception("Failed to read "+header.getDirectoryMetadataTableLength());
// Read files metadata
locationInFile = header.getFileMetadataTableOffset() - header.getFileHashTableOffset(); // TODO: replace to 'CurrentPosition'?
skipBytes(bis, locationInFile);
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported operation.");
byte[] fileMetadataTable = new byte[(int) header.getFileMetadataTableLength()];
if (bis.read(fileMetadataTable) != (int) header.getFileMetadataTableLength())
throw new Exception("Failed to read "+header.getFileMetadataTableLength());
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
//printDebug(directoryMetadataTable, fileMetadataTable);
bis.close();
}
private void skipBytes(BufferedInputStream bis, long size) throws Exception{
long mustSkip = size;
long skipped = 0;
while (mustSkip > 0){
skipped += bis.skip(mustSkip);
mustSkip = size - skipped;
}
    }

    @Override
    public File getFile() { return file; }

    @Override
    public long getLevel6Offset() { return level6Offset; }

    @Override
    public Level6Header getHeader() { return level6Header; }

    @Override
    public FileSystemEntry getRootEntry() { return rootEntry; }

    @Override
    public PipedInputStream getContent(FileSystemEntry entry) throws Exception{
        if (entry.isDirectory())
            throw new Exception("Request of the binary stream for the folder entry is not supported (and doesn't make sense).");

        PipedOutputStream streamOut = new PipedOutputStream();
        PipedInputStream streamIn = new PipedInputStream(streamOut);

        long internalFileRealPosition = level6Offset + level6Header.getFileDataOffset() + entry.getFileOffset();
        long internalFileSize = entry.getFileSize();

        Thread contentRetrievingThread = new Thread(
                new RomFsDecryptedContentRetrieve(file, streamOut, internalFileRealPosition, internalFileSize));
        contentRetrievingThread.start();
        return streamIn;
    }

    @Override
    public void printDebug(){
        level6Header.printDebugInfo();
        new DirectoryMetaTablePlainView(level6Header.getDirectoryMetadataTableLength(), directoryMetadataTable);
        new FileMetaTablePlainView(level6Header.getFileMetadataTableLength(), fileMetadataTable);
        rootEntry.printTreeForDebug();
    }
}

@ -0,0 +1,180 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import java.io.File;
import java.io.RandomAccessFile;
import java.util.Arrays;
public class RomFsEncryptedConstruct {
private final long absoluteOffsetPosition;
private final long level6Offset;
private final RandomAccessFile raf;
private final AesCtrDecryptSimple decryptor;
private Level6Header header;
private byte[] directoryMetadataTable;
private byte[] fileMetadataTable;
private FileSystemEntry rootEntry;
RomFsEncryptedConstruct(File encryptedFsImageFile,
long romFsOffset,
long level6Offset,
AesCtrDecryptSimple decryptor,
long mediaStartOffset
) throws Exception {
if (level6Offset < 0)
throw new Exception("Incorrect Level 6 Offset");
this.raf = new RandomAccessFile(encryptedFsImageFile, "r");
this.level6Offset = level6Offset;
this.absoluteOffsetPosition = romFsOffset + (mediaStartOffset * 0x200);
this.decryptor = decryptor;
raf.seek(absoluteOffsetPosition + level6Offset);
//Go to Level 6 header position
decryptor.skipNext(level6Offset / 0x200);
constructHeader();
directoryMetadataTableLengthCheck();
directoryMetadataTableConstruct();
fileMetadataTableLengthCheck();
fileMetadataTableConstruct();
constructRootFilesystemEntry();
raf.close();
}
private void constructHeader() throws Exception{
// Decrypt data
byte[] encryptedBlock = new byte[0x200];
byte[] decryptedBlock;
if (raf.read(encryptedBlock) == 0x200)
decryptedBlock = decryptor.decryptNext(encryptedBlock);
else
throw new Exception("Failed to read header header (0x200 - block)");
this.header = new Level6Header(decryptedBlock);
}
private void directoryMetadataTableLengthCheck() throws Exception{
if (header.getDirectoryMetadataTableLength() < 0)
throw new Exception("Not supported: DirectoryMetadataTableLength < 0");
}
private void directoryMetadataTableConstruct() throws Exception{
directoryMetadataTable = readMetaTable(header.getDirectoryMetadataTableOffset(),
header.getDirectoryMetadataTableLength());
}
private void fileMetadataTableLengthCheck() throws Exception{
if (header.getFileMetadataTableLength() < 0)
throw new Exception("Not supported: FileMetadataTableLength < 0");
}
private void fileMetadataTableConstruct() throws Exception{
fileMetadataTable = readMetaTable(header.getFileMetadataTableOffset(),
header.getFileMetadataTableLength());
}
private void constructRootFilesystemEntry() throws Exception{
rootEntry = new FileSystemEntry(directoryMetadataTable, fileMetadataTable);
}
private byte[] readMetaTable(long metaOffset,
long metaSize) throws Exception{
byte[] encryptedBlock;
byte[] decryptedBlock;
byte[] metadataTable = new byte[(int) metaSize];
//0
decryptor.reset();
long startBlock = metaOffset / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
raf.seek(absoluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = metaOffset - startBlock * 0x200;
long currentPosition = 0;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
// Extra-small table that ends inside this first, partially skipped block
if ((0x200 - ignoreBytes) > metaSize){
metadataTable = Arrays.copyOfRange(decryptedBlock, (int)ignoreBytes, 0x200);
return metadataTable;
}
else {
System.arraycopy(decryptedBlock, (int) ignoreBytes, metadataTable, 0, 0x200 - (int) ignoreBytes);
currentPosition = 0x200 - ignoreBytes;
}
}
else {
throw new Exception("Unable to get 512 bytes from 1st bock for Directory/File Metadata Table");
}
startBlock++;
}
long endBlock = (metaSize + ignoreBytes) / 0x200 + startBlock; // <- points to the block where data related to this table ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (metaSize + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
System.arraycopy(decryptedBlock, 0, metadataTable, (int) currentPosition, 0x200);
}
else
throw new Exception("Unable to get 512 bytes from block for Directory/File Metadata Table");
startBlock++;
currentPosition += 0x200;
}
//4
if (extraData != 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
System.arraycopy(decryptedBlock, 0, metadataTable, (int) currentPosition, Math.abs(extraData));
}
else
throw new Exception("Unable to get 512 bytes from block for Directory/File Metadata Table");
}
return metadataTable;
}
Level6Header getHeader() { return header; }
FileSystemEntry getRootEntry(){ return rootEntry; }
byte[] getDirectoryMetadataTable() { return directoryMetadataTable; }
byte[] getFileMetadataTable() { return fileMetadataTable;}
}
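The numbered steps inside readMetaTable() all derive from the same 0x200-byte block arithmetic: which AES-CTR block the table starts in, how many leading bytes of that block to discard, and whether a partial block remains at the end. A standalone, illustrative sketch of that arithmetic follows; the class name and example values are not part of the library.

public class BlockWindowSketch {
    public static void main(String[] args) {
        long offset = 0x350;   // example: table begins 0x150 bytes into block 1
        long size = 0x120;     // example: table length

        long startBlock = offset / 0x200;                           // first block that touches the range
        long ignoreBytes = offset - startBlock * 0x200;             // leading bytes of that block to discard
        long endBlock = (size + ignoreBytes) / 0x200 + startBlock;  // block where range-related data ends
        int extraData = (int) ((endBlock - startBlock) * 0x200 - (size + ignoreBytes));
        if (extraData < 0)
            endBlock--;        // the partial tail is copied by a separate final read

        System.out.printf("startBlock=%d ignoreBytes=%d endBlock=%d extraData=%d%n",
                startBlock, ignoreBytes, endBlock, extraData);
    }
}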

@ -0,0 +1,138 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs;
import libKonogonka.ctraes.AesCtrDecryptSimple;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.File;
import java.io.PipedOutputStream;
import java.io.RandomAccessFile;
public class RomFsEncryptedContentRetrieve implements Runnable{
private final static Logger log = LogManager.getLogger(RomFsEncryptedContentRetrieve.class);
private final File parentFile;
private final PipedOutputStream streamOut;
private final long absoluteOffsetPosition;
private final AesCtrDecryptSimple decryptor;
private final long internalFileOffset;
private final long internalFileSize;
private final long level6Offset;
private final long headersFileDataOffset;
RomFsEncryptedContentRetrieve(File parentFile,
PipedOutputStream streamOut,
long absoluteOffsetPosition,
AesCtrDecryptSimple decryptor,
long internalFileOffset,
long internalFileSize,
long level6Offset,
long headersFileDataOffset
){
this.parentFile = parentFile;
this.absoluteOffsetPosition = absoluteOffsetPosition;
this.streamOut = streamOut;
this.decryptor = decryptor;
this.internalFileOffset = internalFileOffset;
this.internalFileSize = internalFileSize;
this.level6Offset = level6Offset;
this.headersFileDataOffset = headersFileDataOffset;
}
@Override
public void run() {
log.trace("Executing thread");
try {
byte[] encryptedBlock;
byte[] decryptedBlock;
RandomAccessFile raf = new RandomAccessFile(parentFile, "r");
//0
long startBlock = (internalFileOffset + headersFileDataOffset) / 0x200;
decryptor.skipNext(level6Offset / 0x200 + startBlock);
// long absoluteOffsetPosition = romFsOffsetPosition + (mediaStartOffset * 0x200); // calculated in constructor
raf.seek(absoluteOffsetPosition + level6Offset + startBlock * 0x200);
//1
long ignoreBytes = (internalFileOffset + headersFileDataOffset) - startBlock * 0x200;
if (ignoreBytes > 0) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
// Extra-small file that ends inside this first, partially skipped block
if ((0x200 - ignoreBytes) > internalFileSize){
streamOut.write(decryptedBlock, (int)ignoreBytes, (int) internalFileSize); // safe cast
raf.close();
streamOut.close();
return;
}
else {
streamOut.write(decryptedBlock, (int) ignoreBytes, 0x200 - (int) ignoreBytes);
}
}
else {
throw new Exception("Unable to get 512 bytes from 1st bock");
}
startBlock++;
}
long endBlock = (internalFileSize + ignoreBytes) / 0x200 + startBlock; // <- points to the block where data related to this file ends
//2
int extraData = (int) ((endBlock - startBlock)*0x200 - (internalFileSize + ignoreBytes));
if (extraData < 0)
endBlock--;
//3
while ( startBlock < endBlock ) {
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
streamOut.write(decryptedBlock);
}
else
throw new Exception("Unable to get 512 bytes from block");
startBlock++;
}
//4
if (extraData != 0){ // In case we didn't get what we want
encryptedBlock = new byte[0x200];
if (raf.read(encryptedBlock) == 0x200) {
decryptedBlock = decryptor.decryptNext(encryptedBlock);
streamOut.write(decryptedBlock, 0, Math.abs(extraData));
}
else
throw new Exception("Unable to get 512 bytes from block");
}
raf.close();
streamOut.close();
} catch (Exception exception) {
log.error("Unable to provide stream", exception);
}
log.trace("Thread died");
}
}
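The retriever writes into a PipedOutputStream on its own thread while the caller reads the matching PipedInputStream; piped streams block (and can deadlock) if both ends live on the same thread, which is why getContent() always hands the Runnable above to a new Thread. A minimal, generic sketch of that producer/consumer wiring using only standard java.io classes; the class name and payload are illustrative.

import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class PipedStreamSketch {
    public static void main(String[] args) throws Exception {
        PipedOutputStream out = new PipedOutputStream();
        PipedInputStream in = new PipedInputStream(out);

        // Producer side: fills the pipe on its own thread, exactly like the retriever above.
        Thread producer = new Thread(() -> {
            try {
                out.write("payload produced on a worker thread".getBytes());
                out.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        });
        producer.start();

        // Consumer side: blocks until the producer writes or closes the pipe.
        int b;
        while ((b = in.read()) != -1)
            System.out.print((char) b);
        in.close();
    }
}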

@ -19,274 +19,107 @@
package libKonogonka.Tools.RomFs;

import libKonogonka.Tools.RomFs.view.DirectoryMetaTablePlainView;
import libKonogonka.Tools.RomFs.view.FileMetaTablePlainView;
import libKonogonka.ctraes.AesCtrDecryptSimple;

import java.io.File;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

public class RomFsEncryptedProvider implements IRomFsProvider{
    private final File file;
    private final long level6Offset;
    private final Level6Header level6Header;
    private final FileSystemEntry rootEntry;

    private final byte[] key;            // Used @ createDecryptor only
    private final byte[] sectionCTR;     // Used @ createDecryptor only
    private final long mediaStartOffset; // Used @ createDecryptor only

    private final long absoluteOffsetPosition;
    //private long mediaEndOffset; // We know this, but actually never use

    // Used only for debug
    private final byte[] directoryMetadataTable;
    private final byte[] fileMetadataTable;

    public RomFsEncryptedProvider(long level6Offset,
                                  File encryptedFsImageFile,
                                  long romFsOffsetPosition,
                                  byte[] key,
                                  byte[] sectionCTR,
                                  long mediaStartOffset
                                  ) throws Exception{
        this(level6Offset, encryptedFsImageFile, romFsOffsetPosition, key, sectionCTR, mediaStartOffset, -1);
    }

    public RomFsEncryptedProvider(long level6Offset,
                                  File encryptedFsImageFile,
                                  long romFsOffsetPosition,
                                  byte[] key,
                                  byte[] sectionCTR,
                                  long mediaStartOffset,
                                  long mediaEndOffset
                                  ) throws Exception{
        this.key = key;
        this.sectionCTR = sectionCTR;
        this.mediaStartOffset = mediaStartOffset;

        RomFsEncryptedConstruct construct = new RomFsEncryptedConstruct(encryptedFsImageFile,
                romFsOffsetPosition,
                level6Offset,
                createDecryptor(),
                mediaStartOffset);
        this.file = encryptedFsImageFile;
        this.level6Offset = level6Offset;
        this.level6Header = construct.getHeader();
        this.rootEntry = construct.getRootEntry();

        this.absoluteOffsetPosition = romFsOffsetPosition + (mediaStartOffset * 0x200);

        this.directoryMetadataTable = construct.getDirectoryMetadataTable();
        this.fileMetadataTable = construct.getFileMetadataTable();
    }

    private AesCtrDecryptSimple createDecryptor() throws Exception{
        return new AesCtrDecryptSimple(key, sectionCTR, mediaStartOffset * 0x200);
    }

    @Override
    public File getFile() { return file; }

    @Override
    public long getLevel6Offset() { return level6Offset; }

    @Override
    public Level6Header getHeader() { return level6Header; }

    @Override
    public FileSystemEntry getRootEntry() { return rootEntry; }

    @Override
    public PipedInputStream getContent(FileSystemEntry entry) throws Exception{
        if (entry.isDirectory())
            throw new Exception("Request of the binary stream for the folder entry is not supported (and doesn't make sense).");

        PipedOutputStream streamOut = new PipedOutputStream();
        PipedInputStream streamIn = new PipedInputStream(streamOut);

        long internalFileOffset = entry.getFileOffset();
        long internalFileSize = entry.getFileSize();

        Thread contentRetrievingThread = new Thread(new RomFsEncryptedContentRetrieve(
                file,
                streamOut,
                absoluteOffsetPosition,
                createDecryptor(),
                internalFileOffset,
                internalFileSize,
                level6Offset,
                level6Header.getFileDataOffset()
        ));
        contentRetrievingThread.start();
        return streamIn;
    }

    @Override
    public void printDebug(){
        level6Header.printDebugInfo();
        new DirectoryMetaTablePlainView(level6Header.getDirectoryMetadataTableLength(), directoryMetadataTable);
        new FileMetaTablePlainView(level6Header.getFileMetadataTableLength(), fileMetadataTable);
        rootEntry.printTreeForDebug();
    }
}
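With the construct and retriever split out, the provider itself is mostly wiring: RomFsEncryptedConstruct parses the Level 6 structures once in the constructor, and each getContent() call starts a RomFsEncryptedContentRetrieve worker behind a piped stream. A hedged consumer-side sketch of the API as it appears in this commit follows; the path, offsets and key material are placeholders normally supplied by the surrounding NCA parsing code, not values this class provides.

import libKonogonka.Tools.RomFs.FileSystemEntry;
import libKonogonka.Tools.RomFs.IRomFsProvider;
import libKonogonka.Tools.RomFs.RomFsEncryptedProvider;

import java.io.BufferedInputStream;
import java.io.File;

public class RomFsUsageSketch {
    public static void main(String[] args) throws Exception {
        // Placeholders: in practice these come from NCA section parsing.
        File encryptedImage = new File("FilesForTests/PFS_RomFS.nca");
        long level6Offset = 0;
        long romFsOffset = 0;
        long mediaStartOffset = 0;
        byte[] key = new byte[0x10];
        byte[] sectionCTR = new byte[0x10];

        IRomFsProvider romFs = new RomFsEncryptedProvider(level6Offset, encryptedImage,
                romFsOffset, key, sectionCTR, mediaStartOffset);

        for (FileSystemEntry entry : romFs.getRootEntry().getContent()) {
            if (entry.isDirectory())
                continue;
            // getContent() starts a RomFsEncryptedContentRetrieve thread; this side only reads.
            try (BufferedInputStream content = new BufferedInputStream(romFs.getContent(entry))) {
                byte[] block = new byte[0x200];
                while (content.read(block) != -1) {
                    // consume decrypted bytes of 'entry' here
                }
            }
        }
    }
}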

@ -0,0 +1,82 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Converter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.RainbowDump.formatDecHexString;
public class DirectoryMetaTablePlainView {
private final static Logger log = LogManager.getLogger(DirectoryMetaTablePlainView.class);
// directoryMetadataTableLength must be passed explicitly because directoryMetadataTable may be larger than the declared size when it was assembled from encrypted 0x200-byte blocks
public DirectoryMetaTablePlainView(long directoryMetadataTableLength, byte[] directoryMetadataTable){
int i = 0;
while (i < directoryMetadataTableLength){
FolderMeta folderMeta = new FolderMeta();
folderMeta.parentDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextSiblingDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstSubdirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.firstFileOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.nextDirectoryOffset = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirNameLength = Converter.getLEint(directoryMetadataTable, i);
i += 4;
folderMeta.dirName = new String(Arrays.copyOfRange(directoryMetadataTable, i, i + folderMeta.dirNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(folderMeta.dirNameLength);
log.debug(
"- DIRECTORY -\n" +
"Offset of Parent Directory (self if Root) " + formatDecHexString(folderMeta.parentDirectoryOffset ) +"\n" +
"Offset of next Sibling Directory " + formatDecHexString(folderMeta.nextSiblingDirectoryOffset) +"\n" +
"Offset of first Child Directory (Subdirectory) " + formatDecHexString(folderMeta.firstSubdirectoryOffset ) +"\n" +
"Offset of first File (in File Metadata Table) " + formatDecHexString(folderMeta.firstFileOffset ) +"\n" +
"Offset of next Directory in the same Hash Table bucket " + formatDecHexString(folderMeta.nextDirectoryOffset ) +"\n" +
"Name Length " + formatDecHexString(folderMeta.dirNameLength ) +"\n" +
"Name Length (rounded up to multiple of 4) " + folderMeta.dirName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FolderMeta {
int parentDirectoryOffset;
int nextSiblingDirectoryOffset;
int firstSubdirectoryOffset;
int firstFileOffset;
int nextDirectoryOffset;
int dirNameLength;
String dirName;
}
}
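Names in both metadata tables are padded so that every record stays 4-byte aligned, which is why the parser advances by getRealNameSize() rather than by the raw name length. A tiny illustrative check of that rounding (not part of the library):

public class NamePaddingSketch {
    // Rounds a name length up to the next multiple of 4, as getRealNameSize() does.
    static int roundUpTo4(int value) {
        if (value % 4 == 0)
            return value;
        return value + 4 - value % 4;
    }

    public static void main(String[] args) {
        System.out.println(roundUpTo4(5));  // 8
        System.out.println(roundUpTo4(8));  // 8
        System.out.println(roundUpTo4(9));  // 12
    }
}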

@ -0,0 +1,81 @@
/*
Copyright 2019-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Converter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import static libKonogonka.RainbowDump.formatDecHexString;
public class FileMetaTablePlainView {
private final static Logger log = LogManager.getLogger(FileMetaTablePlainView.class);
public FileMetaTablePlainView(long fileMetadataTableLength, byte[] fileMetadataTable) {
int i = 0;
while (i < fileMetadataTableLength) {
FileMeta fileMeta = new FileMeta();
fileMeta.containingDirectoryOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.nextSiblingFileOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileDataOffset = Converter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.fileDataLength = Converter.getLElong(fileMetadataTable, i);
i += 8;
fileMeta.nextFileOffset = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileNameLength = Converter.getLEint(fileMetadataTable, i);
i += 4;
fileMeta.fileName = new String(Arrays.copyOfRange(fileMetadataTable, i, i + fileMeta.fileNameLength), StandardCharsets.UTF_8);
i += getRealNameSize(fileMeta.fileNameLength);
log.debug(
"- FILE -\n" +
"Offset of Containing Directory " + formatDecHexString(fileMeta.containingDirectoryOffset) + "\n" +
"Offset of next Sibling File " + formatDecHexString(fileMeta.nextSiblingFileOffset) + "\n" +
"Offset of File's Data " + formatDecHexString(fileMeta.fileDataOffset) + "\n" +
"Length of File's Data " + formatDecHexString(fileMeta.fileDataLength) + "\n" +
"Offset of next File in the same Hash Table bucket " + formatDecHexString(fileMeta.nextFileOffset) + "\n" +
"Name Length " + formatDecHexString(fileMeta.fileNameLength) + "\n" +
"Name Length (rounded up to multiple of 4) " + fileMeta.fileName + "\n"
);
}
}
private int getRealNameSize(int value){
if (value % 4 == 0)
return value;
return value + 4 - value % 4;
}
private static class FileMeta{
int containingDirectoryOffset;
int nextSiblingFileOffset;
long fileDataOffset;
long fileDataLength;
int nextFileOffset;
int fileNameLength;
String fileName;
}
}

@ -0,0 +1,65 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.Tools.RomFs.view;
import libKonogonka.Tools.RomFs.FileSystemEntry;
import java.util.List;
/**
 * Used together with FileSystemEntry
* */
public class FileSystemTreeViewMaker {
private StringBuilder tree;
private int spacerForSizes;
private FileSystemTreeViewMaker(){}
private void init(List<FileSystemEntry> content){
tree = new StringBuilder("/\n");
for (FileSystemEntry entry: content)
printEntry(2, entry);
}
private void printEntry(int count, FileSystemEntry entry) {
int i;
for (i = 0; i < count; i++)
tree.append(" ");
if (entry.isDirectory()) {
tree.append("|-");
tree.append(entry.getName());
tree.append("\n");
for (FileSystemEntry e : entry.getContent())
printEntry(count + 2, e);
return;
}
tree.append("|-");
tree.append(entry.getName());
tree.append(String.format("%"+(spacerForSizes-entry.getName().length()-i)+"s0x%-10x 0x%-10x", "", entry.getFileOffset(), entry.getFileSize()));
tree.append("\n");
}
public static String make(List<FileSystemEntry> content, int spacerForSizes){
FileSystemTreeViewMaker maker = new FileSystemTreeViewMaker();
maker.spacerForSizes = spacerForSizes;
maker.init(content);
return maker.tree.toString();
}
}
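FileSystemTreeViewMaker is purely a formatter: it walks a list of FileSystemEntry objects and renders an indented tree with offset and size columns. A short usage sketch, assuming a FileSystemEntry tree has already been built and using an arbitrary spacer value of 100:

// rootEntry is assumed to be an already-constructed FileSystemEntry; 100 is an arbitrary column spacer.
String tree = FileSystemTreeViewMaker.make(rootEntry.getContent(), 100);
System.out.print(tree);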

@ -20,11 +20,11 @@ package libKonogonka.Tools.TIK;
import java.io.BufferedInputStream;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;

import static libKonogonka.Converter.*;

/*
    DON'T TRUST WIKI. Ticket size always (?) equal 0x02c0 (704 bytes)
@ -67,41 +67,41 @@ Next:
 * */
public class TIKProvider {
    // Signature-related
    private final byte[] sigType;
    private final byte[] signature;
    // Ticket
    private final String Issuer;
    private final byte[] TitleKeyBlockStartingBytes;  // Actually 32 bytes.
    private final byte[] TitleKeyBlockEndingBytes;    // Everything left
    private final byte Unknown1;
    private final byte TitleKeyType;
    private final byte[] Unknown2;
    private final byte MasterKeyRevision;
    private final byte[] Unknown3;
    private final byte[] TicketId;
    private final byte[] DeviceId;
    private final byte[] RightsId;
    //private byte[] RightsIdEndingBytes;
    private final byte[] AccountId;
    private final byte[] Unknown4;

    public TIKProvider(File file) throws Exception{ this(file, 0); }

    public TIKProvider(File file, long offset) throws Exception {
        if (file.length() - offset < 0x02c0)
            throw new Exception("File is too small.");

        BufferedInputStream bis = new BufferedInputStream(Files.newInputStream(file.toPath()));

        if (bis.skip(offset) != offset) {
            bis.close();
            throw new Exception("Unable to skip requested range - " + offset);
        }

        sigType = new byte[0x4];
        if (bis.read(sigType) != 4) {
            bis.close();
            throw new Exception("Unable to read requested range - " + offset);
        }

        byte[] readChunk;
@ -112,7 +112,7 @@ public class TIKProvider {
                readChunk = new byte[0x23c];
                if (bis.read(readChunk) != 0x23c) {
                    bis.close();
                    throw new Exception("Unable to read requested range - 0x23c");
                }
                signature = Arrays.copyOfRange(readChunk, 0, 0x200);
                break;
@ -121,7 +121,7 @@ public class TIKProvider {
                readChunk = new byte[0x13c];
                if (bis.read(readChunk) != 0x13c) {
                    bis.close();
                    throw new Exception("Unable to read requested range - 0x13c");
                }
                signature = Arrays.copyOfRange(readChunk, 0, 0x100);
                break;
@ -130,20 +130,20 @@ public class TIKProvider {
                readChunk = new byte[0x7c];
                if (bis.read(readChunk) != 0x7c) {
                    bis.close();
                    throw new Exception("Unable to read requested range - 0x7c");
                }
                signature = Arrays.copyOfRange(readChunk, 0, 0x3c);
                break;
            default:
                bis.close();
                throw new Exception("Unknown ticket (Signature) type. Aborting.");
        }

        // Let's read ticket body itself
        readChunk = new byte[0x180];
        if (bis.read(readChunk) != 0x180) {
            bis.close();
            throw new Exception("Unable to read requested range - Ticket data");
        }

        bis.close();
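A brief, hedged construction example for the ticket parser as the constructors above define it; the path is a placeholder and the accessors are not shown in this hunk, so only parsing is illustrated (assumes a surrounding method that may throw Exception):

// Placeholder path; parsing throws if the file is shorter than 0x2c0 bytes or the signature type is unknown.
TIKProvider ticket = new TIKProvider(new File("FilesForTests/example.tik"), 0);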

@ -24,7 +24,7 @@ import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import static libKonogonka.Converter.*;

/**
 * HFS0

@ -22,8 +22,8 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;

/**
 * Header information
 * */

@ -23,8 +23,8 @@ import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import java.util.Arrays;

import static libKonogonka.Converter.getLEint;
import static libKonogonka.Converter.getLElong;

/**
 * Gamecard Info

@ -18,7 +18,9 @@
 */
package libKonogonka.ctraes;

import libKonogonka.Converter;
import libKonogonka.RainbowDump;

/**
 * Simplify decryption of the CTR
 */
@ -28,13 +30,15 @@ public class AesCtrDecryptSimple {
    private byte[] IVarray;
    private AesCtr aesCtr;

    private final byte[] initialKey;
    private final byte[] initialSectionCTR;
    private final long initialRealMediaOffset;

    public AesCtrDecryptSimple(byte[] key, byte[] sectionCTR, long realMediaOffset) throws Exception{
        this.initialKey = key;
        this.initialSectionCTR = sectionCTR;
        this.initialRealMediaOffset = realMediaOffset;
        reset();
    }

    public void skipNext(){
@ -42,22 +46,30 @@ public class AesCtrDecryptSimple {
    }

    public void skipNext(long blocksNum){
        if (blocksNum > 0)
            realMediaOffset += blocksNum * 0x200;
    }

    public byte[] decryptNext(byte[] encryptedBlock) throws Exception{
        updateIV();
        byte[] decryptedBlock = aesCtr.decrypt(encryptedBlock, IVarray);
        realMediaOffset += 0x200;
        return decryptedBlock;
    }

    // Populate last 8 bytes calculated. Thanks hactool project!
    private void updateIV(){
        long offset = realMediaOffset >> 4;
        for (int i = 0; i < 0x8; i++){
            IVarray[0x10-i-1] = (byte)(offset & 0xff); // Note: issues could be here
            offset >>= 8;
        }
    }

    public void reset() throws Exception{
        realMediaOffset = initialRealMediaOffset;
        aesCtr = new AesCtr(initialKey);
        // IV for CTR == 16 bytes
        IVarray = new byte[0x10];
        // Populate first 4 bytes taken from Header's section Block CTR (aka SecureValue)
        System.arraycopy(Converter.flip(initialSectionCTR), 0x0, IVarray, 0x0, 0x4);
    }
}
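The decryptor's contract, as the RomFs code above uses it, is one 0x200-byte block per decryptNext() call, with skipNext() keeping the counter aligned for blocks that are never read and reset() rewinding to the initial counter. A minimal sketch under placeholder key and counter values (not real key material):

import libKonogonka.ctraes.AesCtrDecryptSimple;

public class DecryptorSketch {
    public static void main(String[] args) throws Exception {
        byte[] key = new byte[0x10];          // placeholder key material
        byte[] sectionCTR = new byte[0x10];   // placeholder section counter
        long level6Offset = 0x4000;           // placeholder offset

        AesCtrDecryptSimple decryptor = new AesCtrDecryptSimple(key, sectionCTR, 0);
        decryptor.skipNext(level6Offset / 0x200);   // advance the counter past blocks we do not read
        byte[] encryptedBlock = new byte[0x200];    // one 0x200-byte block taken from the image
        byte[] decryptedBlock = decryptor.decryptNext(encryptedBlock);
        System.out.println(decryptedBlock.length);
        decryptor.reset();                          // rewind to the initial counter before re-reading
    }
}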

@ -0,0 +1,84 @@
## LEVELS ARE:
# * ALL
# * TRACE
# * DEBUG
# * INFO
# * WARN
# * ERROR
# * FATAL
# * OFF
#############
# Extra logging related to initialization of Log4j
# Set to debug or trace if log4j initialization is failing
status = error
# Name of the configuration
name = DebugConfigDevelopmentOnlyVerbose
# Configure root logger level
rootLogger.level = TRACE
# Root logger referring to console appender
rootLogger.appenderRef.stdout.ref = consoleLogger
# Console appender configuration
appender.console.type = Console
appender.console.name = consoleLogger
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
##################################################
# # Enable log to files
# rootLogger.appenderRef.rolling.ref = fileLogger
# # Log files location
# property.basePath = /tmp
# property.filename = libKonogonka
# # RollingFileAppender name, pattern, path and rollover policy
# appender.rolling.type = RollingFile
# appender.rolling.name = fileLogger
# appender.rolling.fileName= ${basePath}/${filename}.log
# appender.rolling.filePattern= ${basePath}/${filename}_%d{yyyyMMdd}.log.gz
# appender.rolling.layout.type = PatternLayout
# appender.rolling.layout.pattern = %d{yyyy-MM-dd HH:mm:ss.SSS} %level [%t] [%l] - %msg%n
# appender.rolling.policies.type = Policies
#
# # RollingFileAppender rotation policy
# appender.rolling.policies.size.type = SizeBasedTriggeringPolicy
# appender.rolling.policies.size.size = 10MB
# # Setting for time-based policies
# #appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
# #appender.rolling.policies.time.interval = 1
# #appender.rolling.policies.time.modulate = true
# appender.rolling.strategy.type = DefaultRolloverStrategy
# appender.rolling.strategy.delete.type = Delete
# appender.rolling.strategy.delete.basePath = ${basePath}
# appender.rolling.strategy.delete.maxDepth = 10
# appender.rolling.strategy.delete.ifLastModified.type = IfLastModified
#
# # Delete all files older than 30 days
# appender.rolling.strategy.delete.ifLastModified.age = 30d
#
##################################################
#
# # Redirect log messages to a log file, support file rolling.
# appender.file.type = RollingFile
# appender.file.name = File
# appender.file.fileName = /opt/IBM/configurator_logs/${filename}
# appender.file.filePattern = /opt/IBM/configurator_logs/${filename}.%i
# appender.file.layout.type = PatternLayout
# appender.file.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1} - %m%n
# appender.file.policies.type = Policies
# appender.file.policies.size.type = SizeBasedTriggeringPolicy
# appender.file.policies.size.size=5MB
# appender.file.strategy.type = DefaultRolloverStrategy
# appender.file.strategy.max = 10
#
# rootLogger.appenderRefs = file, console
# rootLogger.appenderRef.console.ref = STDOUT
# rootLogger.appenderRef.file.ref = File
#
# loggers = file
#
# logger.file.name = com.comergent.apps.configurator.initializer.FunctionLoader
# logger.file.level = debug
# logger.file.additivity = false
# logger.file.appenderRef.file.ref = File
#
##################################################
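This properties file only takes effect when log4j2 finds it on the classpath (typically under src/main/resources or src/test/resources). A minimal usage sketch matching the console appender configured above; the class name is illustrative:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggingSketch {
    private final static Logger log = LogManager.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        log.trace("visible because rootLogger.level = TRACE");
        log.error("always visible with this configuration", new Exception("example"));
    }
}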

@ -0,0 +1,85 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import libKonogonka.KeyChainHolder;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Map;
public class KeyChainHolderTest {
private static final String keysFileLocation = "./FilesForTests/prod.keys";
private static final String xci_header_keyFileLocation = "./FilesForTests/xci_header_key.txt";
private KeyChainHolder keyChainHolder;
@Disabled
@DisplayName("Key Chain Holder Test")
@Test
void keysChain() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(xci_header_keyFileLocation));
String keyValue = br.readLine();
br.close();
if (keyValue == null)
throw new Exception("Unable to retrieve xci_header_key");
keyValue = keyValue.trim();
keyChainHolder = new KeyChainHolder(keysFileLocation, keyValue);
}
void printXciHeaderKey(){
System.out.println("-=== xci_header ===-");
System.out.println(keyChainHolder.getXci_header_key());
}
void printKAKApplication(){
System.out.println("-=== key_area_key_application test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_application().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKOcean(){
System.out.println("-=== key_area_key_ocean test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_ocean().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKSystem(){
System.out.println("-=== key_area_key_system test ===-");
for (Map.Entry entry : keyChainHolder.getKey_area_key_system().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printKAKTitleKek(){
System.out.println("-=== titlekek test ===-");
for (Map.Entry entry : keyChainHolder.getTitlekek().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
void printRawKeySet(){
System.out.println("-=== Raw Key Set (everything) test ===-");
for (Map.Entry entry : keyChainHolder.getRawKeySet().entrySet()){
System.out.println(entry.getKey() + " - " + entry.getValue());
}
}
}

@ -0,0 +1,69 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import java.io.File;
import java.nio.file.Path;
import libKonogonka.Tools.RomFs.RomFsDecryptedProvider;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
// log.fatal("Configuration File Defined To Be :: "+System.getProperty("log4j.configurationFile"));
public class RomFsDecryptedTest {
@TempDir
Path mainLogsDir;
private static final String decryptedFileAbsolutePath = "./FilesForTests/NCAContent_0 [lv6 147456].bin";
private File decryptedFile;
long lv6offset;
RomFsDecryptedProvider provider;
@Disabled
@DisplayName("RomFsDecryptedProvider: tests")
@Test
void romFsValidation() throws Exception{
makeFile();
parseLv6offsetFromFileName();
makeProvider();
provider.printDebug();
}
void makeFile(){
decryptedFile = new File(decryptedFileAbsolutePath);
}
void parseLv6offsetFromFileName(){
lv6offset = Long.parseLong(decryptedFile.getName().replaceAll("(^.*lv6\\s)|(]\\.bin)", ""));
}
void makeProvider() throws Exception{
provider = new RomFsDecryptedProvider(decryptedFile, lv6offset);
}
/*
void checkFilesWorkers(){
assertTrue(fw1 instanceof WorkerFiles);
assertTrue(fw2 instanceof WorkerFiles);
assertTrue(fw3 instanceof WorkerFiles);
}
*/
}

@ -0,0 +1,96 @@
/*
Copyright 2018-2022 Dmitry Isaenko
This file is part of libKonogonka.
libKonogonka is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
libKonogonka is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with libKonogonka. If not, see <https://www.gnu.org/licenses/>.
*/
package libKonogonka.RomFsDecrypted;
import libKonogonka.KeyChainHolder;
import libKonogonka.Tools.NCA.NCAHeaderTableEntry;
import libKonogonka.Tools.NCA.NCAProvider;
import libKonogonka.Tools.NCA.NCASectionTableBlock.NcaFsHeader;
import org.junit.jupiter.api.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
public class RomFsEncryptedTest {
private static final String keysFileLocation = "./FilesForTests/prod.keys";
private static final String xci_header_keyFileLocation = "./FilesForTests/xci_header_key.txt";
private static final String ncaFileLocation = "./FilesForTests/PFS_RomFS.nca";
private static KeyChainHolder keyChainHolder;
private static NCAProvider ncaProvider;
@Disabled
@Order(1)
@DisplayName("KeyChain test")
@Test
void keysChain() throws Exception{
BufferedReader br = new BufferedReader(new FileReader(xci_header_keyFileLocation));
String keyValue = br.readLine();
br.close();
if (keyValue == null)
throw new Exception("Unable to retrieve xci_header_key");
keyValue = keyValue.trim();
keyChainHolder = new KeyChainHolder(keysFileLocation, keyValue);
}
@Disabled
@Order(2)
@DisplayName("RomFsEncryptedProvider: NCA provider quick test")
@Test
void ncaProvider() throws Exception{
ncaProvider = new NCAProvider(new File(ncaFileLocation), keyChainHolder.getRawKeySet());
}
@Disabled
@Order(3)
@DisplayName("RomFsEncryptedProvider: RomFs test")
@Test
void romFsValidation() throws Exception{
for (byte i = 0; i < 4; i++){
if (ncaProvider.getSectionBlock(i).getFsType() == 0 && ncaProvider.getSectionBlock(i).getCryptoType() != 0){
ncaProvider.getNCAContentProvider(i).getRomfs().printDebug();
ncaProvider.getSectionBlock(i).printDebug();
return;
}
}
}
@Disabled
@Order(4)
@DisplayName("RomFsEncryptedProvider: NCA Header Table Entries test")
@Test
void NcaHeaderTableEntryValidation() throws Exception{
for (byte i = 0; i < 4; i++){
NcaFsHeader header = ncaProvider.getSectionBlock(i);
if (header != null)
header.printDebug();
}
}
@Disabled
@Order(5)
@DisplayName("RomFsEncryptedProvider: PFS test")
@Test
void pfsValidation(){
//1 PFS and cryptotype != 0
}
}