debugging in progress...

This commit is contained in:
Szum123321 2022-11-28 18:53:55 +01:00
parent 993d6359ad
commit 2053df7311
8 changed files with 109 additions and 13 deletions

View File

@ -46,8 +46,8 @@ dependencies {
modImplementation("com.terraformersmc:modmenu:${project.modmenu_version}")
//General compression library
implementation "org.apache.commons:commons-compress:1.21"
include "org.apache.commons:commons-compress:1.21"
implementation "org.apache.commons:commons-compress:1.22"
include "org.apache.commons:commons-compress:1.22"
//LZMA support
implementation 'org.tukaani:xz:1.9'

View File

@ -21,11 +21,14 @@ package net.szum123321.textile_backup;
import net.minecraft.server.MinecraftServer;
import net.szum123321.textile_backup.core.digest.Hash;
import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.XorSeaHash;
import net.szum123321.textile_backup.core.create.MakeBackupRunnable;
import net.szum123321.textile_backup.core.restore.AwaitThread;
import org.apache.commons.io.FileUtils;
import org.tukaani.xz.check.CRC64;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.format.DateTimeFormatter;
@ -41,7 +44,31 @@ public class Globals {
public static final Globals INSTANCE = new Globals();
private static final TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
public static final DateTimeFormatter defaultDateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH.mm.ss");
// Factory for per-file hashers. Each call returns a fresh, single-use,
// NON-thread-safe Hash backed by the xz library's CRC64 implementation.
// (The previous XorSeaHash-based declaration was removed; keeping both
// would be a duplicate field definition.)
public static final Supplier<Hash> CHECKSUM_SUPPLIER = () -> new Hash() {
    private final CRC64 crc = new CRC64();
    // Reusable scratch buffer: avoids allocating a one-element array for
    // every single-byte update. Safe because a Hash instance is used by a
    // single stream and is not shared between threads.
    private final byte[] single = new byte[1];

    @Override
    public void update(byte b) {
        single[0] = b;
        crc.update(single);
    }

    @Override
    public void update(long b) {
        // NOTE(review): longs are folded in little-endian here, while
        // getValue() below reads the digest big-endian (ByteBuffer default).
        // Presumably intentional as long as every party computes the value
        // the same way — confirm both sides of any comparison agree.
        ByteBuffer v = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN);
        v.putLong(b);
        crc.update(v.array());
    }

    @Override
    public void update(byte[] b, int off, int len) {
        crc.update(b, off, len);
    }

    @Override
    public long getValue() {
        // CRC64.finish() yields the 8-byte digest; interpret it as one long.
        return ByteBuffer.wrap(crc.finish()).getLong();
    }
};
private ExecutorService executorService = null;// = Executors.newSingleThreadExecutor();
public final AtomicBoolean globalShutdownBackupFlag = new AtomicBoolean(true);

View File

@ -19,6 +19,8 @@
package net.szum123321.textile_backup.core.digest;
import net.szum123321.textile_backup.Globals;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.CompressionStatus;
import java.io.IOException;
@ -26,11 +28,13 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
public class FileTreeHashBuilder {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final Object lock = new Object();
private long hash = 0, filesProcessed = 0, filesTotalSize = 0;
public void update(Path path, long newHash) throws IOException {
if(path.getFileName().toString().equals(CompressionStatus.DATA_FILENAME)) return;
//log.info("Putting: {}, {}", path, newHash);
var hasher = Globals.CHECKSUM_SUPPLIER.get();

View File

@ -20,6 +20,10 @@ package net.szum123321.textile_backup.core.digest;
public interface Hash {
void update(byte b);
default void update(int b) {
update((byte)b);
}
void update(long b);
default void update(byte[] b) {
update(b, 0, b.length);

View File

@ -19,6 +19,8 @@
package net.szum123321.textile_backup.core.digest;
import net.szum123321.textile_backup.Globals;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.DataLeftException;
import net.szum123321.textile_backup.core.create.BrokenFileHandler;
import org.jetbrains.annotations.NotNull;
@ -29,11 +31,24 @@ import java.nio.file.Path;
//This class calculates a hash of the file on the input stream and submits it to FileTreeHashBuilder.
//If the underlying stream hasn't been read completely by close(), the file is handed to BrokenFileHandler.
public class HashingInputStream extends FilterInputStream {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
private final Path path;
private final Hash hasher = Globals.CHECKSUM_SUPPLIER.get();
private final FileTreeHashBuilder hashBuilder;
private final BrokenFileHandler brokenFileHandler;
private int cnt = 0;
@Override
public synchronized void reset() throws IOException {
    log.info("Called reset! {}", path);
    // mark/reset is unsupported (markSupported() returns false below). The
    // InputStream contract requires reset() to throw in that case; silently
    // doing nothing would leave the stream position — and thus the computed
    // hash — quietly wrong for any caller that relied on the reset.
    throw new IOException("mark/reset not supported");
}

// This stream never supports mark/reset: rewinding would desynchronize the
// running hash from the bytes actually consumed.
@Override
public boolean markSupported() {
    return false;
}
public HashingInputStream(InputStream in, Path path, FileTreeHashBuilder hashBuilder, BrokenFileHandler brokenFileHandler) {
super(in);
this.path = path;
@ -44,21 +59,34 @@ public class HashingInputStream extends FilterInputStream {
// Reads up to {@code len} bytes into {@code b}, folding every byte actually
// read into the running hash and the byte counter.
@Override
public int read(byte @NotNull [] b, int off, int len) throws IOException {
    final int bytesRead = in.read(b, off, len);
    if (bytesRead == -1) return -1;  // EOF: nothing to hash
    hasher.update(b, off, bytesRead);
    cnt += bytesRead;
    return bytesRead;
}
// Single-byte read: hashes and counts the byte unless the stream is at EOF.
@Override
public int read() throws IOException {
    final int value = in.read();
    if (value != -1) {
        hasher.update(value);
        cnt++;
    }
    return value;
}
@Override
public void close() throws IOException {
    // If the stream was drained, commit the hash; otherwise report the file
    // as broken (its hash would not describe the full contents).
    // NOTE(review): available() == 0 is used as an EOF proxy here — for some
    // InputStream implementations available() can be 0 before EOF; confirm
    // the wrapped streams guarantee otherwise.
    try {
        if (in.available() == 0) {
            long val = hasher.getValue();
            hashBuilder.update(path, val);
            log.info("Read in {}, of {}, with hash {}", path, cnt, val);
        } else {
            brokenFileHandler.handle(path, new DataLeftException(in.available()));
        }
    } finally {
        // Always release the underlying stream, even if the hash submission
        // or the broken-file handler throws — otherwise the file descriptor
        // would leak.
        super.close();
    }
}
}

View File

@ -19,6 +19,8 @@
package net.szum123321.textile_backup.core.digest;
import net.szum123321.textile_backup.Globals;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import org.jetbrains.annotations.NotNull;
import java.io.FilterOutputStream;
@ -27,31 +29,47 @@ import java.io.OutputStream;
import java.nio.file.Path;
/**
 * Output-stream decorator that hashes every byte written through it and, on
 * close, submits the final digest for {@code path} to the shared
 * {@link FileTreeHashBuilder}.
 */
public class HashingOutputStream extends FilterOutputStream {
    private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);

    private final Path path;
    private final Hash hasher = Globals.CHECKSUM_SUPPLIER.get();
    private final FileTreeHashBuilder hashBuilder;
    // Total number of bytes written, for the diagnostic log on close.
    private long cnt = 0;

    public HashingOutputStream(OutputStream out, Path path, FileTreeHashBuilder hashBuilder) {
        super(out);
        this.path = path;
        this.hashBuilder = hashBuilder;
    }

    @Override
    public void flush() throws IOException {
        //log.info("Called flush! {}", path);
        super.flush();
    }

    @Override
    public void write(int b) throws IOException {
        hasher.update(b);
        cnt++;
        out.write(b);
    }

    @Override
    public void write(byte @NotNull [] b, int off, int len) throws IOException {
        cnt += len;
        log.info("Called: {} with {}", path, len); // TODO(review): debug noise on the hot path — drop once debugging is done
        hasher.update(b, off, len);
        // Must write to `out` directly: FilterOutputStream.write(byte[],int,int)
        // dispatches byte-by-byte to the overridden write(int), which would
        // hash every byte a second time and double `cnt`.
        out.write(b, off, len);
    }

    @Override
    public void close() throws IOException {
        // Submit the digest exactly once, before releasing the stream; the
        // hasher is finished by getValue() and must not be reused afterwards.
        try {
            long h = hasher.getValue();
            log.info("Read in: {}, of {}, with hash {}", path, cnt, h);
            hashBuilder.update(path, h);
        } finally {
            super.close();
        }
    }
}

View File

@ -121,11 +121,12 @@ public class RestoreBackupRunnable implements Runnable {
log.error("An exception occurred while trying to restore a backup!", e);
} finally {
//Regardless of what happened, we should still clean up
if(Files.exists(tmp)) {
/* if(Files.exists(tmp)) {
try {
Utilities.deleteDirectory(tmp);
} catch (IOException ignored) {}
}
}*/
//TODO: uncomment
}
//in case we're playing on client

View File

@ -47,13 +47,27 @@ public class ZipDecompressor {
ZipArchiveEntry entry = it.next();
Path file = target.resolve(entry.getName());
byte[] buff = new byte[4096];
log.info("Unpacking {} uncompressed {} compressed {}", entry.getName(), entry.getSize(), entry.getCompressedSize());
if(entry.isDirectory()) {
Files.createDirectories(file);
} else {
Files.createDirectories(file.getParent());
try (OutputStream outputStream = Files.newOutputStream(file);
HashingOutputStream out = new HashingOutputStream(outputStream, file, hashBuilder)) {
IOUtils.copy(zipFile.getInputStream(entry), out);
HashingOutputStream out = new HashingOutputStream(outputStream, file, hashBuilder);
InputStream in = zipFile.getInputStream(entry)) {
int n;
long count = 0;
while((n = in.read(buff, 0, buff.length)) >= 1) {
out.write(buff, 0, n);
count += n;
}
log.info("File {}, in size {}, copied {}", entry.getName(), in.available(), count);
}
}
}