Individual file hashing works; file-tree hashing still fails.

This commit is contained in:
Szum123321 2022-11-28 19:43:30 +01:00
parent 2053df7311
commit 5367a00cdc
2 changed files with 3 additions and 31 deletions

View File

@ -19,8 +19,6 @@
package net.szum123321.textile_backup.core.digest;
import net.szum123321.textile_backup.Globals;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import org.jetbrains.annotations.NotNull;
import java.io.FilterOutputStream;
@ -29,45 +27,31 @@ import java.io.OutputStream;
import java.nio.file.Path;
// Wraps an OutputStream, hashing every byte that passes through, and reports
// the finished per-file hash to a shared FileTreeHashBuilder when closed.
public class HashingOutputStream extends FilterOutputStream {
private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);
// File this stream's hash will be attributed to in the tree-wide hash.
private final Path path;
// Per-file hasher; concrete algorithm is chosen globally via Globals.CHECKSUM_SUPPLIER.
private final Hash hasher = Globals.CHECKSUM_SUPPLIER.get();
// Collector that aggregates each file's (path, hash) pair into the tree hash.
private final FileTreeHashBuilder hashBuilder;
// Running count of bytes written — used only for the diagnostic log in close().
private long cnt = 0;
/**
 * Creates a hashing wrapper around {@code sink}.
 *
 * @param sink        stream the (unmodified) bytes are forwarded to
 * @param file        path the resulting hash is attributed to
 * @param treeHash    collector receiving the final (path, hash) pair on close
 */
public HashingOutputStream(OutputStream sink, Path file, FileTreeHashBuilder treeHash) {
    super(sink);
    this.hashBuilder = treeHash;
    this.path = file;
}
/** Flushes the wrapped stream; hashing state is unaffected. */
@Override
public void flush() throws IOException {
    // FilterOutputStream#flush only delegates, so flush the wrapped stream directly.
    out.flush();
}
/**
 * Hashes the single byte and forwards it to the wrapped stream.
 *
 * Writes exactly once, directly to {@code out}. The original text contained
 * both {@code super.write(b)} and {@code out.write(b)}, which emitted every
 * byte twice (FilterOutputStream#write(int) itself delegates to out.write).
 *
 * @param b the byte to write (low 8 bits)
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(int b) throws IOException {
    hasher.update(b);
    cnt++;
    out.write(b);
}
/**
 * Hashes {@code len} bytes of {@code b} starting at {@code off} and forwards
 * them to the wrapped stream in a single bulk call.
 *
 * Writes directly to {@code out}: calling {@code super.write(b, off, len)}
 * would fall back to FilterOutputStream's byte-at-a-time loop over
 * {@code write(int)}, re-entering the override above and hashing/counting
 * every byte a second time (and the original text additionally called both
 * super.write and out.write, duplicating the output).
 *
 * @param b   source buffer
 * @param off offset of the first byte to write
 * @param len number of bytes to write
 * @throws IOException if the underlying stream fails
 */
@Override
public void write(byte @NotNull [] b, int off, int len) throws IOException {
    cnt += len;
    log.info("Called: {} with {}", path, len); // NOTE(review): per-chunk debug trace — consider demoting or removing
    hasher.update(b, off, len);
    out.write(b, off, len);
}
// Finalizes the per-file hash, reports it to the shared tree builder, then
// closes (and flushes) the wrapped stream via FilterOutputStream#close.
@Override
public void close() throws IOException {
long h = hasher.getValue();
// NOTE(review): info-level trace of byte count and final hash — likely debug leftover.
log.info("Read in: {}, of {}, with hash {}", path, cnt, h);
hashBuilder.update(path, h);
super.close();

View File

@ -47,10 +47,6 @@ public class ZipDecompressor {
ZipArchiveEntry entry = it.next();
Path file = target.resolve(entry.getName());
byte[] buff = new byte[4096];
log.info("Unpacking {} uncompressed {} compressed {}", entry.getName(), entry.getSize(), entry.getCompressedSize());
if(entry.isDirectory()) {
Files.createDirectories(file);
} else {
@ -59,15 +55,7 @@ public class ZipDecompressor {
HashingOutputStream out = new HashingOutputStream(outputStream, file, hashBuilder);
InputStream in = zipFile.getInputStream(entry)) {
int n;
long count = 0;
while((n = in.read(buff, 0, buff.length)) >= 1) {
out.write(buff, 0, n);
count += n;
}
log.info("File {}, in size {}, copied {}", entry.getName(), in.available(), count);
IOUtils.copy(in, out);
}
}
}