starting work on backup verification
Parent: ef4c69b4d1 · Commit: 2774ebd2b4
@@ -0,0 +1,16 @@
package net.szum123321.textile_backup.core;

import java.io.Serializable;
import java.nio.file.Path;
import java.time.LocalDateTime;

public record CompressionStatus(long[] treeHash, LocalDateTime date, long startTimestamp, long finishTimestamp, boolean ok, Path[] brokenFiles) implements Serializable {

    public static class Builder {
        public synchronized void update(Path path, long hash, Exception error) { throw new RuntimeException("UNIMPLEMENTED!"); }
        public synchronized void update(Path path, Exception error) { throw new RuntimeException("UNIMPLEMENTED!"); }
        public synchronized void update(Path path, long hash) { throw new RuntimeException("UNIMPLEMENTED!"); }

        public CompressionStatus build() { throw new RuntimeException("UNIMPLEMENTED!"); }
    }
}
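The Builder is deliberately left as UNIMPLEMENTED stubs in this commit. Purely as an illustration of what the stubs appear to be aiming at, one possible shape is the following minimal sketch; the field names and the way per-file hashes are folded into treeHash are assumptions, not part of the commit.

// Hypothetical sketch only -- NOT part of this commit. Assumes the Builder simply
// collects one hash per successfully read file plus the paths that failed.
// Requires java.util.ArrayList and java.util.List in addition to the imports above.
public static class Builder {
    private final List<Long> hashes = new ArrayList<>();
    private final List<Path> brokenFiles = new ArrayList<>();
    private final LocalDateTime startDate = LocalDateTime.now();
    private final long startTimestamp = System.currentTimeMillis();

    public synchronized void update(Path path, long hash, Exception error) {
        hashes.add(hash);
        brokenFiles.add(path);
    }

    public synchronized void update(Path path, Exception error) {
        brokenFiles.add(path);
    }

    public synchronized void update(Path path, long hash) {
        hashes.add(hash);
    }

    public CompressionStatus build() {
        long[] treeHash = hashes.stream().mapToLong(Long::longValue).toArray();
        return new CompressionStatus(treeHash, startDate, startTimestamp,
                System.currentTimeMillis(), brokenFiles.isEmpty(),
                brokenFiles.toArray(Path[]::new));
    }
}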
@@ -0,0 +1,47 @@
package net.szum123321.textile_backup.core.create;

import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.CompressionStatus;

import java.io.IOException;
import java.io.InputStream;

import java.nio.file.Files;
import java.nio.file.Path;

public record FileInputStreamSupplier(Path path, String name, CompressionStatus.Builder builder) implements InputSupplier {
    private final static TextileLogger log = new TextileLogger(TextileBackup.MOD_NAME);

    @Override
    public InputStream getInputStream() throws IOException {
        try {
            //TODO: put in hasher
            return new HashingInputStream(Files.newInputStream(path), path, null, builder);
        } catch (IOException e) {
            builder.update(path, e);
            throw e;
        }
    }

    @Override
    public Path getPath() {
        return path;
    }

    @Override
    public String getName() {
        return name;
    }

    @Override
    public InputStream get() {
        try {
            return getInputStream();
        } catch (IOException e) {
            log.error("An exception occurred while trying to create an input stream from file: {}!", path.toString(), e);
        }

        return null;
    }
}
@@ -0,0 +1,43 @@
package net.szum123321.textile_backup.core.create;

import net.szum123321.textile_backup.core.CompressionStatus;
import org.jetbrains.annotations.NotNull;

import java.io.*;
import java.nio.file.Path;
import java.util.zip.Checksum;

public class HashingInputStream extends FilterInputStream {

    private final Path path;
    private final Checksum hasher;
    private final CompressionStatus.Builder statusBuilder;

    public HashingInputStream(InputStream in, Path path, Checksum hasher, CompressionStatus.Builder statusBuilder) {
        super(in);
        this.hasher = hasher;
        this.statusBuilder = statusBuilder;
        this.path = path;
    }

    @Override
    public int read(byte @NotNull [] b, int off, int len) throws IOException {
        int i = in.read(b, off, len);
        if(i > -1) hasher.update(b, off, i);
        return i;
    }

    @Override
    public int read() throws IOException {
        int i = in.read();
        if(i > -1) hasher.update(i);
        return i;
    }

    @Override
    public void close() throws IOException {
        if(in.available() == 0) statusBuilder.update(path, hasher.getValue());
        else statusBuilder.update(path, hasher.getValue(), new RuntimeException("AAAaa"));
        super.close();
    }
}
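A minimal usage sketch of the stream above, purely for orientation: it assumes a CRC32 checksum and an implemented CompressionStatus.Builder (neither is the case yet in this commit, see the TODO and the stubs), and relies on close() reporting the hash only once the stream has been read to EOF so that available() returns 0.

// Hypothetical example -- not part of the commit.
import java.io.*;
import java.nio.file.*;
import java.util.zip.CRC32;

class HashingInputStreamExample {
    static void hashFile(Path file, CompressionStatus.Builder builder) throws IOException {
        try (InputStream in = new HashingInputStream(Files.newInputStream(file), file, new CRC32(), builder)) {
            in.transferTo(OutputStream.nullOutputStream()); // drain the stream fully
        } // close() then calls builder.update(file, crc)
    }
}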
@@ -0,0 +1,14 @@
package net.szum123321.textile_backup.core.create;

import org.apache.commons.compress.parallel.InputStreamSupplier;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Path;

public interface InputSupplier extends InputStreamSupplier {
    InputStream getInputStream() throws IOException;
    Path getPath();

    String getName();
}
@@ -21,9 +21,12 @@ package net.szum123321.textile_backup.core.create.compressors;
import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.ActionInitiator;
import net.szum123321.textile_backup.core.CompressionStatus;
import net.szum123321.textile_backup.core.NoSpaceLeftOnDeviceException;
import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.FileInputStreamSupplier;
import net.szum123321.textile_backup.core.create.InputSupplier;

import java.io.*;
import java.nio.file.Files;
@@ -47,12 +50,13 @@ public abstract class AbstractCompressor {
             OutputStream arc = createArchiveOutputStream(bufferedOutputStream, ctx, coreLimit);
             Stream<Path> fileStream = Files.walk(inputFile)) {

            CompressionStatus.Builder statusBuilder = new CompressionStatus.Builder();

            fileStream
                    .filter(path -> !Utilities.isBlacklisted(inputFile.relativize(path)))
                    .filter(Files::isRegularFile).forEach(file -> {
                        try {
                            //hopefully one broken file won't spoil the whole archive
                            addEntry(file, inputFile.relativize(file).toString(), arc);
                            addEntry(new FileInputStreamSupplier(file, inputFile.relativize(file).toString(), statusBuilder), arc);
                        } catch (IOException e) {
                            log.error("An exception occurred while trying to compress: {}", inputFile.relativize(file).toString(), e);

@@ -61,6 +65,13 @@ public abstract class AbstractCompressor {
                        }
                    });

            //Serialize using gson?
            ByteArrayOutputStream bo = new ByteArrayOutputStream();
            ObjectOutputStream o = new ObjectOutputStream(bo);
            o.writeObject(statusBuilder.build());

            addEntry(new StatusFileInputSupplier(bo.toByteArray(), bo.size()), arc);

            finish(arc);
        } catch(NoSpaceLeftOnDeviceException e) {
            log.error("""
@@ -88,7 +99,7 @@ public abstract class AbstractCompressor {
    }

    protected abstract OutputStream createArchiveOutputStream(OutputStream stream, BackupContext ctx, int coreLimit) throws IOException;
    protected abstract void addEntry(Path file, String entryName, OutputStream arc) throws IOException;
    protected abstract void addEntry(InputSupplier inputSupplier, OutputStream arc) throws IOException;

    protected void finish(OutputStream arc) throws InterruptedException, ExecutionException, IOException {
        //This function is only needed for the ParallelZipCompressor to write out ParallelScatterZipCreator
@@ -97,4 +108,20 @@ public abstract class AbstractCompressor {
    protected void close() {
        //Same as above, just for ParallelGzipCompressor to shut down ExecutorService
    }
}

    private record StatusFileInputSupplier(byte[] data, int len) implements InputSupplier {
        private final static String NAME = "textile_status.data";

        @Override
        public InputStream getInputStream() { return new ByteArrayInputStream(data, 0, len); }

        @Override
        public Path getPath() { return Path.of(NAME); }

        @Override
        public String getName() { return NAME; }

        @Override
        public InputStream get() { return new ByteArrayInputStream(data, 0, len); }
    }
}
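The status record is written into the archive as an ordinary entry named textile_status.data. For context, the verification side implied by the commit message could read it back roughly as sketched below; this is only an illustration under the assumptions that plain Java serialization stays in place (the comment above still asks "Serialize using gson?") and that the backup is a zip archive.

// Hypothetical verification-side sketch -- not part of this commit.
import java.io.ObjectInputStream;
import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;

class StatusReaderSketch {
    static CompressionStatus read(Path backup) throws Exception {
        try (ZipFile zip = new ZipFile(backup.toFile())) {
            ZipEntry entry = zip.getEntry("textile_status.data");   // name from StatusFileInputSupplier.NAME
            try (ObjectInputStream in = new ObjectInputStream(zip.getInputStream(entry))) {
                return (CompressionStatus) in.readObject();         // ok() / brokenFiles() describe the backup
            }
        }
    }
}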
@@ -22,12 +22,11 @@ import net.szum123321.textile_backup.TextileBackup;
import net.szum123321.textile_backup.TextileLogger;
import net.szum123321.textile_backup.core.NoSpaceLeftOnDeviceException;
import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.InputSupplier;
import org.apache.commons.compress.archivers.zip.*;
import org.apache.commons.compress.parallel.InputStreamSupplier;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Objects;
import java.util.concurrent.*;
import java.util.zip.ZipEntry;
@@ -67,19 +66,19 @@ public class ParallelZipCompressor extends ZipCompressor {
    }

    @Override
    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
        ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(file, entryName);
    protected void addEntry(InputSupplier input, OutputStream arc) throws IOException {
        ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(input.getPath(), input.getName());

        if(ZipCompressor.isDotDat(file.getFileName().toString())) {
        if(ZipCompressor.isDotDat(input.getPath().getFileName().toString())) {
            entry.setMethod(ZipEntry.STORED);
            entry.setSize(Files.size(file));
            entry.setCompressedSize(Files.size(file));
            entry.setCrc(getCRC(file));
            entry.setSize(Files.size(input.getPath()));
            entry.setCompressedSize(Files.size(input.getPath()));
            entry.setCrc(getCRC(input.getPath()));
        } else entry.setMethod(ZipEntry.DEFLATED);

        entry.setTime(System.currentTimeMillis());

        scatterZipCreator.addArchiveEntry(entry, new FileInputStreamSupplier(file));
        scatterZipCreator.addArchiveEntry(entry, input);
    }

    @Override
@@ -127,16 +126,4 @@ public class ParallelZipCompressor extends ZipCompressor {
        return isNative == that.isNative && Objects.equals(className, that.className) && Objects.equals(methodName, that.methodName);
    }
}

record FileInputStreamSupplier(Path sourceFile) implements InputStreamSupplier {
    public InputStream get() {
        try {
            return Files.newInputStream(sourceFile);
        } catch (IOException e) {
            log.error("An exception occurred while trying to create an input stream from file: {}!", sourceFile.toString(), e);
        }

        return null;
    }
}
}
@@ -21,6 +21,7 @@ package net.szum123321.textile_backup.core.create.compressors;
import net.szum123321.textile_backup.config.ConfigHelper;
import net.szum123321.textile_backup.core.Utilities;
import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.InputSupplier;
import org.apache.commons.compress.archivers.zip.Zip64Mode;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
@@ -54,15 +55,15 @@ public class ZipCompressor extends AbstractCompressor {
    }

    @Override
    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
        try (InputStream fileInputStream = Files.newInputStream(file)){
            ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(file, entryName);
    protected void addEntry(InputSupplier input, OutputStream arc) throws IOException {
        try (InputStream fileInputStream = input.getInputStream()) {
            ZipArchiveEntry entry = (ZipArchiveEntry)((ZipArchiveOutputStream)arc).createArchiveEntry(input.getPath(), input.getName());

            if(isDotDat(file.getFileName().toString())) {
            if(isDotDat(input.getPath().getFileName().toString())) {
                entry.setMethod(ZipEntry.STORED);
                entry.setSize(Files.size(file));
                entry.setCompressedSize(Files.size(file));
                entry.setCrc(getCRC(file));
                entry.setSize(Files.size(input.getPath()));
                entry.setCompressedSize(Files.size(input.getPath()));
                entry.setCrc(getCRC(input.getPath()));
            }

            ((ZipArchiveOutputStream)arc).putArchiveEntry(entry);
@@ -20,13 +20,12 @@ package net.szum123321.textile_backup.core.create.compressors.tar;

import net.szum123321.textile_backup.core.create.BackupContext;
import net.szum123321.textile_backup.core.create.compressors.AbstractCompressor;
import net.szum123321.textile_backup.core.create.InputSupplier;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;

public class AbstractTarArchiver extends AbstractCompressor {
    protected OutputStream getCompressorOutputStream(OutputStream stream, BackupContext ctx, int coreLimit) throws IOException {
@@ -43,9 +42,9 @@ public class AbstractTarArchiver extends AbstractCompressor {
    }

    @Override
    protected void addEntry(Path file, String entryName, OutputStream arc) throws IOException {
        try (InputStream fileInputStream = Files.newInputStream(file)){
            TarArchiveEntry entry = (TarArchiveEntry)((TarArchiveOutputStream) arc).createArchiveEntry(file, entryName);
    protected void addEntry(InputSupplier in, OutputStream arc) throws IOException {
        try (InputStream fileInputStream = in.getInputStream()) {
            TarArchiveEntry entry = (TarArchiveEntry)((TarArchiveOutputStream) arc).createArchiveEntry(in.getPath(), in.getName());
            ((TarArchiveOutputStream)arc).putArchiveEntry(entry);

            IOUtils.copy(fileInputStream, arc);