Merge pull request #121 from Szum123321/validate_backups_research_1
Validate backups research 1
commit 04533f89fd
gradle.properties

@@ -1,8 +1,8 @@
 # Done to increase the memory available to gradle.
 org.gradle.jvmargs=-Xmx1G

-minecraft_version=1.20-rc1
-yarn_mappings=1.20-rc1+build.2
+minecraft_version=1.20
+yarn_mappings=1.20+build.1
 loader_version=0.14.21

 #Fabric api
@@ -20,6 +20,6 @@ databreaker_version=0.2.10
 pgzip_commit_hash=af5f5c297e735f3f2df7aa4eb0e19a5810b8aff6

 # Mod Properties
-mod_version = 3.0.0
+mod_version = 3.1.0
 maven_group = net.szum123321
 archives_base_name = textile_backup
CompressionStatus.java

@@ -28,7 +28,7 @@ import java.time.format.DateTimeFormatter;
 import java.util.Map;
 import java.util.Optional;

-public record CompressionStatus(long treeHash, Map<Path, Exception> brokenFiles, LocalDateTime date, long startTimestamp, long finishTimestamp, String version) implements Serializable {
+public record CompressionStatus(long treeHash, Map<String, Exception> brokenFiles, LocalDateTime date, long startTimestamp, long finishTimestamp, String version) implements Serializable {
     public static final String DATA_FILENAME = "textile_status.data";

     public Optional<String> validate(long hash, RestoreContext ctx) throws RuntimeException {
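The `brokenFiles` keys switch from `Path` to `String`. `CompressionStatus` implements `Serializable` and is presumably persisted with Java object serialization (it carries a fixed `DATA_FILENAME`), and the JDK's concrete `Path` classes do not implement `Serializable`, so a `Map<Path, Exception>` would fail with `NotSerializableException` the moment the record is written. A minimal sketch under that assumption; the `Status` record and round-trip below are illustrative only, not the mod's actual persistence code:

```java
import java.io.*;
import java.nio.file.Path;
import java.util.Map;

// Sketch: why Map<String, Exception> serializes where Map<Path, Exception> does not.
// "Status" is a stand-in for CompressionStatus, not the mod's real record.
public class SerializationSketch {
    record Status(long treeHash, Map<String, Exception> brokenFiles) implements Serializable {}

    public static void main(String[] args) throws IOException, ClassNotFoundException {
        // With Path keys this would blow up: the JDK's concrete Path classes
        // (e.g. sun.nio.fs.UnixPath) are not Serializable, so ObjectOutputStream
        // would throw NotSerializableException. String keys avoid that entirely.
        Map<String, Exception> broken =
                Map.of(Path.of("region", "r.0.0.mca").toString(), new IOException("truncated file"));
        Status status = new Status(42L, broken);

        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(status);                       // succeeds with String keys
        }
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
            Status restored = (Status) in.readObject();
            System.out.println(restored.brokenFiles().keySet());
        }
    }
}
```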
@@ -80,8 +80,8 @@ public record CompressionStatus(long treeHash, Map<Path, Exception> brokenFiles,
         if(brokenFiles.isEmpty()) builder.append("[]");
         else {
             builder.append("[\n");
-            for(Path i: brokenFiles.keySet()) {
-                builder.append(i.toString())
+            for(String i: brokenFiles.keySet()) {
+                builder.append(i)
                        .append(":");

                ByteArrayOutputStream o = new ByteArrayOutputStream();
Utilities.java

@@ -115,9 +115,9 @@ public class Utilities {
     }

     public static boolean isBlacklisted(Path path) {
-        if (path.getFileName().equals("session.lock")) return true;
+        if (path.getFileName().equals(Path.of("session.lock"))) return true;

-        if(path.getFileName().endsWith(CompressionStatus.DATA_FILENAME)) return true;
+        if(path.getFileName().equals(Path.of(CompressionStatus.DATA_FILENAME))) return true;

         return config.get().fileBlacklist.stream().anyMatch(path::startsWith);
     }
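The `isBlacklisted` change fixes a real bug: `Path.equals` only ever matches another `Path`, never a `String`, so the old `session.lock` check could not fire; the `DATA_FILENAME` check is rewritten the same way for consistency and clearer intent. A quick standalone demonstration of the comparison behaviour the fix relies on (the class name here is made up):

```java
import java.nio.file.Path;

// Path.equals only matches another Path, never a String, so the old
// equals("session.lock") comparison was always false.
public class PathCompareDemo {
    public static void main(String[] args) {
        Path lock = Path.of("world", "session.lock");

        System.out.println(lock.getFileName().equals("session.lock"));          // false (String argument)
        System.out.println(lock.getFileName().equals(Path.of("session.lock"))); // true

        // endsWith(String) does match whole path elements, but equals(Path.of(...))
        // states the intent ("the file name is exactly this") more directly.
        System.out.println(lock.getFileName().endsWith("session.lock"));        // true
    }
}
```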
BrokenFileHandler.java

@@ -23,12 +23,12 @@ import java.util.HashMap;
 import java.util.Map;

 public class BrokenFileHandler {
-    private final Map<Path, Exception> store = new HashMap<>();
-    public void handle(Path file, Exception e) { store.put(file, e); }
+    private final Map<String, Exception> store = new HashMap<>();
+    public void handle(Path file, Exception e) { store.put(file.toString(), e); }

     public boolean valid() { return store.isEmpty(); }

-    public Map<Path, Exception> get() {
+    public Map<String, Exception> get() {
         return store;
     }
 }
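With the store keyed by `String`, `BrokenFileHandler` still accepts `Path` objects at the call site but hands back a map that drops straight into the now String-keyed `CompressionStatus`. A hypothetical usage sketch; the class body mirrors the diff, while the surrounding flow is invented for illustration and is not the mod's actual call site:

```java
import java.io.IOException;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;

// Hypothetical usage of the reworked BrokenFileHandler: callers keep passing
// Path objects, but the result map is String-keyed and therefore serializable.
public class BrokenFileHandlerSketch {
    static class BrokenFileHandler {
        private final Map<String, Exception> store = new HashMap<>();
        public void handle(Path file, Exception e) { store.put(file.toString(), e); }
        public boolean valid() { return store.isEmpty(); }
        public Map<String, Exception> get() { return store; }
    }

    public static void main(String[] args) {
        BrokenFileHandler handler = new BrokenFileHandler();
        handler.handle(Path.of("region", "r.0.0.mca"), new IOException("read failed"));

        if (!handler.valid()) {
            // This String-keyed map is what the new CompressionStatus record stores.
            handler.get().forEach((file, e) -> System.out.println(file + ": " + e.getMessage()));
        }
    }
}
```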
HashingInputStream.java

@@ -52,7 +52,12 @@ public class HashingInputStream extends FilterInputStream {

     @Override
     public int read(byte @NotNull [] b, int off, int len) throws IOException {
-        int i = in.read(b, off, len);
+        int i;
+        try {
+            i = in.read(b, off, len);
+        } catch(IOException e) {
+            throw new IOException("An exception occurred while trying to access: [" + path.toString() + "]", e);
+        }
         if(i != -1) {
             hash.update(b, off, i);
             bytesWritten += i;
@@ -62,7 +67,12 @@ public class HashingInputStream extends FilterInputStream {

     @Override
     public int read() throws IOException {
-        int i = in.read();
+        int i;
+        try {
+            i = in.read();
+        } catch(IOException e) {
+            throw new IOException("An exception occurred while trying to access: [" + path.toString() + "]", e);
+        }
         if(i != -1) {
             hash.update(i);
             bytesWritten++;
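Both `read` overloads now wrap the delegate call so that a low-level `IOException` is rethrown with the offending file's path in its message, while the hash and byte counter still only see successfully read data. A minimal standalone sketch of the same pattern; CRC32 and the class, field, and constructor shapes here are stand-ins, not the mod's actual `HashingInputStream`:

```java
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.CRC32;

// Sketch of the pattern in the diff: hash everything read, and on failure
// rethrow the IOException with the file's path attached for the backup log.
public class PathReportingHashingStream extends FilterInputStream {
    private final Path path;
    private final CRC32 hash = new CRC32();
    private long bytesRead = 0;

    public PathReportingHashingStream(InputStream in, Path path) {
        super(in);
        this.path = path;
    }

    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        int i;
        try {
            i = in.read(b, off, len);
        } catch (IOException e) {
            // Attach the file that was being read so the caller can report it.
            throw new IOException("An exception occurred while trying to access: [" + path + "]", e);
        }
        if (i != -1) {          // only hash bytes that were actually read
            hash.update(b, off, i);
            bytesRead += i;
        }
        return i;
    }

    public long getValue() { return hash.getValue(); }
    public long getBytesRead() { return bytesRead; }

    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("hash-demo", ".bin");
        Files.write(file, new byte[]{1, 2, 3, 4});
        try (PathReportingHashingStream in = new PathReportingHashingStream(Files.newInputStream(file), file)) {
            in.readAllBytes();  // inherited from InputStream; routes through read(byte[], int, int)
            System.out.println(in.getBytesRead() + " bytes, crc32=" + Long.toHexString(in.getValue()));
        }
    }
}
```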