authorKeuin <[email protected]>2021-01-23 14:10:32 +0800
committerkeuin <[email protected]>2021-01-23 14:10:32 +0800
commit4a1d885afa7217b47d6183488c3dc6537cef05b6 (patch)
tree1b499db6b834cb0709029e30c0d52c0ddf200ffa
parent4ac575330130ac4e1b4b35386ffc0aacd431a5a4 (diff)
Version 1.4.6 (preview): added metadata for incremental backup (needs integration test and display implementation)
-rw-r--r--gradle.properties2
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection2.java70
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionConverter.java30
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactory.java8
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializer.java17
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectElementConverter.java18
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/identifier/Sha256IdentifierConverter.java14
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncCopyResult.java105
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncrementalBackupStorageManager.java38
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializer.java65
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV0.java64
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV1.java100
-rw-r--r--src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncrementalBackup.java62
-rw-r--r--src/main/java/com/keuin/kbackupfabric/operation/backup/feedback/IncrementalBackupFeedback.java19
-rw-r--r--src/main/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethod.java47
-rw-r--r--src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectCollection.java (renamed from src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection.java)10
-rw-r--r--src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectElement.java61
-rw-r--r--src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/ObjectIdentifier.java13
-rw-r--r--src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/Sha256Identifier.java84
-rw-r--r--src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/SingleHashIdentifier.java53
-rw-r--r--src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactoryTest.java6
-rw-r--r--src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializerTest.java (renamed from src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializerTest.java)16
-rw-r--r--src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBakupBackwardCompatibilityTest.java45
-rw-r--r--src/test/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethodTest.java5
24 files changed, 899 insertions, 53 deletions
diff --git a/gradle.properties b/gradle.properties
index 8f29ad1..8b22d27 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -6,7 +6,7 @@ minecraft_version=1.14.4
yarn_mappings=1.14.4+build.18
loader_version=0.11.0
# Mod Properties
-mod_version=1.4.5
+mod_version=1.4.6
maven_group=com.keuin.kbackupfabric
archives_base_name=kbackup-fabric
# Dependencies
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection2.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection2.java
new file mode 100644
index 0000000..8d8eb14
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection2.java
@@ -0,0 +1,70 @@
+package com.keuin.kbackupfabric.backup.incremental;
+
+import java.io.Serializable;
+import java.util.*;
+
+/**
+ * The new object collection type introduced in 1.4.6. Unlike the legacy `ObjectCollection`,
+ * which must keep its old fully-qualified name to stay readable from old backups,
+ * this class lives in the new package; legacy collections are converted to it by `ObjectCollectionConverter`.
+ */
+public class ObjectCollection2 implements Serializable {
+
+ private final String name;
+ private final Map<String, ObjectElement> elements;
+ private final Map<String, ObjectCollection2> subCollections;
+
+ ObjectCollection2(String name, Set<ObjectElement> elements, Map<String, ObjectCollection2> subCollections) {
+ this.name = Objects.requireNonNull(name);
+ this.elements = new HashMap<>();
+ for (ObjectElement e : elements) {
+ Objects.requireNonNull(e);
+ if (this.elements.put(e.getName(), e) != null) {
+ throw new IllegalStateException("elements conflict with the same name");
+ }
+ }
+ this.subCollections = new HashMap<>(Objects.requireNonNull(subCollections));
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public Set<ObjectElement> getElementSet() {
+ return new HashSet<>(elements.values());
+ }
+
+ public Map<String, ObjectElement> getElementMap() {
+ return Collections.unmodifiableMap(elements);
+ }
+
+ public ObjectElement getElement(String name) {
+ return elements.get(name);
+ }
+
+ public Set<ObjectCollection2> getSubCollectionSet() {
+ return new HashSet<>(subCollections.values());
+ }
+
+ public Map<String, ObjectCollection2> getSubCollectionMap() {
+ return Collections.unmodifiableMap(subCollections);
+ }
+
+ public ObjectCollection2 getSubCollection(String name) {
+ return subCollections.get(name);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ObjectCollection2 that = (ObjectCollection2) o;
+ return name.equals(that.name) &&
+ elements.equals(that.elements) &&
+ subCollections.equals(that.subCollections);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, elements, subCollections);
+ }
+}
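
For orientation, a minimal sketch (not part of this commit) of how the read-only accessors above can be used; it walks an ObjectCollection2 tree recursively and counts the elements it contains:

import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;

class ObjectCollectionWalker {
    // Count all ObjectElement entries in this collection and its sub-collections.
    static int countElements(ObjectCollection2 collection) {
        int count = collection.getElementMap().size();
        for (ObjectCollection2 sub : collection.getSubCollectionMap().values()) {
            count += countElements(sub);
        }
        return count;
    }
}
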
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionConverter.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionConverter.java
new file mode 100644
index 0000000..4e8a379
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionConverter.java
@@ -0,0 +1,30 @@
+package com.keuin.kbackupfabric.backup.incremental;
+
+import com.keuin.kbackupfabric.util.backup.incremental.ObjectCollection;
+
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Converts a legacy `ObjectCollection` (kept only for backward compatibility after the refactoring)
+ * to the new `ObjectCollection2`.
+ */
+public class ObjectCollectionConverter {
+ /**
+     * Converts a legacy `ObjectCollection` (kept only for backward compatibility after the refactoring)
+     * to the new `ObjectCollection2`.
+ *
+ * @param objectCollection old instance.
+ * @return new instance.
+ */
+ public static ObjectCollection2 convert(ObjectCollection objectCollection) {
+ Map<String, ObjectCollection> oldSubCollectionMap = objectCollection.getSubCollectionMap();
+ Map<String, ObjectCollection2> convertedSubCollectionMap = new HashMap<>(oldSubCollectionMap.size());
+ oldSubCollectionMap.forEach((s, c) -> convertedSubCollectionMap.put(s, convert(c)));
+ Set<ObjectElement> convertedElementSet = new HashSet<>();
+ objectCollection.getElementSet().forEach(oldElement -> convertedElementSet.add(ObjectElementConverter.convert(oldElement)));
+ return new ObjectCollection2(objectCollection.getName(), convertedElementSet, convertedSubCollectionMap);
+ }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactory.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactory.java
index 17eddaf..9b1a226 100644
--- a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactory.java
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactory.java
@@ -37,9 +37,9 @@ public class ObjectCollectionFactory<T extends ObjectIdentifier> {
throw new IllegalArgumentException("minParallelProcessFileCountThreshold must not be negative.");
}
- public ObjectCollection fromDirectory(File directory, Set<String> ignoredFiles) throws IOException {
+ public ObjectCollection2 fromDirectory(File directory, Set<String> ignoredFiles) throws IOException {
- final Map<String, ObjectCollection> subCollections = new HashMap<>();
+ final Map<String, ObjectCollection2> subCollections = new HashMap<>();
if (!Objects.requireNonNull(directory).isDirectory())
throw new IllegalArgumentException("given file is not a directory");
@@ -92,10 +92,10 @@ public class ObjectCollectionFactory<T extends ObjectIdentifier> {
}
}
- return new ObjectCollection(directory.getName(), subFiles, subCollections);
+ return new ObjectCollection2(directory.getName(), subFiles, subCollections);
}
- public ObjectCollection fromDirectory(File directory) throws IOException {
+ public ObjectCollection2 fromDirectory(File directory) throws IOException {
return fromDirectory(directory, Collections.emptySet());
}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializer.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializer.java
index f45d4d0..f663f20 100644
--- a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializer.java
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializer.java
@@ -5,14 +5,20 @@ import java.util.Objects;
/**
* Serialize and deserialize ObjectCollection from/to the disk file.
+ * Now we want to save additional metadata in incremental backups, so the serializer for the plain ObjectCollection is deprecated.
*/
public class ObjectCollectionSerializer {
- public static ObjectCollection fromFile(File file) throws IOException {
+
+ /**
+ * This doesn't work with the latest format. Use IncBackupInfoSerializer instead.
+ */
+ @Deprecated
+ public static ObjectCollection2 fromFile(File file) throws IOException {
Objects.requireNonNull(file);
- ObjectCollection collection;
+ ObjectCollection2 collection;
try (FileInputStream fileInputStream = new FileInputStream(file)) {
try (ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream)) {
- collection = (ObjectCollection) objectInputStream.readObject();
+ collection = (ObjectCollection2) objectInputStream.readObject();
} catch (ClassNotFoundException ignored) {
// this should not happen
return null;
@@ -21,7 +27,10 @@ public class ObjectCollectionSerializer {
return collection;
}
- public static void toFile(ObjectCollection collection, File file) throws IOException {
+ /**
+ * Only used for testing backward-compatibility with legacy backups.
+ */
+ public static void toFile(ObjectCollection2 collection, File file) throws IOException {
Objects.requireNonNull(collection);
Objects.requireNonNull(file);
try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectElementConverter.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectElementConverter.java
new file mode 100644
index 0000000..512c2d2
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectElementConverter.java
@@ -0,0 +1,18 @@
+package com.keuin.kbackupfabric.backup.incremental;
+
+import com.keuin.kbackupfabric.backup.incremental.identifier.Sha256IdentifierConverter;
+import com.keuin.kbackupfabric.util.backup.incremental.identifier.Sha256Identifier;
+
+public class ObjectElementConverter {
+ public static ObjectElement convert(com.keuin.kbackupfabric.util.backup.incremental.ObjectElement oldObjectElement) {
+ try {
+ return new ObjectElement(
+ oldObjectElement.getName(),
+                    // in practice, Sha256Identifier is the only identifier used in KBackup, so this cast is safe
+ Sha256IdentifierConverter.convert((Sha256Identifier) oldObjectElement.getIdentifier())
+ );
+ } catch (IllegalAccessException | NoSuchFieldException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/identifier/Sha256IdentifierConverter.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/identifier/Sha256IdentifierConverter.java
new file mode 100644
index 0000000..a8ec77c
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/identifier/Sha256IdentifierConverter.java
@@ -0,0 +1,14 @@
+package com.keuin.kbackupfabric.backup.incremental.identifier;
+
+import com.keuin.kbackupfabric.util.backup.incremental.identifier.SingleHashIdentifier;
+
+import java.lang.reflect.Field;
+
+public class Sha256IdentifierConverter {
+ public static Sha256Identifier convert(com.keuin.kbackupfabric.util.backup.incremental.identifier.Sha256Identifier old) throws NoSuchFieldException, IllegalAccessException {
+ Field field = ((SingleHashIdentifier) old).getClass().getSuperclass().getDeclaredField("hash");
+ field.setAccessible(true);
+ byte[] hash = (byte[]) field.get(old);
+ return new Sha256Identifier(hash);
+ }
+}
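
For orientation, the converter above pulls the legacy identifier's private `hash` field out via reflection because the legacy class exposes no getter. A self-contained sketch of that pattern, using hypothetical toy classes rather than the real identifiers:

import java.lang.reflect.Field;

class LegacyBase {
    private final byte[] hash;
    LegacyBase(byte[] hash) { this.hash = hash; }
}

class LegacyImpl extends LegacyBase {
    LegacyImpl(byte[] hash) { super(hash); }
}

class ReflectionSketch {
    // Read a private field declared in the superclass, as Sha256IdentifierConverter does.
    static byte[] readHash(LegacyImpl legacy) throws NoSuchFieldException, IllegalAccessException {
        Field field = legacy.getClass().getSuperclass().getDeclaredField("hash");
        field.setAccessible(true);
        return (byte[]) field.get(legacy);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(readHash(new LegacyImpl(new byte[]{1, 2, 3})).length); // prints 3
    }
}
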
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncCopyResult.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncCopyResult.java
new file mode 100644
index 0000000..6011ea5
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncCopyResult.java
@@ -0,0 +1,105 @@
+package com.keuin.kbackupfabric.backup.incremental.manager;
+
+import com.keuin.kbackupfabric.backup.BackupFilesystemUtil;
+
+import java.util.Objects;
+
+/**
+ * Returned by `addObjectCollection` in IncrementalBackupStorageManager.
+ * Immutable.
+ */
+public class IncCopyResult {
+
+ private final int totalFiles;
+ private final int filesCopied;
+ private final long bytesCopied;
+ private final long bytesTotal;
+
+ public static final IncCopyResult ZERO = new IncCopyResult(0, 0, 0, 0);
+
+ public IncCopyResult(int totalFiles, int filesCopied, long bytesCopied, long bytesTotal) {
+ this.totalFiles = totalFiles;
+ this.filesCopied = filesCopied;
+ this.bytesCopied = bytesCopied;
+ this.bytesTotal = bytesTotal;
+ }
+
+ /**
+ * Get total files in the collection, containing reused files.
+ *
+ * @return file count.
+ */
+ public int getTotalFiles() {
+ return totalFiles;
+ }
+
+ /**
+ * Get new files added to the base.
+ *
+ * @return file count.
+ */
+ public int getFilesCopied() {
+ return filesCopied;
+ }
+
+ /**
+ * Get total bytes of new files added to the base.
+ *
+ * @return bytes.
+ */
+ public long getBytesCopied() {
+ return bytesCopied;
+ }
+
+ /**
+     * Get total bytes of all files in the collection. This equals copied_files_bytes + reused_files_bytes.
+ *
+ * @return bytes.
+ */
+ public long getBytesTotal() {
+ return bytesTotal;
+ }
+
+ /**
+     * Add with another IncCopyResult.
+ *
+ * @param a object.
+ * @return the add result.
+ */
+ public IncCopyResult addWith(IncCopyResult a) {
+ Objects.requireNonNull(a);
+ return new IncCopyResult(
+ totalFiles + a.totalFiles,
+ filesCopied + a.filesCopied,
+ bytesCopied + a.bytesCopied,
+ bytesTotal + a.bytesTotal
+ );
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ IncCopyResult that = (IncCopyResult) o;
+ return totalFiles == that.totalFiles &&
+ filesCopied == that.filesCopied &&
+ bytesCopied == that.bytesCopied &&
+ bytesTotal == that.bytesTotal;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(totalFiles, filesCopied, bytesCopied, bytesTotal);
+ }
+
+ @Override
+ public String toString() {
+ return String.format(
+ "Files copied: %d (%s in size, totally %d files). Total file tree size: %s.",
+ filesCopied,
+ BackupFilesystemUtil.getFriendlyFileSizeString(bytesCopied),
+ totalFiles,
+ BackupFilesystemUtil.getFriendlyFileSizeString(bytesTotal)
+ );
+ }
+}
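
For orientation, a minimal sketch (not part of this commit) of how per-file results aggregate through addWith, the way IncrementalBackupStorageManager accumulates them; the sizes are made up:

import com.keuin.kbackupfabric.backup.incremental.manager.IncCopyResult;

class CopyStatsSketch {
    public static void main(String[] args) {
        IncCopyResult total = IncCopyResult.ZERO;
        // a newly copied 4 KiB file: counts towards both copied bytes and total bytes
        total = total.addWith(new IncCopyResult(1, 1, 4096, 4096));
        // a reused 8 KiB file: counts towards the total size only
        total = total.addWith(new IncCopyResult(1, 0, 0, 8192));
        System.out.println(total); // human-readable summary from toString()
    }
}
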
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncrementalBackupStorageManager.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncrementalBackupStorageManager.java
index 945fcc4..78c6943 100644
--- a/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncrementalBackupStorageManager.java
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/manager/IncrementalBackupStorageManager.java
@@ -1,17 +1,18 @@
package com.keuin.kbackupfabric.backup.incremental.manager;
-import com.keuin.kbackupfabric.backup.incremental.ObjectCollection;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
import com.keuin.kbackupfabric.backup.incremental.ObjectElement;
import com.keuin.kbackupfabric.backup.incremental.identifier.ObjectIdentifier;
import com.keuin.kbackupfabric.backup.incremental.identifier.StorageObjectLoader;
+import com.keuin.kbackupfabric.util.FilesystemUtil;
import com.keuin.kbackupfabric.util.PrintUtil;
+import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
import java.util.*;
import static org.apache.commons.io.FileUtils.forceDelete;
@@ -28,11 +29,14 @@ public class IncrementalBackupStorageManager {
/**
* Add a object collection to storage base.
+ *
* @param collection the collection.
* @return objects copied to the base.
* @throws IOException I/O Error.
*/
- public int addObjectCollection(ObjectCollection collection, File collectionBasePath) throws IOException {
+ public @Nullable
+ IncCopyResult addObjectCollection(ObjectCollection2 collection, File collectionBasePath) throws IOException {
+ // TODO: add failure detection
if (!backupStorageBase.toFile().isDirectory()) {
if (!backupStorageBase.toFile().mkdirs())
throw new IOException("Backup storage base directory does not exist, and failed to create it.");
@@ -40,22 +44,28 @@ public class IncrementalBackupStorageManager {
Objects.requireNonNull(collection);
Objects.requireNonNull(collectionBasePath);
- int copyCount = 0;
+ IncCopyResult copyCount = IncCopyResult.ZERO;
// copy sub files
for (Map.Entry<String, ObjectElement> entry : collection.getElementMap().entrySet()) {
File copyDestination = new File(backupStorageBase.toFile(), entry.getValue().getIdentifier().getIdentification());
+ File copySourceFile = new File(collectionBasePath.getAbsolutePath(), entry.getKey());
+ final long fileBytes = FilesystemUtil.getFileSizeBytes(copySourceFile.getAbsolutePath());
if (!baseContainsObject(entry.getValue())) {
// element does not exist. copy.
- Files.copy(Paths.get(collectionBasePath.getAbsolutePath(), entry.getKey()), copyDestination.toPath());
- ++copyCount;
+ Files.copy(copySourceFile.toPath(), copyDestination.toPath());
+ copyCount = copyCount.addWith(new IncCopyResult(1, 1, fileBytes, fileBytes));
+            } else {
+                // element exists (file reused). Just update the stat info
+                copyCount = copyCount.addWith(new IncCopyResult(1, 0, 0, fileBytes));
}
}
//copy sub dirs recursively
- for (Map.Entry<String, ObjectCollection> entry : collection.getSubCollectionMap().entrySet()) {
+ for (Map.Entry<String, ObjectCollection2> entry : collection.getSubCollectionMap().entrySet()) {
File newBase = new File(collectionBasePath, entry.getKey());
- copyCount += addObjectCollection(entry.getValue(), newBase);
+ copyCount = copyCount.addWith(addObjectCollection(entry.getValue(), newBase));
}
return copyCount;
@@ -63,12 +73,13 @@ public class IncrementalBackupStorageManager {
/**
* Restore an object collection from the storage base. i.e., restore the save from backup storage.
- * @param collection the collection to be restored.
+ *
+ * @param collection the collection to be restored.
* @param collectionBasePath save path of the collection.
* @return objects restored from the base.
* @throws IOException I/O Error.
*/
- public int restoreObjectCollection(ObjectCollection collection, File collectionBasePath) throws IOException {
+ public int restoreObjectCollection(ObjectCollection2 collection, File collectionBasePath) throws IOException {
Objects.requireNonNull(collection);
Objects.requireNonNull(collectionBasePath);
@@ -122,7 +133,7 @@ public class IncrementalBackupStorageManager {
}
//copy sub dirs recursively
- for (Map.Entry<String, ObjectCollection> entry : collection.getSubCollectionMap().entrySet()) {
+ for (Map.Entry<String, ObjectCollection2> entry : collection.getSubCollectionMap().entrySet()) {
File newBase = new File(collectionBasePath, entry.getKey());
copyCount += restoreObjectCollection(entry.getValue(), newBase);
}
@@ -130,12 +141,12 @@ public class IncrementalBackupStorageManager {
return copyCount;
}
- public int cleanUnusedObjects(Iterable<ObjectCollection> collectionIterable) {
+ public int cleanUnusedObjects(Iterable<ObjectCollection2> collectionIterable) {
// construct object list in memory
Set<String> objects = new HashSet<>();
// backupStorageBase
- for (ObjectCollection collection : collectionIterable) {
+ for (ObjectCollection2 collection : collectionIterable) {
for (ObjectElement ele : collection.getElementMap().values()) {
}
@@ -185,5 +196,4 @@ public class IncrementalBackupStorageManager {
map.put(identifier, file);
});
}
-
}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializer.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializer.java
new file mode 100644
index 0000000..45590ba
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializer.java
@@ -0,0 +1,65 @@
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionConverter;
+import com.keuin.kbackupfabric.backup.name.BackupFileNameEncoder;
+import com.keuin.kbackupfabric.backup.name.IncrementalBackupFileNameEncoder;
+import com.keuin.kbackupfabric.util.backup.incremental.ObjectCollection;
+
+import java.io.*;
+import java.util.Objects;
+
+public class IncBackupInfoSerializer {
+ /**
+ * Load incremental backup index file into object, no matter what version it is.
+ *
+     * @param file a valid incremental backup index file (with a valid file name).
+ * @return the object. Not null.
+ * @throws IOException when failed due to an I/O error.
+ */
+ public static SavedIncrementalBackup fromFile(File file) throws IOException {
+ Objects.requireNonNull(file);
+ try (FileInputStream fileInputStream = new FileInputStream(file)) {
+ try (ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream)) {
+ Object o = objectInputStream.readObject();
+ if (o instanceof SavedIncrementalBackup) {
+ return (SavedIncrementalBackup) o;
+ } else if (o instanceof ObjectCollection) {
+ // backward compatibility with old-style (v0) incremental backup
+ BackupFileNameEncoder.BackupBasicInformation info = new IncrementalBackupFileNameEncoder().decode(file.getName());
+ if (info == null)
+ throw new IOException("Invalid backup file name.");
+ return new SavedIncBackupV0(ObjectCollectionConverter.convert((ObjectCollection) o), info);
+ } else if (o instanceof ObjectCollection2) {
+ // compatible with 1.4.6 implementation
+ BackupFileNameEncoder.BackupBasicInformation info = new IncrementalBackupFileNameEncoder().decode(file.getName());
+ if (info == null)
+ throw new IOException("Invalid backup file name.");
+ return new SavedIncBackupV0((ObjectCollection2) o, info);
+ } else {
+ throw new RuntimeException("Unrecognized backup file format: unknown class " + o.getClass().getCanonicalName());
+ }
+ } catch (ClassNotFoundException e) {
+ // this should not happen
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ /**
+ * Save incremental backup index and metadata into file.
+ *
+ * @param file the file.
+ * @param backup the backup.
+ * @throws IOException when failed due to an I/O error.
+ */
+ public static void toFile(File file, SavedIncrementalBackup backup) throws IOException {
+ Objects.requireNonNull(file);
+ Objects.requireNonNull(backup);
+ try (FileOutputStream fileOutputStream = new FileOutputStream(file)) {
+ try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(fileOutputStream)) {
+ objectOutputStream.writeObject(backup);
+ }
+ }
+ }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV0.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV0.java
new file mode 100644
index 0000000..8b4a4a1
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV0.java
@@ -0,0 +1,64 @@
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+import com.keuin.kbackupfabric.backup.name.BackupFileNameEncoder;
+
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.util.Objects;
+
+/**
+ * The old-style incremental backup. Just to keep backward compatibility with old backups.
+ */
+public class SavedIncBackupV0 implements SavedIncrementalBackup {
+
+ private final ObjectCollection2 objectCollection2;
+ private final String backupName;
+ private final LocalDateTime namedBackupTime;
+
+ public SavedIncBackupV0(ObjectCollection2 objectCollection2, BackupFileNameEncoder.BackupBasicInformation backupBasicInformation) {
+ Objects.requireNonNull(objectCollection2);
+ Objects.requireNonNull(backupBasicInformation);
+
+ this.objectCollection2 = objectCollection2;
+ this.backupName = backupBasicInformation.customName;
+ this.namedBackupTime = backupBasicInformation.time;
+ }
+
+
+ @Override
+ public ObjectCollection2 getObjectCollection() {
+ return objectCollection2;
+ }
+
+ @Override
+ public String getBackupName() {
+ return backupName;
+ }
+
+ @Override
+ public ZonedDateTime getBackupTime() {
+ return namedBackupTime.atZone(ZoneId.systemDefault());
+ }
+
+ @Override
+ public int getFilesAdded() {
+ return -1; // missing info
+ }
+
+ @Override
+ public long getTotalSizeBytes() {
+ return -1; // missing info
+ }
+
+ @Override
+ public long getIncreasedSizeBytes() {
+ return -1; // missing info
+ }
+
+ @Override
+ public String toString() {
+ return String.format("(Legacy Backup) %s, created at %s", backupName, namedBackupTime);
+ }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV1.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV1.java
new file mode 100644
index 0000000..0ebe06a
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncBackupV1.java
@@ -0,0 +1,100 @@
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+
+import com.keuin.kbackupfabric.backup.BackupFilesystemUtil;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+
+import java.io.Serializable;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Objects;
+
+public class SavedIncBackupV1 implements SavedIncrementalBackup, Serializable {
+
+ private final ObjectCollection2 objectCollection2;
+ private final String backupName;
+ private final ZonedDateTime backupTime;
+ private final long totalSizeBytes;
+ private final long increasedSizeBytes;
+ private final int filesAdded;
+ private final int totalFiles;
+ private static final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");
+
+ public SavedIncBackupV1(ObjectCollection2 objectCollection2, String backupName, ZonedDateTime backupTime, long totalSizeBytes, long increasedSizeBytes, int filesAdded, int totalFiles) {
+ this.totalFiles = totalFiles;
+ Objects.requireNonNull(objectCollection2);
+ Objects.requireNonNull(backupName);
+ Objects.requireNonNull(backupTime);
+ this.objectCollection2 = objectCollection2;
+ this.backupName = backupName;
+ this.backupTime = backupTime;
+ this.totalSizeBytes = totalSizeBytes;
+ this.increasedSizeBytes = increasedSizeBytes;
+ this.filesAdded = filesAdded;
+ }
+
+ @Override
+ public ObjectCollection2 getObjectCollection() {
+ return objectCollection2;
+ }
+
+ @Override
+ public String getBackupName() {
+ return backupName;
+ }
+
+ @Override
+ public ZonedDateTime getBackupTime() {
+ return backupTime;
+ }
+
+ @Override
+ public int getFilesAdded() {
+ return filesAdded;
+ }
+
+ @Override
+ public long getTotalSizeBytes() {
+ return totalSizeBytes;
+ }
+
+ @Override
+ public long getIncreasedSizeBytes() {
+ return increasedSizeBytes;
+ }
+
+ public int getTotalFiles() {
+ return totalFiles;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SavedIncBackupV1 that = (SavedIncBackupV1) o;
+ return totalSizeBytes == that.totalSizeBytes &&
+ increasedSizeBytes == that.increasedSizeBytes &&
+ filesAdded == that.filesAdded &&
+ totalFiles == that.totalFiles &&
+ objectCollection2.equals(that.objectCollection2) &&
+ backupName.equals(that.backupName) &&
+ backupTime.equals(that.backupTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(objectCollection2, backupName, backupTime, totalSizeBytes, increasedSizeBytes, filesAdded, totalFiles);
+ }
+
+ @Override
+ public String toString() {
+ return String.format(
+ "%s, created at %s, files: %d (total size: %s), copied size: %s, files added: %d",
+ backupName,
+ backupTime.format(formatter),
+ totalFiles,
+ BackupFilesystemUtil.getFriendlyFileSizeString(totalSizeBytes),
+ BackupFilesystemUtil.getFriendlyFileSizeString(increasedSizeBytes),
+ filesAdded
+ );
+ }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncrementalBackup.java b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncrementalBackup.java
new file mode 100644
index 0000000..e2e50b6
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/backup/incremental/serializer/SavedIncrementalBackup.java
@@ -0,0 +1,62 @@
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+
+
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+
+import java.io.Serializable;
+import java.time.ZonedDateTime;
+
+/**
+ * The abstraction of an object saved on disk, containing all information (except the binary file data) about an incremental backup.
+ */
+public interface SavedIncrementalBackup extends Serializable {
+
+ /**
+ * Get an instance with the latest version.
+ */
+ static SavedIncrementalBackup newLatest(ObjectCollection2 objectCollection2, String backupName, ZonedDateTime backupTime, long totalSizeBytes, long increasedSizeBytes, int filesAdded, int totalFiles) {
+ return new SavedIncBackupV1(objectCollection2, backupName, backupTime, totalSizeBytes, increasedSizeBytes, filesAdded, totalFiles);
+ }
+
+ /**
+ * Get the object collection of the level directory.
+ *
+ * @return the object collection.
+ */
+ ObjectCollection2 getObjectCollection();
+
+ /**
+ * Get the custom backup name.
+ *
+ * @return the backup name.
+ */
+ String getBackupName();
+
+ /**
+ * Get the time when this backup was made.
+ *
+ * @return the time.
+ */
+ ZonedDateTime getBackupTime();
+
+ /**
+ * Get new files added to the base.
+ *
+ * @return file count.
+ */
+ int getFilesAdded();
+
+ /**
+ * Get the total size of the saved world.
+ *
+ * @return the size in bytes.
+ */
+ long getTotalSizeBytes();
+
+ /**
+     * Get the additional storage used to add this backup into the base.
+ *
+ * @return the increased size in bytes.
+ */
+ long getIncreasedSizeBytes();
+}
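
For orientation, a minimal sketch (not part of this commit) that ties the new pieces together: hash a directory into an ObjectCollection2, wrap it in the latest SavedIncrementalBackup, and round-trip it through IncBackupInfoSerializer. The directory and file paths are hypothetical:

import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionFactory;
import com.keuin.kbackupfabric.backup.incremental.identifier.Sha256Identifier;
import com.keuin.kbackupfabric.backup.incremental.serializer.IncBackupInfoSerializer;
import com.keuin.kbackupfabric.backup.incremental.serializer.SavedIncrementalBackup;

import java.io.File;
import java.io.IOException;
import java.time.ZonedDateTime;

class IndexRoundTripSketch {
    public static void main(String[] args) throws IOException {
        // hash a world directory into an index (single thread, small parallel threshold)
        ObjectCollection2 collection = new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), 1, 16)
                .fromDirectory(new File("./some-world"));          // hypothetical directory
        // attach metadata and serialize it in the latest (V1) format
        SavedIncrementalBackup backup = SavedIncrementalBackup.newLatest(
                collection, "manual-backup", ZonedDateTime.now(), 0, 0, 0, 0);
        File indexFile = new File("./some-index-file");            // hypothetical file
        IncBackupInfoSerializer.toFile(indexFile, backup);
        // fromFile also accepts the older v0/bare-collection formats
        SavedIncrementalBackup reloaded = IncBackupInfoSerializer.fromFile(indexFile);
        System.out.println(reloaded);
    }
}
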
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/feedback/IncrementalBackupFeedback.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/feedback/IncrementalBackupFeedback.java
index f39fde6..1a05c8c 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/backup/feedback/IncrementalBackupFeedback.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/feedback/IncrementalBackupFeedback.java
@@ -1,12 +1,15 @@
package com.keuin.kbackupfabric.operation.backup.feedback;
+import com.keuin.kbackupfabric.backup.incremental.manager.IncCopyResult;
+import org.jetbrains.annotations.Nullable;
+
public class IncrementalBackupFeedback implements BackupFeedback {
private final boolean success;
- private final int newFilesAdded;
+ private final IncCopyResult copyResult;
- public IncrementalBackupFeedback(boolean success, int newFilesAdded) {
+ public IncrementalBackupFeedback(boolean success, @Nullable IncCopyResult copyResult) {
this.success = success;
- this.newFilesAdded = newFilesAdded;
+ this.copyResult = copyResult;
}
@Override
@@ -14,15 +17,15 @@ public class IncrementalBackupFeedback implements BackupFeedback {
return success;
}
- public long getNewFilesAdded() {
- return newFilesAdded;
+ public IncCopyResult getCopyResult() {
+ return copyResult;
}
@Override
public String getFeedback() {
- if (success && newFilesAdded >= 0)
- return String.format("File(s) added: %d.", newFilesAdded);
+ if (success && copyResult != null)
+ return String.format("File(s) added: %s.", copyResult);
else
- return "";
+ return "Backup failed.";
}
}
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethod.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethod.java
index 569c4c6..2320ef3 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethod.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethod.java
@@ -1,10 +1,14 @@
package com.keuin.kbackupfabric.operation.backup.method;
-import com.keuin.kbackupfabric.backup.incremental.ObjectCollection;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionFactory;
-import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionSerializer;
import com.keuin.kbackupfabric.backup.incremental.identifier.Sha256Identifier;
+import com.keuin.kbackupfabric.backup.incremental.manager.IncCopyResult;
import com.keuin.kbackupfabric.backup.incremental.manager.IncrementalBackupStorageManager;
+import com.keuin.kbackupfabric.backup.incremental.serializer.IncBackupInfoSerializer;
+import com.keuin.kbackupfabric.backup.incremental.serializer.SavedIncrementalBackup;
+import com.keuin.kbackupfabric.backup.name.BackupFileNameEncoder;
+import com.keuin.kbackupfabric.backup.name.IncrementalBackupFileNameEncoder;
import com.keuin.kbackupfabric.operation.backup.feedback.IncrementalBackupFeedback;
import com.keuin.kbackupfabric.util.FilesystemUtil;
import com.keuin.kbackupfabric.util.PrintUtil;
@@ -13,6 +17,7 @@ import com.keuin.kbackupfabric.util.ThreadingUtil;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
+import java.time.ZoneId;
import java.util.Arrays;
import java.util.HashSet;
import java.util.logging.Logger;
@@ -35,9 +40,7 @@ public class ConfiguredIncrementalBackupMethod implements ConfiguredBackupMethod
@Override
public IncrementalBackupFeedback backup() {
-
final int hashFactoryThreads = ThreadingUtil.getRecommendedThreadCount(); // how many threads do we use to generate the hash tree
-
LOGGER.info("Threads: " + hashFactoryThreads);
IncrementalBackupFeedback feedback;
@@ -46,24 +49,44 @@ public class ConfiguredIncrementalBackupMethod implements ConfiguredBackupMethod
// construct incremental backup index
PrintUtil.info("Hashing files...");
- ObjectCollection collection = new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), hashFactoryThreads, 16)
+ // TODO
+ ObjectCollection2 collection = new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), hashFactoryThreads, 16)
.fromDirectory(levelPathFile, new HashSet<>(Arrays.asList("session.lock", "kbackup_metadata")));
// update storage
PrintUtil.info("Copying files...");
IncrementalBackupStorageManager storageManager = new IncrementalBackupStorageManager(Paths.get(backupBaseDirectory));
- int filesAdded = storageManager.addObjectCollection(collection, levelPathFile);
+ IncCopyResult copyResult = storageManager.addObjectCollection(collection, levelPathFile);
+ if (copyResult == null) {
+ PrintUtil.info("Failed to backup. No further information.");
+ return new IncrementalBackupFeedback(false, null);
+ }
// save index file
PrintUtil.info("Saving index file...");
- ObjectCollectionSerializer.toFile(collection, new File(backupIndexFileSaveDirectory, backupIndexFileName));
+
+ // legacy index file
+// ObjectCollectionSerializer.toFile(collection, new File(backupIndexFileSaveDirectory, backupIndexFileName));
+
+ // newer saved info (with metadata)
+ File indexFile = new File(backupIndexFileSaveDirectory, backupIndexFileName);
+ BackupFileNameEncoder.BackupBasicInformation info = new IncrementalBackupFileNameEncoder().decode(backupIndexFileName);
+ IncBackupInfoSerializer.toFile(indexFile, SavedIncrementalBackup.newLatest(
+ collection,
+ info.customName,
+ info.time.atZone(ZoneId.systemDefault()),
+ copyResult.getBytesTotal(),
+ copyResult.getBytesCopied(),
+ copyResult.getFilesCopied(),
+ copyResult.getTotalFiles()
+ ));
// return result
PrintUtil.info("Incremental backup finished.");
- feedback = new IncrementalBackupFeedback(filesAdded >= 0, filesAdded);
+ feedback = new IncrementalBackupFeedback(true, copyResult);
} catch (IOException e) {
e.printStackTrace(); // at least we should print it out if we discard the exception... Better than doing nothing.
- feedback = new IncrementalBackupFeedback(false, 0);
+ feedback = new IncrementalBackupFeedback(false, null);
}
if (!feedback.isSuccess()) {
@@ -85,10 +108,12 @@ public class ConfiguredIncrementalBackupMethod implements ConfiguredBackupMethod
public boolean restore() throws IOException {
// load collection
PrintUtil.info("Loading file list...");
- ObjectCollection collection = ObjectCollectionSerializer.fromFile(
+ SavedIncrementalBackup info = IncBackupInfoSerializer.fromFile(
new File(backupIndexFileSaveDirectory, backupIndexFileName)
);
+ PrintUtil.info("Backup Info: " + info);
+
// delete old level
File levelPathFile = new File(levelPath);
PrintUtil.info("Deleting old level...");
@@ -100,7 +125,7 @@ public class ConfiguredIncrementalBackupMethod implements ConfiguredBackupMethod
// restore file
PrintUtil.info("Copying files...");
IncrementalBackupStorageManager storageManager = new IncrementalBackupStorageManager(Paths.get(backupBaseDirectory));
- int restoreObjectCount = storageManager.restoreObjectCollection(collection, levelPathFile);
+ int restoreObjectCount = storageManager.restoreObjectCollection(info.getObjectCollection(), levelPathFile);
PrintUtil.info(String.format("%d file(s) restored.", restoreObjectCount));
return true;
diff --git a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection.java b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectCollection.java
index 2d07fb4..d36ff59 100644
--- a/src/main/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollection.java
+++ b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectCollection.java
@@ -1,9 +1,17 @@
-package com.keuin.kbackupfabric.backup.incremental;
+package com.keuin.kbackupfabric.util.backup.incremental;
import java.io.Serializable;
import java.util.*;
+/**
+ * This class must keep the fully-qualified name `com.keuin.kbackupfabric.util.backup.incremental.ObjectCollection`,
+ * or it will not be compatible with old backups.
+ * It remains only for backward compatibility and should be converted to `ObjectCollection2` as soon as possible.
+ */
+@Deprecated
public class ObjectCollection implements Serializable {
+
+ private static final long serialVersionUID = -3098905094513096717L;
private final String name;
private final Map<String, ObjectElement> elements;
private final Map<String, ObjectCollection> subCollections;
diff --git a/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectElement.java b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectElement.java
new file mode 100644
index 0000000..a101fd3
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/ObjectElement.java
@@ -0,0 +1,61 @@
+package com.keuin.kbackupfabric.util.backup.incremental;
+
+import com.keuin.kbackupfabric.util.backup.incremental.identifier.ObjectIdentifier;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+public class ObjectElement implements Serializable {
+
+ private static final long serialVersionUID = 268304683651745899L;
+ private final String name;
+ private final ObjectIdentifier identifier;
+
+ public ObjectElement(String name, ObjectIdentifier identifier) {
+ Objects.requireNonNull(name);
+ Objects.requireNonNull(identifier);
+ this.name = name;
+ this.identifier = identifier;
+ }
+
+ /**
+ * Get file name.
+ *
+ * @return the file name.
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+     * Get the file identifier, which is expected to differ between files with different contents.
+ *
+ * @return the identifier.
+ */
+ public ObjectIdentifier getIdentifier() {
+ return identifier;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ ObjectElement that = (ObjectElement) o;
+ return name.equals(that.getName()) &&
+ identifier.equals(that.getIdentifier());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, identifier);
+ }
+
+ @Override
+ public String toString() {
+ return "ObjectElement{" +
+ "name='" + name + '\'' +
+ ", identifier=" + identifier +
+ '}';
+ }
+
+} \ No newline at end of file
diff --git a/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/ObjectIdentifier.java b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/ObjectIdentifier.java
new file mode 100644
index 0000000..aece07d
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/ObjectIdentifier.java
@@ -0,0 +1,13 @@
+package com.keuin.kbackupfabric.util.backup.incremental.identifier;
+
+import java.io.Serializable;
+
+/**
+ * The identifier distinguishing files in the object collection.
+ * It should be based on a cryptographic hash function in order to prevent possible attacks on the backup system.
+ * All identifiers should be immutable and implement their own equals method.
+ * Immutable.
+ */
+public interface ObjectIdentifier extends Serializable {
+ String getIdentification();
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/Sha256Identifier.java b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/Sha256Identifier.java
new file mode 100644
index 0000000..31cde29
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/Sha256Identifier.java
@@ -0,0 +1,84 @@
+package com.keuin.kbackupfabric.util.backup.incremental.identifier;
+
+import com.keuin.kbackupfabric.util.BytesUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Objects;
+
+/**
+ * Identifier based on sha256.
+ * Immutable.
+ */
+public class Sha256Identifier extends SingleHashIdentifier {
+
+ private static final long serialVersionUID = 968324214777435054L;
+ private static final int SHA256_LENGTH = 32;
+ private static final Sha256Identifier DUMMY = new Sha256Identifier(new byte[SHA256_LENGTH]); // only for using its hash method
+ private static final String marker = "S2";
+
+ public static Sha256Identifier fromFile(File file) throws IOException {
+ if (!file.isFile()) {
+ throw new IllegalArgumentException("file is not a file");
+ }
+ return new Sha256Identifier(DUMMY.hash(file));
+ }
+
+ /**
+ * Load sha-256 from a named file. Only used in StorageObjectLoader.
+ *
+ * @param fileName the file name.
+ * @return identifier.
+ */
+ static Sha256Identifier fromFileName(String fileName) {
+        if (!fileName.matches(marker + "-[0-9A-Fa-f]{64}"))
+ return null;
+ String hexString = fileName.substring(marker.length() + 1);
+ return new Sha256Identifier(BytesUtil.hexToBytes(hexString));
+ }
+
+ protected Sha256Identifier(byte[] hash) {
+ super(hash, marker);
+ Objects.requireNonNull(hash);
+ if (hash.length != SHA256_LENGTH) {
+ throw new IllegalStateException(String.format("SHA256 must be %d bytes", SHA256_LENGTH));
+ }
+ }
+
+ @Override
+ protected byte[] hash(File file) throws IOException {
+ try {
+ MessageDigest digest = MessageDigest.getInstance("SHA-256");
+
+ try (FileInputStream inputStream = new FileInputStream(file)) {
+ // This does not work. I don't know why
+// FileChannel channel = inputStream.getChannel();
+// ByteBuffer buffer = ByteBuffer.allocate(128);
+// int readLength;
+// while ((readLength = channel.read(buffer)) > 0)
+// digest.update(buffer);
+
+ // This also works, without warnings
+ byte[] readBuffer = new byte[1024 * 1024];
+ int readLength;
+ while ((readLength = inputStream.read(readBuffer)) > 0)
+ digest.update(readBuffer, 0, readLength);
+
+            // The lines below also work, but the IDE will complain about the empty while loop
+// DigestInputStream digestInputStream = new DigestInputStream(inputStream, digest);
+// while(digestInputStream.read() > 0)
+// ;
+
+ return digest.digest();
+ }
+
+ } catch (NoSuchAlgorithmException ignored) {
+ // this shouldn't happen
+ return new byte[SHA256_LENGTH];
+ }
+ }
+
+} \ No newline at end of file
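
For orientation, a standalone sketch (not part of this commit) of the same streaming idea using DigestInputStream with a buffered read loop, which avoids both the byte-at-a-time read and the manual digest.update call:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

class Sha256Sketch {
    static byte[] sha256(File file) throws IOException, NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        byte[] buffer = new byte[1024 * 1024];
        try (DigestInputStream in = new DigestInputStream(new FileInputStream(file), digest)) {
            while (in.read(buffer) != -1) {
                // DigestInputStream feeds every byte read into the digest
            }
        }
        return digest.digest();
    }
}
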
diff --git a/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/SingleHashIdentifier.java b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/SingleHashIdentifier.java
new file mode 100644
index 0000000..6c67918
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/util/backup/incremental/identifier/SingleHashIdentifier.java
@@ -0,0 +1,53 @@
+package com.keuin.kbackupfabric.util.backup.incremental.identifier;
+
+import com.keuin.kbackupfabric.util.BytesUtil;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * A simple identifier based on a single hash function.
+ * Immutable.
+ */
+public abstract class SingleHashIdentifier implements ObjectIdentifier {
+
+ private final byte[] hash;
+ private final String type;
+
+ protected SingleHashIdentifier(byte[] hash, String type) {
+ Objects.requireNonNull(hash);
+ Objects.requireNonNull(type);
+ this.hash = Arrays.copyOf(hash, hash.length);
+ this.type = type;
+ }
+
+ /**
+ * The hash function.
+ *
+ * @param file the file to be hashed.
+ * @return the hash bytes.
+ */
+ protected abstract byte[] hash(File file) throws IOException;
+
+ @Override
+ public String getIdentification() {
+ return type + "-" + BytesUtil.bytesToHex(hash);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+        if (!(obj instanceof SingleHashIdentifier)) {
+ return false;
+ }
+ return Arrays.equals(hash, ((SingleHashIdentifier) obj).hash);
+ }
+
+ @Override
+ public int hashCode() {
+ int result = Objects.hash(type);
+ result = 31 * result + Arrays.hashCode(hash);
+ return result;
+ }
+}
diff --git a/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactoryTest.java b/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactoryTest.java
index 5052a82..e686e30 100644
--- a/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactoryTest.java
+++ b/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionFactoryTest.java
@@ -11,10 +11,10 @@ import static org.junit.Assert.*;
public class ObjectCollectionFactoryTest {
- private void validate(ObjectCollection collection, List<String> subCollections, Map<String, String> subElements) {
+ private void validate(ObjectCollection2 collection, List<String> subCollections, Map<String, String> subElements) {
assertEquals(subCollections.size(), collection.getSubCollectionMap().size());
assertEquals(subElements.size(), collection.getElementSet().size());
- for (Map.Entry<String, ObjectCollection> c : collection.getSubCollectionMap().entrySet()) {
+ for (Map.Entry<String, ObjectCollection2> c : collection.getSubCollectionMap().entrySet()) {
assertEquals(c.getKey(), c.getValue().getName());
assertTrue(subCollections.contains(c.getKey()));
}
@@ -68,7 +68,7 @@ public class ObjectCollectionFactoryTest {
try {
ObjectCollectionFactory<Sha256Identifier> factory =
new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), threads, multiThreadThreshold);
- ObjectCollection collection =
+ ObjectCollection2 collection =
factory.fromDirectory(new File("./testfile/ObjectCollectionFactoryTest"));
assertEquals("ObjectCollectionFactoryTest", collection.getName());
diff --git a/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializerTest.java b/src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializerTest.java
index d21e5b3..e7f56b0 100644
--- a/src/test/java/com/keuin/kbackupfabric/backup/incremental/ObjectCollectionSerializerTest.java
+++ b/src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBackupInfoSerializerTest.java
@@ -1,15 +1,18 @@
-package com.keuin.kbackupfabric.backup.incremental;
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionFactory;
import com.keuin.kbackupfabric.backup.incremental.identifier.Sha256Identifier;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
+import java.time.ZonedDateTime;
import static org.junit.Assert.assertEquals;
-public class ObjectCollectionSerializerTest {
+public class IncBackupInfoSerializerTest {
@Test
public void testSerializationConsistency1() throws IOException {
@@ -54,16 +57,17 @@ public class ObjectCollectionSerializerTest {
public void testSerializationConsistency(int threads, int multiThreadThreshold) throws IOException {
ObjectCollectionFactory<Sha256Identifier> factory =
new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), threads, multiThreadThreshold);
- ObjectCollection collection =
+ ObjectCollection2 collection =
factory.fromDirectory(new File("./testfile/ObjectCollectionFactoryTest"));
File file = new File("./testfile/serialized");
if (file.exists()) {
Files.delete(file.toPath());
}
- ObjectCollectionSerializer.toFile(collection, file);
- ObjectCollection collection2 = ObjectCollectionSerializer.fromFile(file);
+ SavedIncrementalBackup backup = SavedIncrementalBackup.newLatest(collection, ":name:", ZonedDateTime.now(), 10000, 2000, 10, 20);
+ IncBackupInfoSerializer.toFile(file, backup);
+ SavedIncrementalBackup info = IncBackupInfoSerializer.fromFile(file);
Files.delete(file.toPath());
- assertEquals(collection, collection2);
+ assertEquals(backup, info);
}
} \ No newline at end of file
diff --git a/src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBakupBackwardCompatibilityTest.java b/src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBakupBackwardCompatibilityTest.java
new file mode 100644
index 0000000..d50b264
--- /dev/null
+++ b/src/test/java/com/keuin/kbackupfabric/backup/incremental/serializer/IncBakupBackwardCompatibilityTest.java
@@ -0,0 +1,45 @@
+package com.keuin.kbackupfabric.backup.incremental.serializer;
+
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollection2;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionFactory;
+import com.keuin.kbackupfabric.backup.incremental.ObjectCollectionSerializer;
+import com.keuin.kbackupfabric.backup.incremental.identifier.Sha256Identifier;
+import com.keuin.kbackupfabric.backup.name.IncrementalBackupFileNameEncoder;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+
+import static org.junit.Assert.*;
+
+public class IncBakupBackwardCompatibilityTest {
+
+ private final String customName = "test_backup";
+ private final LocalDateTime backupTime = LocalDateTime.of(2000, 1, 1, 1, 1, 1, 1);
+
+ private final Path testRoot = Paths.get(".\\testfile\\IncBackupBackwardCompatibilityTest");
+ private final File indexFile = new File(testRoot.toString(), new IncrementalBackupFileNameEncoder().encode(customName, backupTime));
+
+ @Test
+ public void testBackwardCompatibility() throws IOException {
+
+ if (!testRoot.toFile().isDirectory()) {
+ if (!testRoot.toFile().mkdirs())
+ fail("Cannot initialize test environment: cannot create path.");
+ }
+
+ // now we make an old-style backup index file
+ ObjectCollectionFactory<Sha256Identifier> factory =
+ new ObjectCollectionFactory<>(Sha256Identifier.getFactory(), 1, 0);
+ ObjectCollection2 collection = factory.fromDirectory(testRoot.toFile());
+ ObjectCollectionSerializer.toFile(collection, indexFile);
+ SavedIncrementalBackup info = IncBackupInfoSerializer.fromFile(indexFile);
+ assertEquals(collection, info.getObjectCollection());
+ assertEquals(customName, info.getBackupName());
+ assertTrue(backupTime.toEpochSecond(ZoneOffset.UTC) - info.getBackupTime().toLocalDateTime().toEpochSecond(ZoneOffset.UTC) <= 2);
+ }
+}
diff --git a/src/test/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethodTest.java b/src/test/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethodTest.java
index badc744..80c643d 100644
--- a/src/test/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethodTest.java
+++ b/src/test/java/com/keuin/kbackupfabric/operation/backup/method/ConfiguredIncrementalBackupMethodTest.java
@@ -1,5 +1,6 @@
package com.keuin.kbackupfabric.operation.backup.method;
+import com.keuin.kbackupfabric.backup.name.IncrementalBackupFileNameEncoder;
import com.keuin.kbackupfabric.operation.backup.feedback.IncrementalBackupFeedback;
import org.apache.commons.codec.digest.DigestUtils;
import org.junit.Test;
@@ -8,6 +9,7 @@ import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.time.LocalDateTime;
import java.util.*;
import java.util.function.Function;
@@ -19,7 +21,8 @@ public class ConfiguredIncrementalBackupMethodTest {
private final String testTempPath = (new File("R:\\").isDirectory()) ? "R:\\" : ".\\testfile\\ConfiguredIncrementalBackupMethodTest";
private final String sourceDirectoryName = "source";
private final String destDirectoryName = "destination";
- private final String indexFileName = "index";
+ private final String customBackupName = "index";
+ private final String indexFileName = new IncrementalBackupFileNameEncoder().encode(customBackupName, LocalDateTime.now());
private final double directoryFactor = 0.05;
private final double fileFactor = 0.1;