Diffstat (limited to 'src/main/java/com/keuin/kbackupfabric/operation')
7 files changed, 248 insertions, 55 deletions
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/BackupOperation.java b/src/main/java/com/keuin/kbackupfabric/operation/BackupOperation.java
index 071726b..d03c347 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/BackupOperation.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/BackupOperation.java
@@ -8,7 +8,6 @@ import com.keuin.kbackupfabric.util.PrintUtil;
 import com.keuin.kbackupfabric.util.backup.builder.BackupFileNameBuilder;
 import com.keuin.kbackupfabric.util.backup.formatter.BackupFileNameFormatter;
 import com.mojang.brigadier.context.CommandContext;
-import com.sun.istack.internal.NotNull;
 import net.minecraft.server.MinecraftServer;
 import net.minecraft.server.command.ServerCommandSource;
 import net.minecraft.world.World;
@@ -30,7 +29,7 @@ public class BackupOperation extends InvokableAsyncBlockingOperation {

     private long startTime;

-    public BackupOperation(@NotNull CommandContext<ServerCommandSource> context, @NotNull String backupName, @NotNull BackupMethod backupMethod, @NotNull BackupFileNameBuilder backupFileNameBuilder, @NotNull BackupFileNameFormatter backupFileNameFormatter) {
+    public BackupOperation(CommandContext<ServerCommandSource> context, String backupName, BackupMethod backupMethod) {
         super("BackupWorker");
         this.context = context;
         this.backupName = backupName;
@@ -56,7 +55,8 @@ public class BackupOperation extends InvokableAsyncBlockingOperation {
             String levelPath = getLevelPath(server);
             String backupFileName = getBackupFileName(backupName);

-            if(backupMethod.backup(backupName,levelPath,backupSaveDirectory)) {
+            BackupMethod.BackupResult result = backupMethod.backup(backupName,levelPath,backupSaveDirectory);
+            if(result.isSuccess()) {
                 // Restore old autosave switch stat
                 server.getWorlds().forEach(world -> world.savingDisabled = oldWorldsSavingDisabled.getOrDefault(world, true));

@@ -64,10 +64,7 @@ public class BackupOperation extends InvokableAsyncBlockingOperation {
                 long timeElapsedMillis = System.currentTimeMillis() - startTime;
                 String msgText = String.format("Backup finished. Time elapsed: %.2fs.", timeElapsedMillis / 1000.0);
                 File backupZipFile = new File(backupSaveDirectory, backupFileName);
-                try {
-                    msgText += String.format(" File size: %s.", humanFileSize(backupZipFile.length()));
-                } catch (SecurityException ignored) {
-                }
+                msgText += String.format(" File size: %s.", humanFileSize(result.getBackupSizeBytes()));
                 PrintUtil.msgInfo(context, msgText, true);
             } else {
                 // failed
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/DeleteOperation.java b/src/main/java/com/keuin/kbackupfabric/operation/DeleteOperation.java
index 30fdfc0..444ca9a 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/DeleteOperation.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/DeleteOperation.java
@@ -1,7 +1,7 @@
 package com.keuin.kbackupfabric.operation;

 import com.keuin.kbackupfabric.operation.abstracts.InvokableAsyncBlockingOperation;
-import com.keuin.kbackupfabric.util.backup.BackupNameSuggestionProvider;
+import com.keuin.kbackupfabric.util.backup.suggestion.BackupNameSuggestionProvider;
 import com.keuin.kbackupfabric.util.PrintUtil;
 import com.mojang.brigadier.context.CommandContext;
 import net.minecraft.server.MinecraftServer;
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/RestoreOperation.java b/src/main/java/com/keuin/kbackupfabric/operation/RestoreOperation.java
index ef6ab2b..22397a1 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/RestoreOperation.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/RestoreOperation.java
@@ -84,54 +84,21 @@ public class RestoreOperation extends InvokableBlockingOperation {
                     }
                 }

-                PrintUtil.info("Wait for 5 seconds ...");
-                try {
-                    Thread.sleep(5000);
-                } catch (InterruptedException ignored) {
-                }
-
-                // Delete old level
-                PrintUtil.info("Server stopped. Deleting old level ...");
-                File levelDirFile = new File(levelDirectory);
-                long startTime = System.currentTimeMillis();
-
-                int failedCounter = 0;
-                final int MAX_RETRY_TIMES = 20;
-                while (failedCounter < MAX_RETRY_TIMES) {
-                    System.gc();
-                    if (!levelDirFile.delete() && levelDirFile.exists()) {
-                        System.gc();
-                        forceDelete(levelDirFile); // Try to force delete.
-                    }
-                    if (!levelDirFile.exists())
-                        break;
-                    ++failedCounter;
-                    try {
-                        Thread.sleep(500);
+                int cnt = 5;
+                do {
+                    PrintUtil.info(String.format("Wait %d seconds ...", cnt));
+                    try{
+                        Thread.sleep(1000);
                     } catch (InterruptedException ignored) {
                     }
-                }
-                if (levelDirFile.exists()) {
-                    PrintUtil.error(String.format("Cannot restore: failed to delete old level %s .", levelDirFile.getName()));
-                    return;
-                }
+                }while(--cnt > 0);

-                // Decompress archive
-                PrintUtil.info("Decompressing archived level ...");
-                ZipUtil.unzip(backupFilePath, levelDirectory, false);
-                long endTime = System.currentTimeMillis();
-                PrintUtil.info(String.format("Restore complete! (%.2fs) Please restart the server manually.", (endTime - startTime) / 1000.0));
-                PrintUtil.info("If you want to restart automatically after restoring, please visit the project manual at: https://github.com/keuin/KBackup-Fabric/blob/master/README.md");
-
-                try {
-                    Thread.sleep(1000);
-                } catch (InterruptedException ignored) {
-                }

+                ////////////////////
                 //ServerRestartUtil.forkAndRestart();
                 System.exit(111);

-            } catch (SecurityException | IOException | ZipUtilException e) {
+            } catch (SecurityException e) {
                 PrintUtil.error("An exception occurred while restoring: " + e.getMessage());
             }
         }
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/BackupMethod.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/BackupMethod.java
index b0b77cb..4e9eb6c 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/backup/BackupMethod.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/BackupMethod.java
@@ -2,10 +2,12 @@ package com.keuin.kbackupfabric.operation.backup;

 import com.keuin.kbackupfabric.util.backup.builder.BackupFileNameBuilder;
 import com.keuin.kbackupfabric.util.backup.formatter.BackupFileNameFormatter;
-import com.sun.istack.internal.NotNull;

 import java.io.IOException;

+/**
+ * Provide specific backup method, which is implemented statelessly.
+ */
 public interface BackupMethod {

     /**
@@ -14,10 +16,29 @@ public interface BackupMethod {
      * @param backupName the backup name.
      * @return if the backup operation succeed.
      */
-    boolean backup(@NotNull String backupName, @NotNull String levelPath, @NotNull String backupSaveDirectory) throws IOException;
+    BackupResult backup(String backupName, String levelPath, String backupSaveDirectory) throws IOException;
+
+    boolean restore(String backupName, String levelPath, String backupSaveDirectory) throws IOException;

     BackupFileNameBuilder getBackupFileNameBuilder();

     BackupFileNameFormatter getBackupFileNameFormatter();

+    class BackupResult {
+        private final boolean success;
+        private final long backupSizeBytes;
+
+        public BackupResult(boolean success, long backupSizeBytes) {
+            this.success = success;
+            this.backupSizeBytes = backupSizeBytes;
+        }
+
+        public boolean isSuccess() {
+            return success;
+        }
+
+        public long getBackupSizeBytes() {
+            return backupSizeBytes;
+        }
+    }
 }
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupMethod.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupMethod.java
new file mode 100644
index 0000000..4a87bb3
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupMethod.java
@@ -0,0 +1,62 @@
+package com.keuin.kbackupfabric.operation.backup;
+
+import com.google.gson.JsonObject;
+import com.keuin.kbackupfabric.util.PrintUtil;
+import com.keuin.kbackupfabric.util.backup.builder.BackupFileNameBuilder;
+import com.keuin.kbackupfabric.util.backup.builder.ObjectTreeBackupFileNameBuilder;
+import com.keuin.kbackupfabric.util.backup.formatter.BackupFileNameFormatter;
+import com.keuin.kbackupfabric.util.backup.formatter.ObjectTreeBackupFileNameFormatter;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.time.LocalDateTime;
+
+public class IncrementalBackupMethod implements BackupMethod {
+
+    private static final IncrementalBackupMethod INSTANCE = new IncrementalBackupMethod();
+
+    public static IncrementalBackupMethod getInstance() {
+        return INSTANCE;
+    }
+
+    @Override
+    public BackupResult backup(String backupName, String levelPath, String backupSaveDirectory) throws IOException {
+        /*
+        1. Analyze the save directory, to get a json containing md5 values of all files.
+        2. Copy new files which we do not have in our backup repository.
+        3. Save the above json as a backup file. When restoring from this,
+        what we have to do is just copy all files back from the repository,
+        based on their md5 digests.
+         */
+
+        boolean success = true;
+        // Generate JSON
+        JsonObject hashJson = IncrementalBackupUtil.generateDirectoryJsonObject(levelPath);
+        // Copy files
+        long newFilesSizeBytes = IncrementalBackupUtil.saveNewFiles(backupSaveDirectory, levelPath, hashJson);
+        if(newFilesSizeBytes < 0) {
+            success = false;
+            PrintUtil.error("Failed to copy new files to object tree.");
+        }
+        // Save JSON tree
+        File jsonFile = new File(String.valueOf(Paths.get(backupSaveDirectory, BackupFileNameBuilder.objectTreeBackup().build(LocalDateTime.now(), backupName))));
+        // TODO
+        return new BackupResult(success, newFilesSizeBytes);
+    }
+
+    @Override
+    public boolean restore(String backupName, String levelPath, String backupSaveDirectory) throws IOException {
+        return false;
+    }
+
+    @Override
+    public BackupFileNameBuilder getBackupFileNameBuilder() {
+        return null;
+    }
+
+    @Override
+    public BackupFileNameFormatter getBackupFileNameFormatter() {
+        return null;
+    }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupUtil.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupUtil.java
new file mode 100644
index 0000000..f90aef1
--- /dev/null
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/IncrementalBackupUtil.java
@@ -0,0 +1,90 @@
+package com.keuin.kbackupfabric.operation.backup;
+
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.keuin.kbackupfabric.util.FilesystemUtil;
+import org.apache.commons.codec.digest.DigestUtils;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.*;
+import java.util.Map;
+
+public class IncrementalBackupUtil {
+    /**
+     * Generate a json object representing a directory and its all sub files and directories.
+     * @param path path to the directory.
+     * @return a json object.
+     */
+    public static JsonObject generateDirectoryJsonObject(String path) throws IOException {
+        JsonObject json = new JsonObject();
+        File directory = new File(path);
+        if (!(directory.isDirectory() && directory.exists()))
+            throw new IOException(String.format("Path %s is not a valid directory.", path));
+
+        // Iterate all sub files using BFS.
+        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(Paths.get(path))) {
+            for (Path sub : directoryStream) {
+                if (sub.toFile().isFile()) {
+                    // A sub file
+                    // Just hash and add it as a string
+                    try (InputStream is = Files.newInputStream(sub)) {
+                        String md5 = org.apache.commons.codec.digest.DigestUtils.md5Hex(is);
+                        json.addProperty(sub.getFileName().toString(), md5);
+                    }
+                } else {
+                    // A sub directory
+                    // Search into
+                    json.addProperty(String.valueOf(sub.getFileName()), sub.toString());
+                }
+            }
+        }
+
+        return json;
+    }
+
+    /**
+     * Save new (or modified) files to target path, based on hash json.
+     * @param targetSavePath where we should save new files.
+     * @param sourcePath where new files come from. This path must be the base directory of given hash json.
+     * @param hashJson the json object obtained by calling generateDirectoryJsonObject method.
+     * @return total size of new files. If failed, will return -1.
+     */
+    public static long saveNewFiles(String targetSavePath, String sourcePath, JsonObject hashJson) throws IOException {
+        long bytesCopied = 0;
+        for (Map.Entry<String, JsonElement> entry : hashJson.entrySet()) {
+            String key = entry.getKey();
+            JsonElement value = entry.getValue();
+            if (value.isJsonPrimitive() && value.getAsJsonPrimitive().isString()) {
+                // A sub file
+                // key is file name
+                // value is file md5
+                String md5 = value.getAsJsonPrimitive().getAsString();
+                File saveTarget = new File(targetSavePath, md5);
+                if (!saveTarget.exists()) {
+                    // Target file does not exist. We have to copy this to the target.
+                    File sourceFile = new File(sourcePath, key);
+                    Files.copy(sourceFile.toPath(), saveTarget.toPath(), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES);
+                    try {
+                        bytesCopied += sourceFile.length();
+                    } catch (SecurityException ignored) {
+                        // failed to get the file size. Just ignore this.
+                    }
+                }
+            } else if (value.isJsonObject()) {
+                // A sub directory
+                // key is directory name
+                // value is directory json object
+                // Go into
+                if(!value.isJsonObject())
+                    throw new IllegalArgumentException(String.format("Hash json contains illegal argument of a directory item: %s -> %s.", key, value));
+                Path pathSource = Paths.get(sourcePath, key);
+                bytesCopied += saveNewFiles(targetSavePath, pathSource.toString(), value.getAsJsonObject());
+            } else {
+                throw new IllegalArgumentException(String.format("Hash json contains illegal element: %s -> %s.", key, value));
+            }
+        }
+        return bytesCopied;
+    }
+}
diff --git a/src/main/java/com/keuin/kbackupfabric/operation/backup/PrimitiveBackupMethod.java b/src/main/java/com/keuin/kbackupfabric/operation/backup/PrimitiveBackupMethod.java
index bb1c8cb..854355d 100644
--- a/src/main/java/com/keuin/kbackupfabric/operation/backup/PrimitiveBackupMethod.java
+++ b/src/main/java/com/keuin/kbackupfabric/operation/backup/PrimitiveBackupMethod.java
@@ -2,6 +2,7 @@ package com.keuin.kbackupfabric.operation.backup;

 import com.keuin.kbackupfabric.exception.ZipUtilException;
 import com.keuin.kbackupfabric.metadata.BackupMetadata;
+import com.keuin.kbackupfabric.util.FilesystemUtil;
 import com.keuin.kbackupfabric.util.PrintUtil;
 import com.keuin.kbackupfabric.util.ZipUtil;
 import com.keuin.kbackupfabric.util.backup.builder.BackupFileNameBuilder;
@@ -9,14 +10,23 @@ import com.keuin.kbackupfabric.util.backup.formatter.BackupFileNameFormatter;

 import java.io.File;
 import java.io.IOException;
-import java.time.LocalDate;
+import java.nio.file.Paths;
 import java.time.LocalDateTime;

+import static org.apache.commons.io.FileUtils.forceDelete;
+
 public class PrimitiveBackupMethod implements BackupMethod {
+
+    private static final PrimitiveBackupMethod INSTANCE = new PrimitiveBackupMethod();
+
+    public static PrimitiveBackupMethod getInstance() {
+        return INSTANCE;
+    }
+
     @Override
-    public boolean backup(String backupName, String levelPath, String backupSaveDirectory) throws IOException {
+    public BackupResult backup(String backupName, String levelPath, String backupSaveDirectory) throws IOException {
+        String backupFileName = BackupFileNameBuilder.primitiveZipBackup().build(LocalDateTime.now(),backupName);
         try {
-            String backupFileName = BackupFileNameBuilder.primitiveZipBackup().build(LocalDateTime.now(),backupName);
             BackupMetadata backupMetadata = new BackupMetadata(System.currentTimeMillis(), backupName);

             PrintUtil.info(String.format("zip(srcPath=%s, destPath=%s)", levelPath, backupSaveDirectory));
@@ -25,8 +35,54 @@ public class PrimitiveBackupMethod implements BackupMethod {

         } catch (ZipUtilException exception) {
             PrintUtil.info("Infinite recursive of directory tree detected, backup was aborted.");
+            return new BackupResult(false, 0);
+        }
+
+        // Get backup file size and return
+        return new BackupResult(true, FilesystemUtil.getFileSizeBytes(backupSaveDirectory, backupFileName));
+    }
+
+    @Override
+    public boolean restore(String backupName, String levelDirectory, String backupSaveDirectory) throws IOException {
+        // Delete old level
+        PrintUtil.info("Server stopped. Deleting old level ...");
+        File levelDirFile = new File(levelDirectory);
+        long startTime = System.currentTimeMillis();
+
+        int failedCounter = 0;
+        final int MAX_RETRY_TIMES = 20;
+        while (failedCounter < MAX_RETRY_TIMES) {
+            System.gc();
+            if (!levelDirFile.delete() && levelDirFile.exists()) {
+                System.gc();
+                forceDelete(levelDirFile); // Try to force delete.
+            }
+            if (!levelDirFile.exists())
+                break;
+            ++failedCounter;
+            try {
+                Thread.sleep(500);
+            } catch (InterruptedException ignored) {
+            }
+        }
+        if (levelDirFile.exists()) {
+            PrintUtil.error(String.format("Cannot restore: failed to delete old level %s .", levelDirFile.getName()));
             return false;
         }
+
+        // TODO: Refactor this to the concrete BackupMethod.
+        // Decompress archive
+        PrintUtil.info("Decompressing archived level ...");
+        ZipUtil.unzip(Paths.get(backupSaveDirectory, backupName).toString(), levelDirectory, false);
+        long endTime = System.currentTimeMillis();
+        PrintUtil.info(String.format("Restore complete! (%.2fs) Please restart the server manually.", (endTime - startTime) / 1000.0));
+        PrintUtil.info("If you want to restart automatically after restoring, please visit the project manual at: https://github.com/keuin/KBackup-Fabric/blob/master/README.md");
+
+// try {
+// Thread.sleep(1000);
+// } catch (InterruptedException ignored) {
+// }
+
         return true;
     }

@@ -37,6 +93,6 @@ public class PrimitiveBackupMethod implements BackupMethod {

     @Override
     public BackupFileNameFormatter getBackupFileNameFormatter() {
-        return BFNF;
+        return BackupFileNameFormatter.primitiveZipBackup();
     }
 }
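
The new IncrementalBackupMethod and IncrementalBackupUtil above outline an object-tree scheme: hash every file in the level directory, store each file once under its MD5 digest, and keep a JSON manifest mapping names to digests so a restore only has to copy objects back. The standalone sketch below illustrates that idea with plain JDK APIs; it is not part of this commit. The class name ObjectTreeSketch, the flat relative-path manifest, and the example paths ("world", "backups/objects") are hypothetical, and the commit itself builds a nested Gson JsonObject per directory and hashes with commons-codec's DigestUtils rather than java.security.MessageDigest.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    // Hypothetical sketch of a digest-keyed object store; not the plugin's actual code.
    public class ObjectTreeSketch {

        // Build a manifest: relative path -> MD5 hex digest for every regular file under root.
        static Map<String, String> buildManifest(Path root) throws IOException {
            Map<String, String> manifest = new LinkedHashMap<>();
            List<Path> files;
            try (Stream<Path> walk = Files.walk(root)) {
                files = walk.filter(Files::isRegularFile).collect(Collectors.toList());
            }
            for (Path p : files) {
                manifest.put(root.relativize(p).toString(), md5Hex(p));
            }
            return manifest;
        }

        // Copy files whose digest is not yet present in the object store; returns bytes copied.
        static long copyNewObjects(Path root, Map<String, String> manifest, Path objectStore) throws IOException {
            long copied = 0;
            Files.createDirectories(objectStore);
            for (Map.Entry<String, String> e : manifest.entrySet()) {
                Path target = objectStore.resolve(e.getValue()); // object file name = digest
                if (!Files.exists(target)) {
                    Files.copy(root.resolve(e.getKey()), target, StandardCopyOption.COPY_ATTRIBUTES);
                    copied += Files.size(target);
                }
            }
            return copied;
        }

        static String md5Hex(Path file) throws IOException {
            MessageDigest md;
            try {
                md = MessageDigest.getInstance("MD5");
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e); // MD5 ships with every JRE, so this should not happen
            }
            try (InputStream in = Files.newInputStream(file)) {
                byte[] buf = new byte[8192];
                for (int n = in.read(buf); n != -1; n = in.read(buf)) {
                    md.update(buf, 0, n);
                }
            }
            StringBuilder sb = new StringBuilder();
            for (byte b : md.digest()) {
                sb.append(String.format("%02x", b));
            }
            return sb.toString();
        }

        public static void main(String[] args) throws IOException {
            Path level = Paths.get("world");           // hypothetical level directory
            Path store = Paths.get("backups/objects"); // hypothetical object store
            Map<String, String> manifest = buildManifest(level);
            long bytes = copyNewObjects(level, manifest, store);
            System.out.printf("Copied %d new bytes; manifest lists %d files.%n", bytes, manifest.size());
        }
    }

Because objects are keyed by digest, a file that has not changed since the previous backup is never copied again; only the small JSON manifest has to be written per backup, which is the space saving the incremental method is after.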