From f9603dad3cefdae225a953095bb46c285bae28da Mon Sep 17 00:00:00 2001
From: Danny MacMillan
Date: Tue, 26 Sep 2017 15:04:32 -0600
Subject: [PATCH] Retry downloads with corrupted content up to three times.

Wasabi's GetObject occasionally (approximately 2% of the time in my
testing) returns objects whose contents disagree with what has been
stored in Wasabi. These cause errors when chunks are downloaded
(during restore, for example). Previously, these errors would abort
the restore, requiring that it be started over from the beginning.
This made it effectively impossible to complete any normally-sized
restore where the cumulative chance of encountering such an error
approaches unity.

With this change Duplicacy will retry up to three times if it can't
decrypt the downloaded chunk, or if the downloaded chunk's ID doesn't
agree with a chunk ID computed from the downloaded chunk's content.
---
 src/duplicacy_chunkdownloader.go | 45 ++++++++++++++++++++++----------
 1 file changed, 31 insertions(+), 14 deletions(-)

diff --git a/src/duplicacy_chunkdownloader.go b/src/duplicacy_chunkdownloader.go
index b74e877..d1da10e 100644
--- a/src/duplicacy_chunkdownloader.go
+++ b/src/duplicacy_chunkdownloader.go
@@ -324,22 +324,39 @@ func (downloader *ChunkDownloader) Download(threadIndex int, task ChunkDownloadT
         LOG_DEBUG("CHUNK_FOSSIL", "Chunk %s has been marked as a fossil", chunkID)
     }
 
-    err = downloader.storage.DownloadFile(threadIndex, chunkPath, chunk)
-    if err != nil {
-        LOG_ERROR("UPLOAD_FATAL", "Failed to download the chunk %s: %v", chunkID, err)
-        return false
-    }
+    const MaxDownloadAttempts = 3
+    for downloadAttempt := 0;; downloadAttempt++ {
+        err = downloader.storage.DownloadFile(threadIndex, chunkPath, chunk)
+        if err != nil {
+            LOG_ERROR("UPLOAD_FATAL", "Failed to download the chunk %s: %v", chunkID, err)
+            return false
+        }
 
-    err = chunk.Decrypt(downloader.config.ChunkKey, task.chunkHash)
-    if err != nil {
-        LOG_ERROR("UPLOAD_CHUNK", "Failed to decrypt the chunk %s: %v", chunkID, err)
-        return false
-    }
+        err = chunk.Decrypt(downloader.config.ChunkKey, task.chunkHash)
+        if err != nil {
+            if downloadAttempt < MaxDownloadAttempts {
+                LOG_WARN("RETRY_DOWNLOAD", "Failed to decrypt the chunk %s: %v", chunkID, err)
+                chunk.Reset(false)
+                continue
+            } else {
+                LOG_ERROR("UPLOAD_CHUNK", "Failed to decrypt the chunk %s: %v", chunkID, err)
+                return false
+            }
+        }
 
-    actualChunkID := chunk.GetID()
-    if actualChunkID != chunkID {
-        LOG_FATAL("UPLOAD_CORRUPTED", "The chunk %s has a hash id of %s", chunkID, actualChunkID)
-        return false
+        actualChunkID := chunk.GetID()
+        if actualChunkID != chunkID {
+            if downloadAttempt < MaxDownloadAttempts {
+                LOG_WARN("RETRY_DOWNLOAD", "The chunk %s has a hash id of %s", chunkID, actualChunkID)
+                chunk.Reset(false)
+                continue
+            } else {
+                LOG_FATAL("UPLOAD_CORRUPTED", "The chunk %s has a hash id of %s", chunkID, actualChunkID)
+                return false
+            }
+        }
+
+        break
     }
 
     if len(cachedPath) > 0 {
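
For illustration only, here is a minimal standalone sketch of the same
retry-on-corruption pattern, not part of the patch and not Duplicacy code.
downloadChunk and verifyChunk are hypothetical stand-ins for
storage.DownloadFile and the Decrypt/GetID verification performed in
duplicacy_chunkdownloader.go.

// Standalone sketch of the retry pattern used in the patch.
// downloadChunk and verifyChunk are assumed, hypothetical helpers.
package main

import (
	"errors"
	"fmt"
	"math/rand"
)

// Retries allowed after the first attempt; the patch uses MaxDownloadAttempts = 3.
const maxDownloadAttempts = 3

// downloadChunk simulates a download that occasionally returns corrupted data
// and, more rarely, fails outright.
func downloadChunk(id string) ([]byte, error) {
	if rand.Intn(50) == 0 {
		return nil, errors.New("network error") // hard failure: not retried
	}
	if rand.Intn(10) == 0 {
		return []byte("corrupted"), nil // corrupted content: retried
	}
	return []byte("chunk-" + id), nil
}

// verifyChunk stands in for decrypting the chunk and comparing its computed ID
// against the expected chunk ID.
func verifyChunk(id string, data []byte) bool {
	return string(data) == "chunk-"+id
}

// fetchChunk downloads a chunk and re-downloads it if verification fails,
// up to maxDownloadAttempts retries, mirroring the loop in the patch.
func fetchChunk(id string) ([]byte, error) {
	for attempt := 0; ; attempt++ {
		data, err := downloadChunk(id)
		if err != nil {
			return nil, err // download errors remain fatal, as in the patch
		}
		if verifyChunk(id, data) {
			return data, nil
		}
		if attempt < maxDownloadAttempts {
			fmt.Printf("chunk %s failed verification, retrying (attempt %d)\n", id, attempt+1)
			continue
		}
		return nil, fmt.Errorf("chunk %s still corrupted after %d attempts", id, attempt+1)
	}
}

func main() {
	data, err := fetchChunk("0042")
	if err != nil {
		fmt.Println("restore aborted:", err)
		return
	}
	fmt.Printf("got %d bytes\n", len(data))
}

With maxDownloadAttempts = 3 this makes one initial attempt plus up to three
retries before giving up, which is the behavior described in the commit
message.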