Merge "Import BackupFileBuilder"
This commit is contained in:
@@ -0,0 +1,232 @@
|
||||
/*
|
||||
* Copyright (C) 2019 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.server.backup.encryption.chunking;
|
||||
|
||||
import static com.android.internal.util.Preconditions.checkArgument;
|
||||
import static com.android.internal.util.Preconditions.checkNotNull;
|
||||
import static com.android.internal.util.Preconditions.checkState;
|
||||
|
||||
import android.annotation.Nullable;
|
||||
import android.util.Slog;
|
||||
|
||||
import com.android.server.backup.encryption.chunk.ChunkHash;
|
||||
import com.android.server.backup.encryption.chunk.ChunkListingMap;
|
||||
import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Writes batches of {@link EncryptedChunk} to a diff script, and generates the associated {@link
 * ChunksMetadataProto.ChunkListing} and {@link ChunksMetadataProto.ChunkOrdering}.
 *
 * <p>Output layout (as produced by {@link #finish}): the encoded chunks, followed by the encoded
 * {@link ChunksMetadataProto.ChunksMetadata}, followed by a big-endian long giving the byte offset
 * at which that metadata starts.
 */
public class BackupFileBuilder {
    private static final String TAG = "BackupFileBuilder";

    // Used only to report backup sizes in KiB in the log line below.
    private static final int BYTES_PER_KILOBYTE = 1024;

    private final BackupWriter mBackupWriter;
    private final EncryptedChunkEncoder mEncryptedChunkEncoder;
    // Chunks available from the previous backup; empty for non-incremental backups.
    private final ChunkListingMap mOldChunkListing;
    private final ChunksMetadataProto.ChunkListing mNewChunkListing;
    private final ChunksMetadataProto.ChunkOrdering mChunkOrdering;
    // One entry per distinct chunk written, in the (lexicographical) order they were written.
    private final List<ChunksMetadataProto.Chunk> mKnownChunks = new ArrayList<>();
    // Start offset of each chunk in plaintext order; repeated hashes yield repeated entries.
    private final List<Integer> mKnownStarts = new ArrayList<>();
    // Maps a chunk's hash to its start offset in the backup file, for deduplication.
    private final Map<ChunkHash, Long> mChunkStartPositions;

    // Total encoded size of chunks that were NOT in the old listing (logging only).
    private long mNewChunksSizeBytes;
    // Set by finish(); writeChunks() throws IllegalStateException afterwards.
    private boolean mFinished;

    /**
     * Constructs a new instance which writes raw data to the given {@link OutputStream}, without
     * generating a diff.
     *
     * <p>This class never closes the output stream.
     */
    public static BackupFileBuilder createForNonIncremental(OutputStream outputStream) {
        return new BackupFileBuilder(
                new RawBackupWriter(outputStream), new ChunksMetadataProto.ChunkListing());
    }

    /**
     * Constructs a new instance which writes a diff script to the given {@link OutputStream} using
     * a {@link SingleStreamDiffScriptWriter}.
     *
     * <p>This class never closes the output stream.
     *
     * @param oldChunkListing against which the diff will be generated.
     */
    public static BackupFileBuilder createForIncremental(
            OutputStream outputStream, ChunksMetadataProto.ChunkListing oldChunkListing) {
        return new BackupFileBuilder(
                DiffScriptBackupWriter.newInstance(outputStream), oldChunkListing);
    }

    private BackupFileBuilder(
            BackupWriter backupWriter, ChunksMetadataProto.ChunkListing oldChunkListing) {
        this.mBackupWriter = backupWriter;
        // TODO(b/77188289): Use InlineLengthsEncryptedChunkEncoder for key-value backups
        this.mEncryptedChunkEncoder = new LengthlessEncryptedChunkEncoder();
        this.mOldChunkListing = ChunkListingMap.fromProto(oldChunkListing);

        mNewChunkListing = new ChunksMetadataProto.ChunkListing();
        mNewChunkListing.cipherType = ChunksMetadataProto.AES_256_GCM;
        mNewChunkListing.chunkOrderingType = ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED;

        mChunkOrdering = new ChunksMetadataProto.ChunkOrdering();
        mChunkStartPositions = new HashMap<>();
    }

    /**
     * Writes the given chunks to the output stream, and adds them to the new chunk listing and
     * chunk ordering.
     *
     * <p>Sorts the chunks in lexicographical order before writing.
     *
     * @param allChunks The hashes of all the chunks, in the order they appear in the plaintext.
     * @param newChunks A map from hash to {@link EncryptedChunk} containing the new chunks not
     *     present in the previous backup.
     * @throws IOException if writing a chunk to the underlying writer fails.
     * @throws IllegalStateException if {@link #finish} has already been called.
     */
    public void writeChunks(List<ChunkHash> allChunks, Map<ChunkHash, EncryptedChunk> newChunks)
            throws IOException {
        checkState(!mFinished, "Cannot write chunks after flushing.");

        List<ChunkHash> sortedChunks = new ArrayList<>(allChunks);
        Collections.sort(sortedChunks);
        for (ChunkHash chunkHash : sortedChunks) {
            // As we have already included this chunk in the backup file, don't add it again to
            // deduplicate identical chunks.
            if (!mChunkStartPositions.containsKey(chunkHash)) {
                // getBytesWritten() gives us the start of the chunk.
                mChunkStartPositions.put(chunkHash, mBackupWriter.getBytesWritten());

                writeChunkToFileAndListing(chunkHash, newChunks);
            }
        }

        long totalSizeKb = mBackupWriter.getBytesWritten() / BYTES_PER_KILOBYTE;
        long newChunksSizeKb = mNewChunksSizeBytes / BYTES_PER_KILOBYTE;
        Slog.d(
                TAG,
                "Total backup size: "
                        + totalSizeKb
                        + " kb, new chunks size: "
                        + newChunksSizeKb
                        + " kb");

        // Record starts in plaintext order (allChunks, not sortedChunks) so the ordering can be
        // used to reconstruct the original data.
        for (ChunkHash chunkHash : allChunks) {
            mKnownStarts.add(mChunkStartPositions.get(chunkHash).intValue());
        }
    }

    /**
     * Returns a new listing for all of the chunks written so far, setting the given fingerprint
     * mixer salt (this overrides the {@link ChunksMetadataProto.ChunkListing#fingerprintMixerSalt}
     * in the old {@link ChunksMetadataProto.ChunkListing} passed into
     * {@link #createForIncremental(OutputStream, ChunksMetadataProto.ChunkListing)}).
     */
    public ChunksMetadataProto.ChunkListing getNewChunkListing(
            @Nullable byte[] fingerprintMixerSalt) {
        // TODO: b/141537803 Add check to ensure this is called only once per instance
        // Defensively copy the salt; a null salt is stored as an empty array so the nano proto
        // serializes without throwing.
        mNewChunkListing.fingerprintMixerSalt =
                fingerprintMixerSalt != null
                        ? Arrays.copyOf(fingerprintMixerSalt, fingerprintMixerSalt.length)
                        : new byte[0];
        mNewChunkListing.chunks = mKnownChunks.toArray(new ChunksMetadataProto.Chunk[0]);
        return mNewChunkListing;
    }

    /** Returns a new ordering for all of the chunks written so far, setting the given checksum. */
    public ChunksMetadataProto.ChunkOrdering getNewChunkOrdering(byte[] checksum) {
        // TODO: b/141537803 Add check to ensure this is called only once per instance
        mChunkOrdering.starts = new int[mKnownStarts.size()];
        for (int i = 0; i < mKnownStarts.size(); i++) {
            mChunkOrdering.starts[i] = mKnownStarts.get(i).intValue();
        }
        mChunkOrdering.checksum = Arrays.copyOf(checksum, checksum.length);
        return mChunkOrdering;
    }

    /**
     * Finishes the backup file by writing the chunk metadata and metadata position.
     *
     * <p>Once this is called, calling {@link #writeChunks(List, Map)} will throw {@link
     * IllegalStateException}.
     */
    public void finish(ChunksMetadataProto.ChunksMetadata metadata) throws IOException {
        checkNotNull(metadata, "Metadata cannot be null");

        long startOfMetadata = mBackupWriter.getBytesWritten();
        mBackupWriter.writeBytes(ChunksMetadataProto.ChunksMetadata.toByteArray(metadata));
        // Trailing 8 bytes: big-endian offset of the metadata, so a reader can seek to it.
        mBackupWriter.writeBytes(toByteArray(startOfMetadata));

        mBackupWriter.flush();
        mFinished = true;
    }

    /**
     * Checks if the given chunk hash references an existing chunk or a new chunk, and adds this
     * chunk to the backup file and new chunk listing.
     *
     * @throws IllegalArgumentException if the hash is in neither the old listing nor {@code
     *     newChunks}.
     */
    private void writeChunkToFileAndListing(
            ChunkHash chunkHash, Map<ChunkHash, EncryptedChunk> newChunks) throws IOException {
        checkNotNull(chunkHash, "Hash cannot be null");

        if (mOldChunkListing.hasChunk(chunkHash)) {
            // Reuse the chunk from the old backup by copying its byte range.
            ChunkListingMap.Entry oldChunk = mOldChunkListing.getChunkEntry(chunkHash);
            mBackupWriter.writeChunk(oldChunk.getStart(), oldChunk.getLength());

            checkArgument(oldChunk.getLength() >= 0, "Chunk must have zero or positive length");
            addChunk(chunkHash.getHash(), oldChunk.getLength());
        } else if (newChunks.containsKey(chunkHash)) {
            EncryptedChunk newChunk = newChunks.get(chunkHash);
            mEncryptedChunkEncoder.writeChunkToWriter(mBackupWriter, newChunk);
            int length = mEncryptedChunkEncoder.getEncodedLengthOfChunk(newChunk);
            mNewChunksSizeBytes += length;

            checkArgument(length >= 0, "Chunk must have zero or positive length");
            addChunk(chunkHash.getHash(), length);
        } else {
            throw new IllegalArgumentException(
                    "Chunk did not exist in old chunks or new chunks: " + chunkHash);
        }
    }

    /** Records a (hash, length) pair in the new chunk listing, defensively copying the hash. */
    private void addChunk(byte[] chunkHash, int length) {
        ChunksMetadataProto.Chunk chunk = new ChunksMetadataProto.Chunk();
        chunk.hash = Arrays.copyOf(chunkHash, chunkHash.length);
        chunk.length = length;
        mKnownChunks.add(chunk);
    }

    /** Encodes {@code value} as 8 big-endian bytes. */
    private static byte[] toByteArray(long value) {
        // Note that this code needs to stay compatible with GWT, which has known
        // bugs when narrowing byte casts of long values occur.
        byte[] result = new byte[8];
        for (int i = 7; i >= 0; i--) {
            result[i] = (byte) (value & 0xffL);
            value >>= 8;
        }
        return result;
    }
}
|
||||
@@ -0,0 +1,614 @@
|
||||
/*
|
||||
* Copyright (C) 2019 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.server.backup.encryption.chunking;
|
||||
|
||||
import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.AES_256_GCM;
|
||||
import static com.android.server.backup.encryption.protos.nano.ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED;
|
||||
import static com.android.server.backup.testing.CryptoTestUtils.newChunk;
|
||||
|
||||
import static com.google.common.truth.Truth.assertThat;
|
||||
import static com.google.common.truth.Truth.assertWithMessage;
|
||||
|
||||
import static junit.framework.Assert.fail;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.testng.Assert.assertThrows;
|
||||
|
||||
import static java.nio.charset.StandardCharsets.UTF_8;
|
||||
|
||||
import android.platform.test.annotations.Presubmit;
|
||||
|
||||
import com.android.server.backup.encryption.chunk.ChunkHash;
|
||||
import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
|
||||
import com.android.server.backup.encryption.testing.DiffScriptProcessor;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import com.google.common.io.Files;
|
||||
import com.google.common.primitives.Bytes;
|
||||
import com.google.common.primitives.Longs;
|
||||
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.rules.TemporaryFolder;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.robolectric.RobolectricTestRunner;
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
@RunWith(RobolectricTestRunner.class)
@Presubmit
public class BackupFileBuilderTest {
    private static final String TEST_DATA_1 =
            "I'm already there or close to [T7-9/executive level] in terms of big-picture vision";
    private static final String TEST_DATA_2 =
            "I was known for Real Games and should have been brought in for advice";
    private static final String TEST_DATA_3 =
            "Pride is rooted in the delusional belief held by all humans in an unchanging self";

    private static final byte[] TEST_FINGERPRINT_MIXER_SALT =
            Arrays.copyOf(new byte[] {22}, ChunkHash.HASH_LENGTH_BYTES);

    // Hashes {0...}, {1...}, {2...} — lexicographically ordered so tests can rely on
    // TEST_HASH_1 < TEST_HASH_2 < TEST_HASH_3.
    private static final ChunkHash TEST_HASH_1 =
            new ChunkHash(Arrays.copyOf(new byte[] {0}, EncryptedChunk.KEY_LENGTH_BYTES));
    private static final ChunkHash TEST_HASH_2 =
            new ChunkHash(Arrays.copyOf(new byte[] {1}, EncryptedChunk.KEY_LENGTH_BYTES));
    private static final ChunkHash TEST_HASH_3 =
            new ChunkHash(Arrays.copyOf(new byte[] {2}, EncryptedChunk.KEY_LENGTH_BYTES));

    private static final byte[] TEST_NONCE =
            Arrays.copyOf(new byte[] {3}, EncryptedChunk.NONCE_LENGTH_BYTES);

    private static final EncryptedChunk TEST_CHUNK_1 =
            EncryptedChunk.create(TEST_HASH_1, TEST_NONCE, TEST_DATA_1.getBytes(UTF_8));
    private static final EncryptedChunk TEST_CHUNK_2 =
            EncryptedChunk.create(TEST_HASH_2, TEST_NONCE, TEST_DATA_2.getBytes(UTF_8));
    private static final EncryptedChunk TEST_CHUNK_3 =
            EncryptedChunk.create(TEST_HASH_3, TEST_NONCE, TEST_DATA_3.getBytes(UTF_8));

    private static final byte[] TEST_CHECKSUM = {1, 2, 3, 4, 5, 6};

    @Rule public TemporaryFolder mTemporaryFolder = new TemporaryFolder();

    // Populated by setUpOldBackupWithChunks() for incremental-backup tests.
    private File mOldFile;
    private ChunksMetadataProto.ChunkListing mOldChunkListing;
    private EncryptedChunkEncoder mEncryptedChunkEncoder;

    @Before
    public void setUp() {
        // Must match the encoder BackupFileBuilder uses internally, so expected chunk lengths
        // computed in tests agree with what the builder writes.
        mEncryptedChunkEncoder = new LengthlessEncryptedChunkEncoder();
    }
|
||||
|
||||
    @Test
    public void writeChunks_nonIncremental_writesCorrectRawData() throws Exception {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        BackupFileBuilder backupFileBuilder = BackupFileBuilder.createForNonIncremental(output);

        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
                getNewChunkMap(TEST_HASH_1, TEST_HASH_2));

        // Raw (non-incremental) output is just [nonce][ciphertext] per chunk, in hash order.
        byte[] actual = output.toByteArray();
        byte[] expected =
                Bytes.concat(
                        TEST_CHUNK_1.nonce(),
                        TEST_CHUNK_1.encryptedBytes(),
                        TEST_CHUNK_2.nonce(),
                        TEST_CHUNK_2.encryptedBytes());
        assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
    }

    @Test
    public void writeChunks_nonIncrementalWithDuplicates_writesEachChunkOnlyOnce()
            throws Exception {
        ByteArrayOutputStream output = new ByteArrayOutputStream();
        BackupFileBuilder backupFileBuilder = BackupFileBuilder.createForNonIncremental(output);

        // TEST_HASH_1 appears twice in the plaintext order but must be written once.
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_1),
                getNewChunkMap(TEST_HASH_1, TEST_HASH_2));

        byte[] actual = output.toByteArray();
        byte[] expected =
                Bytes.concat(
                        TEST_CHUNK_1.nonce(),
                        TEST_CHUNK_1.encryptedBytes(),
                        TEST_CHUNK_2.nonce(),
                        TEST_CHUNK_2.encryptedBytes());
        assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
    }

    @Test
    public void writeChunks_incremental_writesParsableDiffScript() throws Exception {
        // We will insert chunk 2 in between chunks 1 and 3.
        setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
        ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);

        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
                getNewChunkMap(TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        // Applying the diff script to the old backup must yield all three chunks in hash order.
        byte[] actual =
                stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
        byte[] expected =
                Bytes.concat(
                        TEST_CHUNK_1.nonce(),
                        TEST_CHUNK_1.encryptedBytes(),
                        TEST_CHUNK_2.nonce(),
                        TEST_CHUNK_2.encryptedBytes(),
                        TEST_CHUNK_3.nonce(),
                        TEST_CHUNK_3.encryptedBytes());
        assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
    }

    @Test
    public void writeChunks_incrementalWithDuplicates_writesEachChunkOnlyOnce() throws Exception {
        // We will insert chunk 2 twice in between chunks 1 and 3.
        setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
        ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);

        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_2, TEST_HASH_3),
                getNewChunkMap(TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        byte[] actual =
                stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
        byte[] expected =
                Bytes.concat(
                        TEST_CHUNK_1.nonce(),
                        TEST_CHUNK_1.encryptedBytes(),
                        TEST_CHUNK_2.nonce(),
                        TEST_CHUNK_2.encryptedBytes(),
                        TEST_CHUNK_3.nonce(),
                        TEST_CHUNK_3.encryptedBytes());
        assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
    }

    @Test
    public void writeChunks_writesChunksInOrderOfHash() throws Exception {
        setUpOldBackupWithChunks(ImmutableList.of());
        ByteArrayOutputStream diffOutputStream = new ByteArrayOutputStream();
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(diffOutputStream, mOldChunkListing);

        // Write chunks out of order.
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_2, TEST_HASH_1),
                getNewChunkMap(TEST_HASH_2, TEST_HASH_1));
        backupFileBuilder.finish(getTestMetadata());

        // Output order follows the lexicographical hash order, not the plaintext order above.
        byte[] actual =
                stripMetadataAndPositionFromOutput(parseDiffScript(diffOutputStream.toByteArray()));
        byte[] expected =
                Bytes.concat(
                        TEST_CHUNK_1.nonce(),
                        TEST_CHUNK_1.encryptedBytes(),
                        TEST_CHUNK_2.nonce(),
                        TEST_CHUNK_2.encryptedBytes());
        assertThat(actual).asList().containsExactlyElementsIn(Bytes.asList(expected)).inOrder();
    }

    @Test
    public void writeChunks_alreadyFlushed_throwsException() throws Exception {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());
        backupFileBuilder.finish(getTestMetadata());

        // finish() seals the builder; further writes must be rejected.
        assertThrows(
                IllegalStateException.class,
                () -> backupFileBuilder.writeChunks(ImmutableList.of(), getNewChunkMap()));
    }
|
||||
|
||||
    @Test
    public void getNewChunkListing_hasChunksInOrderOfKey() throws Exception {
        // We will insert chunk 2 in between chunks 1 and 3.
        setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        // Write chunks out of order.
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2),
                getNewChunkMap(TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        // The listing must still be in key (hash) order regardless of write order.
        ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
        ChunksMetadataProto.ChunkListing actual =
                backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
        assertListingsEqual(actual, expected);
    }

    @Test
    public void getNewChunkListing_writeChunksInTwoBatches_returnsListingContainingAllChunks()
            throws Exception {
        // We will insert chunk 2 in between chunks 1 and 3.
        setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1, TEST_CHUNK_3));
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        // Chunks written across multiple writeChunks() calls accumulate into one listing.
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2), getNewChunkMap(TEST_HASH_2));
        backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_3), getNewChunkMap(TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
        ChunksMetadataProto.ChunkListing actual =
                backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
        assertListingsEqual(actual, expected);
    }

    @Test
    public void getNewChunkListing_writeDuplicateChunks_writesEachChunkOnlyOnce() throws Exception {
        // We will append [2][3][3][2] onto [1].
        setUpOldBackupWithChunks(ImmutableList.of(TEST_CHUNK_1));
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_3),
                getNewChunkMap(TEST_HASH_3, TEST_HASH_2));
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_3, TEST_HASH_2),
                getNewChunkMap(TEST_HASH_3, TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        // Duplicates across batches are deduplicated: each hash appears once in the listing.
        ChunksMetadataProto.ChunkListing expected = expectedChunkListing();
        ChunksMetadataProto.ChunkListing actual =
                backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);
        assertListingsEqual(actual, expected);
    }
|
||||
|
||||
    @Test
    public void getNewChunkListing_nonIncrementalWithNoSalt_doesNotThrowOnSerialisation() {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        // Does not throw: a null salt must be stored as an empty array, since nano protos
        // cannot serialize null byte-array fields.
        ChunksMetadataProto.ChunkListing.toByteArray(newChunkListing);
    }

    @Test
    public void getNewChunkListing_incrementalWithNoSalt_doesNotThrowOnSerialisation()
            throws Exception {

        setUpOldBackupWithChunks(ImmutableList.of());
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        // Does not throw.
        ChunksMetadataProto.ChunkListing.toByteArray(newChunkListing);
    }

    @Test
    public void getNewChunkListing_nonIncrementalWithNoSalt_hasEmptySalt() {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        assertThat(newChunkListing.fingerprintMixerSalt).isEmpty();
    }

    @Test
    public void getNewChunkListing_incrementalWithNoSalt_hasEmptySalt() throws Exception {
        setUpOldBackupWithChunks(ImmutableList.of());
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        assertThat(newChunkListing.fingerprintMixerSalt).isEmpty();
    }

    @Test
    public void getNewChunkListing_nonIncrementalWithSalt_hasGivenSalt() {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);

        assertThat(newChunkListing.fingerprintMixerSalt).isEqualTo(TEST_FINGERPRINT_MIXER_SALT);
    }

    @Test
    public void getNewChunkListing_incrementalWithSalt_hasGivenSalt() throws Exception {
        setUpOldBackupWithChunks(ImmutableList.of());
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(TEST_FINGERPRINT_MIXER_SALT);

        assertThat(newChunkListing.fingerprintMixerSalt).isEqualTo(TEST_FINGERPRINT_MIXER_SALT);
    }

    @Test
    public void getNewChunkListing_nonIncremental_hasCorrectCipherTypeAndChunkOrderingType() {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForNonIncremental(new ByteArrayOutputStream());

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        assertThat(newChunkListing.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
        assertThat(newChunkListing.chunkOrderingType)
                .isEqualTo(ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED);
    }

    @Test
    public void getNewChunkListing_incremental_hasCorrectCipherTypeAndChunkOrderingType()
            throws Exception {
        setUpOldBackupWithChunks(ImmutableList.of());
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), mOldChunkListing);

        ChunksMetadataProto.ChunkListing newChunkListing =
                backupFileBuilder.getNewChunkListing(/*fingerprintMixerSalt=*/ null);

        assertThat(newChunkListing.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
        assertThat(newChunkListing.chunkOrderingType)
                .isEqualTo(ChunksMetadataProto.CHUNK_ORDERING_TYPE_UNSPECIFIED);
    }
|
||||
|
||||
    @Test
    public void getNewChunkOrdering_chunksHaveCorrectStartPositions() throws Exception {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());

        // Write out of order by key to check that ordering is maintained.
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2),
                getNewChunkMap(TEST_HASH_1, TEST_HASH_3, TEST_HASH_2));
        backupFileBuilder.finish(getTestMetadata());

        ChunksMetadataProto.ChunkOrdering actual =
                backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
        // The chunks are listed in the order they are written above, but the start positions are
        // determined by the order in the encrypted blob (which is lexicographical by key).
        int chunk1Start = 0;
        int chunk2Start =
                chunk1Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1);
        int chunk3Start =
                chunk2Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2);

        int[] expected = {chunk1Start, chunk3Start, chunk2Start};
        assertThat(actual.starts.length).isEqualTo(expected.length);
        for (int i = 0; i < actual.starts.length; i++) {
            assertThat(expected[i]).isEqualTo(actual.starts[i]);
        }
    }

    @Test
    public void getNewChunkOrdering_duplicateChunks_writesDuplicates() throws Exception {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());

        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_1, TEST_HASH_2, TEST_HASH_2),
                getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
        backupFileBuilder.writeChunks(
                ImmutableList.of(TEST_HASH_3, TEST_HASH_3), getNewChunkMap(TEST_HASH_3));
        backupFileBuilder.finish(getTestMetadata());

        // Unlike the listing, the ordering keeps one entry per plaintext occurrence, so repeated
        // hashes produce repeated start offsets.
        ChunksMetadataProto.ChunkOrdering actual =
                backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
        int chunk1Start = 0;
        int chunk2Start =
                chunk1Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1);
        int chunk3Start =
                chunk2Start + mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2);

        int[] expected = {chunk1Start, chunk2Start, chunk2Start, chunk3Start, chunk3Start};
        assertThat(actual.starts.length).isEqualTo(expected.length);
        for (int i = 0; i < actual.starts.length; i++) {
            assertThat(expected[i]).isEqualTo(actual.starts[i]);
        }
    }

    @Test
    public void getNewChunkOrdering_returnsOrderingWithChecksum() throws Exception {
        BackupFileBuilder backupFileBuilder =
                BackupFileBuilder.createForIncremental(
                        new ByteArrayOutputStream(), new ChunksMetadataProto.ChunkListing());

        backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_1), getNewChunkMap(TEST_HASH_1));
        backupFileBuilder.finish(getTestMetadata());

        ChunksMetadataProto.ChunkOrdering actual =
                backupFileBuilder.getNewChunkOrdering(TEST_CHECKSUM);
        assertThat(actual.checksum).isEqualTo(TEST_CHECKSUM);
    }
|
||||
|
||||
@Test
|
||||
public void finish_writesMetadata() throws Exception {
|
||||
ByteArrayOutputStream output = new ByteArrayOutputStream();
|
||||
BackupFileBuilder builder = BackupFileBuilder.createForNonIncremental(output);
|
||||
ChunksMetadataProto.ChunksMetadata expectedMetadata = getTestMetadata();
|
||||
|
||||
builder.finish(expectedMetadata);
|
||||
|
||||
// The output is [metadata]+[long giving size of metadata].
|
||||
byte[] metadataBytes =
|
||||
Arrays.copyOfRange(output.toByteArray(), 0, output.size() - Long.BYTES);
|
||||
ChunksMetadataProto.ChunksMetadata actualMetadata =
|
||||
ChunksMetadataProto.ChunksMetadata.parseFrom(metadataBytes);
|
||||
assertThat(actualMetadata.checksumType).isEqualTo(ChunksMetadataProto.SHA_256);
|
||||
assertThat(actualMetadata.cipherType).isEqualTo(ChunksMetadataProto.AES_256_GCM);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void finish_writesMetadataPosition() throws Exception {
|
||||
ByteArrayOutputStream output = new ByteArrayOutputStream();
|
||||
BackupFileBuilder builder = BackupFileBuilder.createForNonIncremental(output);
|
||||
|
||||
builder.writeChunks(
|
||||
ImmutableList.of(TEST_HASH_1, TEST_HASH_2),
|
||||
getNewChunkMap(TEST_HASH_1, TEST_HASH_2));
|
||||
builder.writeChunks(ImmutableList.of(TEST_HASH_3), getNewChunkMap(TEST_HASH_3));
|
||||
builder.finish(getTestMetadata());
|
||||
|
||||
long expectedPosition =
|
||||
(long) mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_1)
|
||||
+ mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_2)
|
||||
+ mEncryptedChunkEncoder.getEncodedLengthOfChunk(TEST_CHUNK_3);
|
||||
long actualPosition =
|
||||
Longs.fromByteArray(
|
||||
Arrays.copyOfRange(
|
||||
output.toByteArray(), output.size() - Long.BYTES, output.size()));
|
||||
assertThat(actualPosition).isEqualTo(expectedPosition);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void finish_flushesOutputStream() throws Exception {
|
||||
OutputStream diffOutputStream = mock(OutputStream.class);
|
||||
BackupFileBuilder backupFileBuilder =
|
||||
BackupFileBuilder.createForIncremental(
|
||||
diffOutputStream, new ChunksMetadataProto.ChunkListing());
|
||||
|
||||
backupFileBuilder.writeChunks(ImmutableList.of(TEST_HASH_1), getNewChunkMap(TEST_HASH_1));
|
||||
diffOutputStream.flush();
|
||||
|
||||
verify(diffOutputStream).flush();
|
||||
}
|
||||
|
||||
private void setUpOldBackupWithChunks(List<EncryptedChunk> chunks) throws Exception {
|
||||
mOldFile = mTemporaryFolder.newFile();
|
||||
ChunksMetadataProto.ChunkListing chunkListing = new ChunksMetadataProto.ChunkListing();
|
||||
chunkListing.fingerprintMixerSalt =
|
||||
Arrays.copyOf(TEST_FINGERPRINT_MIXER_SALT, TEST_FINGERPRINT_MIXER_SALT.length);
|
||||
chunkListing.cipherType = AES_256_GCM;
|
||||
chunkListing.chunkOrderingType = CHUNK_ORDERING_TYPE_UNSPECIFIED;
|
||||
|
||||
List<ChunksMetadataProto.Chunk> knownChunks = new ArrayList<>();
|
||||
try (FileOutputStream outputStream = new FileOutputStream(mOldFile)) {
|
||||
for (EncryptedChunk chunk : chunks) {
|
||||
// Chunks are encoded in the format [nonce]+[data].
|
||||
outputStream.write(chunk.nonce());
|
||||
outputStream.write(chunk.encryptedBytes());
|
||||
|
||||
knownChunks.add(createChunkFor(chunk));
|
||||
}
|
||||
|
||||
outputStream.flush();
|
||||
}
|
||||
|
||||
chunkListing.chunks = knownChunks.toArray(new ChunksMetadataProto.Chunk[0]);
|
||||
mOldChunkListing = chunkListing;
|
||||
}
|
||||
|
||||
private byte[] parseDiffScript(byte[] diffScript) throws Exception {
|
||||
File newFile = mTemporaryFolder.newFile();
|
||||
new DiffScriptProcessor(mOldFile, newFile).process(new ByteArrayInputStream(diffScript));
|
||||
return Files.toByteArray(newFile);
|
||||
}
|
||||
|
||||
private void assertListingsEqual(
|
||||
ChunksMetadataProto.ChunkListing result, ChunksMetadataProto.ChunkListing expected) {
|
||||
assertThat(result.chunks.length).isEqualTo(expected.chunks.length);
|
||||
for (int i = 0; i < result.chunks.length; i++) {
|
||||
assertWithMessage("Chunk " + i)
|
||||
.that(result.chunks[i].length)
|
||||
.isEqualTo(expected.chunks[i].length);
|
||||
assertWithMessage("Chunk " + i)
|
||||
.that(result.chunks[i].hash)
|
||||
.isEqualTo(expected.chunks[i].hash);
|
||||
}
|
||||
}
|
||||
|
||||
private static ImmutableMap<ChunkHash, EncryptedChunk> getNewChunkMap(ChunkHash... hashes) {
|
||||
ImmutableMap.Builder<ChunkHash, EncryptedChunk> builder = ImmutableMap.builder();
|
||||
for (ChunkHash hash : hashes) {
|
||||
if (TEST_HASH_1.equals(hash)) {
|
||||
builder.put(TEST_HASH_1, TEST_CHUNK_1);
|
||||
} else if (TEST_HASH_2.equals(hash)) {
|
||||
builder.put(TEST_HASH_2, TEST_CHUNK_2);
|
||||
} else if (TEST_HASH_3.equals(hash)) {
|
||||
builder.put(TEST_HASH_3, TEST_CHUNK_3);
|
||||
} else {
|
||||
fail("Hash was not recognised: " + hash);
|
||||
}
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
private static ChunksMetadataProto.ChunksMetadata getTestMetadata() {
|
||||
ChunksMetadataProto.ChunksMetadata metadata = new ChunksMetadataProto.ChunksMetadata();
|
||||
metadata.checksumType = ChunksMetadataProto.SHA_256;
|
||||
metadata.cipherType = AES_256_GCM;
|
||||
return metadata;
|
||||
}
|
||||
|
||||
private static byte[] stripMetadataAndPositionFromOutput(byte[] output) {
|
||||
long metadataStart =
|
||||
Longs.fromByteArray(
|
||||
Arrays.copyOfRange(output, output.length - Long.BYTES, output.length));
|
||||
return Arrays.copyOfRange(output, 0, (int) metadataStart);
|
||||
}
|
||||
|
||||
private ChunksMetadataProto.ChunkListing expectedChunkListing() {
|
||||
ChunksMetadataProto.ChunkListing chunkListing = new ChunksMetadataProto.ChunkListing();
|
||||
chunkListing.fingerprintMixerSalt =
|
||||
Arrays.copyOf(TEST_FINGERPRINT_MIXER_SALT, TEST_FINGERPRINT_MIXER_SALT.length);
|
||||
chunkListing.cipherType = AES_256_GCM;
|
||||
chunkListing.chunkOrderingType = CHUNK_ORDERING_TYPE_UNSPECIFIED;
|
||||
chunkListing.chunks = new ChunksMetadataProto.Chunk[3];
|
||||
chunkListing.chunks[0] = createChunkFor(TEST_CHUNK_1);
|
||||
chunkListing.chunks[1] = createChunkFor(TEST_CHUNK_2);
|
||||
chunkListing.chunks[2] = createChunkFor(TEST_CHUNK_3);
|
||||
return chunkListing;
|
||||
}
|
||||
|
||||
private ChunksMetadataProto.Chunk createChunkFor(EncryptedChunk encryptedChunk) {
|
||||
byte[] chunkHash = encryptedChunk.key().getHash();
|
||||
byte[] hashCopy = Arrays.copyOf(chunkHash, chunkHash.length);
|
||||
return newChunk(hashCopy, mEncryptedChunkEncoder.getEncodedLengthOfChunk(encryptedChunk));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,256 @@
|
||||
/*
|
||||
* Copyright (C) 2019 The Android Open Source Project
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package com.android.server.backup.encryption.testing;
|
||||
|
||||
import static com.android.internal.util.Preconditions.checkArgument;
import static com.android.internal.util.Preconditions.checkNotNull;

import static java.nio.charset.StandardCharsets.UTF_8;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.util.Locale;
import java.util.Objects;
import java.util.Optional;
import java.util.Scanner;
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* To be used as part of a fake backup server. Processes a Scotty diff script.
|
||||
*
|
||||
* <p>A Scotty diff script consists of an ASCII line denoting a command, optionally followed by a
|
||||
* range of bytes. Command format is either
|
||||
*
|
||||
* <ul>
|
||||
* <li>A single 64-bit integer, followed by a new line: this denotes that the given number of
|
||||
* bytes are to follow in the stream. These bytes should be written directly to the new file.
|
||||
* <li>Two 64-bit integers, separated by a hyphen, followed by a new line: this says that the
|
||||
* given range of bytes from the original file ought to be copied into the new file.
|
||||
* </ul>
|
||||
*/
|
||||
public class DiffScriptProcessor {

    private static final int COPY_BUFFER_SIZE = 1024;

    private static final String READ_MODE = "r";
    private static final Pattern VALID_COMMAND_PATTERN = Pattern.compile("^\\d+(-\\d+)?$");

    // Previous (immutable) file that range commands copy from.
    private final File mInput;
    // Destination file for the reconstructed data.
    private final File mOutput;
    // Length of mInput captured at construction; assumes the input file is not modified
    // afterwards, as documented on the constructor.
    private final long mInputLength;

    /**
     * A new instance, with {@code input} as previous file, and {@code output} as new file.
     *
     * @param input Previous file from which ranges of bytes are to be copied. This file should be
     *     immutable.
     * @param output Output file, to which the new data should be written.
     * @throws IllegalArgumentException if input does not exist.
     */
    public DiffScriptProcessor(File input, File output) {
        if (!input.exists()) {
            throw new IllegalArgumentException("input file did not exist.");
        }
        mInput = input;
        mInputLength = input.length();
        mOutput = Objects.requireNonNull(output);
    }

    /**
     * Processes the diff script from {@code diffScript}, writing the reconstructed file to the
     * output file.
     *
     * @param diffScript Stream containing commands (see class Javadoc) and literal data.
     * @throws IOException if reading the script or writing the output fails.
     * @throws MalformedDiffScriptException if the script violates the expected format.
     */
    public void process(InputStream diffScript) throws IOException, MalformedDiffScriptException {
        // try-with-resources on BOTH files: the previous version leaked the RandomAccessFile
        // on every exit path.
        try (RandomAccessFile randomAccessInput = new RandomAccessFile(mInput, READ_MODE);
                FileOutputStream outputStream = new FileOutputStream(mOutput)) {
            while (true) {
                Optional<String> commandString = readCommand(diffScript);
                if (!commandString.isPresent()) {
                    // EOF: the script has been fully processed.
                    return;
                }
                Command command = Command.parse(commandString.get());

                if (command.mIsRange) {
                    // "n-m": copy bytes n..m (inclusive) of the original file.
                    checkFileRange(command.mCount, command.mLimit);
                    copyRange(randomAccessInput, outputStream, command.mCount, command.mLimit);
                } else {
                    // "n": the next n bytes of the script are literal data, followed by '\n'.
                    long bytesCopied = copyBytes(diffScript, outputStream, command.mCount);
                    if (bytesCopied < command.mCount) {
                        throw new MalformedDiffScriptException(
                                String.format(
                                        Locale.US,
                                        "Command to copy %d bytes from diff script, but only %d"
                                                + " bytes available",
                                        command.mCount,
                                        bytesCopied));
                    }
                    if (diffScript.read() != '\n') {
                        throw new MalformedDiffScriptException("Expected new line after bytes.");
                    }
                }
            }
        }
    }

    /**
     * Validates that [start, end] is a non-reversed, in-bounds (inclusive) range of the input
     * file.
     *
     * @throws MalformedDiffScriptException if the range is reversed or extends past EOF.
     */
    private void checkFileRange(long start, long end) throws MalformedDiffScriptException {
        if (end < start) {
            throw new MalformedDiffScriptException(
                    String.format(
                            Locale.US,
                            "Command to copy %d-%d bytes from original file, but %2$d < %1$d.",
                            start,
                            end));
        }

        if (end >= mInputLength) {
            throw new MalformedDiffScriptException(
                    String.format(
                            Locale.US,
                            "Command to copy %d-%d bytes from original file, but file is only %d"
                                    + " bytes long.",
                            start,
                            end,
                            mInputLength));
        }
    }

    /**
     * Reads a command from the input stream.
     *
     * @param inputStream The input.
     * @return Optional of command, or empty if EOF.
     */
    private static Optional<String> readCommand(InputStream inputStream) throws IOException {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();

        int b;
        while (!isEndOfCommand(b = inputStream.read())) {
            byteArrayOutputStream.write(b);
        }

        byte[] bytes = byteArrayOutputStream.toByteArray();
        if (bytes.length == 0) {
            return Optional.empty();
        } else {
            return Optional.of(new String(bytes, UTF_8));
        }
    }

    /**
     * If the given output from {@link InputStream#read()} is the end of a command - i.e., a new
     * line or the EOF.
     *
     * @param b The byte or -1.
     * @return {@code true} if ends the command.
     */
    private static boolean isEndOfCommand(int b) {
        return b == -1 || b == '\n';
    }

    /**
     * Copies {@code n} bytes from {@code inputStream} to {@code outputStream}.
     *
     * @return The number of bytes copied, less than {@code n} only if EOF was reached first.
     * @throws IOException if there was a problem reading or writing.
     */
    private static long copyBytes(InputStream inputStream, OutputStream outputStream, long n)
            throws IOException {
        // Single accumulating loop. The previous version had two bugs: its first loop never
        // advanced `copied` (infinite loop whenever n > COPY_BUFFER_SIZE), and its second loop
        // added -1 to `copied` at EOF, so it could never terminate on a truncated stream.
        byte[] buffer = new byte[COPY_BUFFER_SIZE];
        long copied = 0;
        while (copied < n) {
            int blockSize = (int) Math.min(COPY_BUFFER_SIZE, n - copied);
            long read = copyBlock(inputStream, outputStream, buffer, blockSize);
            if (read <= 0) {
                // EOF before n bytes arrived; the caller reports the malformed script.
                return copied;
            }
            copied += read;
        }
        return copied;
    }

    /**
     * Copies up to {@code size} bytes from {@code inputStream} to {@code outputStream}.
     *
     * @return The number of bytes read, or -1 at EOF (in which case nothing is written).
     */
    private static long copyBlock(
            InputStream inputStream, OutputStream outputStream, byte[] buffer, int size)
            throws IOException {
        int read = inputStream.read(buffer, 0, size);
        if (read <= 0) {
            // Guard: write(buffer, 0, -1) would throw IndexOutOfBoundsException at EOF.
            return read;
        }
        outputStream.write(buffer, 0, read);
        return read;
    }

    /**
     * Copies the given range of bytes from the input file to the output stream.
     *
     * @param input The input file.
     * @param output The output stream.
     * @param start Start position in the input file.
     * @param end End position in the input file (inclusive).
     * @throws IOException if there was a problem reading or writing.
     */
    private static void copyRange(RandomAccessFile input, OutputStream output, long start, long end)
            throws IOException {
        input.seek(start);

        // Inefficient but obviously correct. If tests become slow, optimize.
        for (; start <= end; start++) {
            output.write(input.read());
        }
    }

    /** Error thrown for a malformed diff script. */
    public static class MalformedDiffScriptException extends Exception {
        public MalformedDiffScriptException(String message) {
            super(message);
        }
    }

    /**
     * A command telling the processor either to insert n bytes, which follow, or copy n-m bytes
     * from the original file.
     */
    private static class Command {
        // For an insert command: the byte count. For a range command: the start offset.
        private final long mCount;
        // For a range command: the end offset (inclusive). Unused for insert commands.
        private final long mLimit;
        private final boolean mIsRange;

        private Command(long count, long limit, boolean isRange) {
            mCount = count;
            mLimit = limit;
            mIsRange = isRange;
        }

        /**
         * Attempts to parse the command string into a usable structure.
         *
         * @param command The command string, without a new line at the end.
         * @throws MalformedDiffScriptException if the command is not a valid diff script command.
         * @return The parsed command.
         */
        private static Command parse(String command) throws MalformedDiffScriptException {
            if (!VALID_COMMAND_PATTERN.matcher(command).matches()) {
                throw new MalformedDiffScriptException("Bad command: " + command);
            }

            // try-with-resources: the previous version never closed the Scanner.
            try (Scanner commandScanner = new Scanner(command)) {
                commandScanner.useDelimiter("-");
                long n = commandScanner.nextLong();
                if (!commandScanner.hasNextLong()) {
                    return new Command(n, 0L, /*isRange=*/ false);
                }
                long m = commandScanner.nextLong();
                return new Command(n, m, /*isRange=*/ true);
            }
        }
    }
}
|
||||
@@ -16,7 +16,11 @@
|
||||
|
||||
package com.android.server.backup.testing;
|
||||
|
||||
import com.android.server.backup.encryption.chunk.ChunkHash;
|
||||
import com.android.server.backup.encryption.protos.nano.ChunksMetadataProto;
|
||||
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Random;
|
||||
|
||||
import javax.crypto.KeyGenerator;
|
||||
@@ -42,4 +46,15 @@ public class CryptoTestUtils {
|
||||
random.nextBytes(bytes);
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/** Creates a listing entry for the given chunk hash and encoded length. */
public static ChunksMetadataProto.Chunk newChunk(ChunkHash hash, int length) {
    return newChunk(hash.getHash(), length);
}
|
||||
|
||||
public static ChunksMetadataProto.Chunk newChunk(byte[] hash, int length) {
|
||||
ChunksMetadataProto.Chunk newChunk = new ChunksMetadataProto.Chunk();
|
||||
newChunk.hash = Arrays.copyOf(hash, hash.length);
|
||||
newChunk.length = length;
|
||||
return newChunk;
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user