/*
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: MIT-0
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this
* software and associated documentation files (the "Software"), to deal in the Software
* without restriction, including without limitation the rights to use, copy, modify,
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package software.amazon.qldb.tutorial;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.util.Base64;
import software.amazon.qldb.tutorial.qldb.Proof;
/**
* Encapsulates the logic to verify the integrity of revisions or blocks in a QLDB ledger.
*
* The main entry point is {@link #verify(byte[], byte[], String)}.
*
* This code expects that you have AWS credentials setup per:
* http://docs.aws.amazon.com/java-sdk/latest/developer-guide/setup-credentials.html
*/
public final class Verifier {
    public static final Logger log = LoggerFactory.getLogger(Verifier.class);

    /** Length, in bytes, of a SHA-256 hash. */
    private static final int HASH_LENGTH = 32;

    /** Exclusive upper bound on the bit index chosen by {@link #flipRandomBit(byte[])}. */
    private static final int UPPER_BOUND = 8;

    /**
     * Compares two hashes by their signed byte values in little-endian order
     * (the last byte is the most significant). Both arguments must be exactly
     * {@value #HASH_LENGTH} bytes long.
     */
    private static final Comparator<byte[]> hashComparator = (h1, h2) -> {
        if (h1.length != HASH_LENGTH || h2.length != HASH_LENGTH) {
            throw new IllegalArgumentException("Invalid hash.");
        }
        // Walk from the most significant (last) byte toward the first.
        for (int i = h1.length - 1; i >= 0; i--) {
            int byteEqual = Byte.compare(h1[i], h2[i]);
            if (byteEqual != 0) {
                return byteEqual;
            }
        }
        return 0;
    };

    /** Utility class; not instantiable. */
    private Verifier() { }

    /**
     * Verify the integrity of a document with respect to a QLDB ledger digest.
     *
     * The verification algorithm includes the following steps:
     *
     * 1. {@link #buildCandidateDigest(Proof, byte[])} build the candidate digest from the internal hashes
     *    in the {@link Proof}.
     * 2. Check that the {@code candidateLedgerDigest} is equal to the {@code ledgerDigest}.
     *
     * @param documentHash
     *              The hash of the document to be verified.
     * @param digest
     *              The QLDB ledger digest. This digest should have been retrieved using
     *              {@link com.amazonaws.services.qldb.AmazonQLDB#getDigest}
     * @param proofBlob
     *              The ion encoded bytes representing the {@link Proof} associated with the supplied
     *              {@code digestTipAddress} and {@code address} retrieved using
     *              {@link com.amazonaws.services.qldb.AmazonQLDB#getRevision}.
     * @return {@code true} if the record is verified or {@code false} if it is not verified.
     */
    public static boolean verify(
            final byte[] documentHash,
            final byte[] digest,
            final String proofBlob
    ) {
        Proof proof = Proof.fromBlob(proofBlob);
        byte[] candidateDigest = buildCandidateDigest(proof, documentHash);
        return Arrays.equals(digest, candidateDigest);
    }

    /**
     * Build the candidate digest representing the entire ledger from the internal hashes
     * of the {@link Proof}.
     *
     * @param proof
     *              A Java representation of {@link Proof}
     *              returned from {@link com.amazonaws.services.qldb.AmazonQLDB#getRevision}.
     * @param leafHash
     *              Leaf hash to build the candidate digest with.
     * @return a byte array of the candidate digest.
     */
    private static byte[] buildCandidateDigest(final Proof proof, final byte[] leafHash) {
        return calculateRootHashFromInternalHashes(proof.getInternalHashes(), leafHash);
    }

    /**
     * Get a new instance of {@link MessageDigest} using the SHA-256 algorithm.
     *
     * @return an instance of {@link MessageDigest}.
     * @throws IllegalStateException if the algorithm is not available on the current JVM.
     */
    static MessageDigest newMessageDigest() {
        try {
            return MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            log.error("Failed to create SHA-256 MessageDigest", e);
            throw new IllegalStateException("SHA-256 message digest is unavailable", e);
        }
    }

    /**
     * Takes two hashes, sorts them with {@link #hashComparator}, concatenates them,
     * and then returns the SHA-256 hash of the concatenated array.
     *
     * If either input is empty, the other input is returned unchanged (no hashing).
     *
     * @param h1
     *              Byte array containing one of the hashes to combine.
     * @param h2
     *              Byte array containing one of the hashes to combine.
     * @return the SHA-256 hash of the sorted, concatenated input hashes.
     */
    public static byte[] dot(final byte[] h1, final byte[] h2) {
        if (h1.length == 0) {
            return h2;
        }
        if (h2.length == 0) {
            return h1;
        }
        byte[] concatenated = new byte[h1.length + h2.length];
        // Sort the pair so that hashing is order-independent: the smaller hash
        // (per the little-endian comparator) always comes first.
        if (hashComparator.compare(h1, h2) < 0) {
            System.arraycopy(h1, 0, concatenated, 0, h1.length);
            System.arraycopy(h2, 0, concatenated, h1.length, h2.length);
        } else {
            System.arraycopy(h2, 0, concatenated, 0, h2.length);
            System.arraycopy(h1, 0, concatenated, h2.length, h1.length);
        }
        MessageDigest messageDigest = newMessageDigest();
        messageDigest.update(concatenated);
        return messageDigest.digest();
    }

    /**
     * Starting with the provided {@code leafHash}, combine it with the provided
     * {@code internalHashes} pairwise until only the root hash remains.
     *
     * @param internalHashes
     *              Internal hashes of the Merkle tree.
     * @param leafHash
     *              Leaf hash of the Merkle tree.
     * @return the root hash.
     */
    private static byte[] calculateRootHashFromInternalHashes(final List<byte[]> internalHashes,
                                                              final byte[] leafHash) {
        return internalHashes.stream().reduce(leafHash, Verifier::dot);
    }

    /**
     * Flip a single random bit in the given byte array. This method is used to demonstrate
     * QLDB's verification features.
     *
     * @param original
     *              The original byte array.
     * @return the altered byte array with a single random bit changed.
     * @throws IllegalArgumentException if {@code original} is empty.
     */
    public static byte[] flipRandomBit(final byte[] original) {
        if (original.length == 0) {
            throw new IllegalArgumentException("Array cannot be empty!");
        }
        int alteredPosition = ThreadLocalRandom.current().nextInt(original.length);
        int b = ThreadLocalRandom.current().nextInt(UPPER_BOUND);
        byte[] altered = new byte[original.length];
        System.arraycopy(original, 0, altered, 0, original.length);
        altered[alteredPosition] = (byte) (altered[alteredPosition] ^ (1 << b));
        return altered;
    }

    /**
     * Encode the given bytes as a Base64 string using UTF-8.
     *
     * @param arr
     *              The bytes to encode.
     * @return the Base64 representation of {@code arr}.
     */
    public static String toBase64(byte[] arr) {
        return new String(Base64.encode(arr), StandardCharsets.UTF_8);
    }

    /**
     * Convert a {@link ByteBuffer} into byte array.
     *
     * @param buffer
     *              The {@link ByteBuffer} to convert.
     * @return the converted byte array.
     */
    public static byte[] convertByteBufferToByteArray(final ByteBuffer buffer) {
        byte[] arr = new byte[buffer.remaining()];
        buffer.get(arr);
        return arr;
    }

    /**
     * Calculates the root hash from a list of hashes that represent the base of a Merkle tree.
     *
     * @param hashes
     *              The list of byte arrays representing hashes making up the base of a Merkle tree.
     * @return a byte array that is the root hash of the given list of hashes,
     *         or an empty array if {@code hashes} is empty.
     */
    public static byte[] calculateMerkleTreeRootHash(List<byte[]> hashes) {
        if (hashes.isEmpty()) {
            return new byte[0];
        }
        // Repeatedly combine adjacent pairs until a single root hash remains.
        List<byte[]> remaining = combineLeafHashes(hashes);
        while (remaining.size() > 1) {
            remaining = combineLeafHashes(remaining);
        }
        return remaining.get(0);
    }

    /**
     * Combine adjacent pairs of hashes with {@link #dot(byte[], byte[])}; an unpaired
     * trailing hash is carried forward unchanged.
     *
     * @param hashes
     *              The hashes forming one level of the Merkle tree.
     * @return the next (smaller) level of the tree.
     */
    private static List<byte[]> combineLeafHashes(List<byte[]> hashes) {
        List<byte[]> combinedHashes = new ArrayList<>();
        Iterator<byte[]> it = hashes.iterator();
        while (it.hasNext()) {
            byte[] left = it.next();
            if (it.hasNext()) {
                byte[] right = it.next();
                combinedHashes.add(dot(left, right));
            } else {
                combinedHashes.add(left);
            }
        }
        return combinedHashes;
    }
}