summaryrefslogtreecommitdiff
path: root/bip-0158/gentestvectors.go
diff options
context:
space:
mode:
Diffstat (limited to 'bip-0158/gentestvectors.go')
-rw-r--r--bip-0158/gentestvectors.go527
1 files changed, 228 insertions, 299 deletions
diff --git a/bip-0158/gentestvectors.go b/bip-0158/gentestvectors.go
index f6560cb..472f8c1 100644
--- a/bip-0158/gentestvectors.go
+++ b/bip-0158/gentestvectors.go
@@ -10,110 +10,162 @@ package main
import (
"bytes"
"encoding/hex"
+ "encoding/json"
"fmt"
+ "io"
"io/ioutil"
"os"
- "path"
-
- "github.com/roasbeef/btcd/chaincfg"
- "github.com/roasbeef/btcd/chaincfg/chainhash"
- "github.com/roasbeef/btcd/rpcclient"
- "github.com/roasbeef/btcd/wire"
- "github.com/roasbeef/btcutil/gcs"
- "github.com/roasbeef/btcutil/gcs/builder"
+ "path/filepath"
+
+ "github.com/btcsuite/btcd/blockchain"
+ "github.com/btcsuite/btcd/chaincfg/chainhash"
+ "github.com/btcsuite/btcd/rpcclient"
+ "github.com/btcsuite/btcd/wire"
+ "github.com/btcsuite/btcutil"
+ "github.com/btcsuite/btcutil/gcs/builder"
+ "github.com/davecgh/go-spew/spew"
)
-func main() {
- err := os.Mkdir("gcstestvectors", os.ModeDir|0755)
- if err != nil { // Don't overwrite existing output if any
- fmt.Println("Couldn't create directory: ", err)
- return
+var (
+ // testBlockHeights are the heights of the blocks to include in the test
+ // vectors. Any new entries must be added in sorted order.
+ testBlockHeights = []testBlockCase{
+ {0, "Genesis block"},
+ {2, ""},
+ {3, ""},
+ {926485, "Duplicate pushdata 913bcc2be49cb534c20474c4dee1e9c4c317e7eb"},
+ {987876, "Coinbase tx has unparseable output script"},
+ {1263442, "Includes witness data"},
}
- files := make([]*os.File, 33)
- prevBasicHeaders := make([]chainhash.Hash, 33)
- prevExtHeaders := make([]chainhash.Hash, 33)
- for i := 1; i <= 32; i++ { // Min 1 bit of collision space, max 32
- var blockBuf bytes.Buffer
- fName := fmt.Sprintf("gcstestvectors/testnet-%02d.csv", i)
- file, err := os.Create(fName)
- if err != nil {
- fmt.Println("Error creating CSV file: ", err.Error())
- return
- }
- _, err = file.WriteString("Block Height,Block Hash,Block,Previous Basic Header,Previous Ext Header,Basic Filter,Ext Filter,Basic Header,Ext Header\n")
- if err != nil {
- fmt.Println("Error writing to CSV file: ", err.Error())
- return
- }
- files[i] = file
- basicFilter, err := buildBasicFilter(
- chaincfg.TestNet3Params.GenesisBlock, uint8(i))
- if err != nil {
- fmt.Println("Error generating basic filter: ", err.Error())
- return
- }
- prevBasicHeaders[i], err = builder.MakeHeaderForFilter(basicFilter,
- chaincfg.TestNet3Params.GenesisBlock.Header.PrevBlock)
- if err != nil {
- fmt.Println("Error generating header for filter: ", err.Error())
- return
- }
- if basicFilter == nil {
- basicFilter = &gcs.Filter{}
- }
- extFilter, err := buildExtFilter(
- chaincfg.TestNet3Params.GenesisBlock, uint8(i))
- if err != nil {
- fmt.Println("Error generating ext filter: ", err.Error())
- return
- }
- prevExtHeaders[i], err = builder.MakeHeaderForFilter(extFilter,
- chaincfg.TestNet3Params.GenesisBlock.Header.PrevBlock)
- if err != nil {
- fmt.Println("Error generating header for filter: ", err.Error())
- return
- }
- if extFilter == nil {
- extFilter = &gcs.Filter{}
- }
- err = chaincfg.TestNet3Params.GenesisBlock.Serialize(&blockBuf)
- if err != nil {
- fmt.Println("Error serializing block to buffer: ", err.Error())
- return
- }
- bfBytes, err := basicFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
- }
- efBytes, err := extFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
+
+ defaultBtcdDir = btcutil.AppDataDir("btcd", false)
+ defaultBtcdRPCCertFile = filepath.Join(defaultBtcdDir, "rpc.cert")
+)
+
+const (
+ fp = 19
+)
+
+type testBlockCase struct {
+ height uint32
+ comment string
+}
+
+type JSONTestWriter struct {
+ writer io.Writer
+ firstRowWritten bool
+}
+
+func NewJSONTestWriter(writer io.Writer) *JSONTestWriter {
+ return &JSONTestWriter{writer: writer}
+}
+
+func (w *JSONTestWriter) WriteComment(comment string) error {
+ return w.WriteTestCase([]interface{}{comment})
+}
+
+func (w *JSONTestWriter) WriteTestCase(row []interface{}) error {
+ var err error
+ if w.firstRowWritten {
+ _, err = io.WriteString(w.writer, ",\n")
+ } else {
+ _, err = io.WriteString(w.writer, "[\n")
+ w.firstRowWritten = true
+ }
+ if err != nil {
+ return err
+ }
+
+ rowBytes, err := json.Marshal(row)
+ if err != nil {
+ return err
+ }
+
+ _, err = w.writer.Write(rowBytes)
+ return err
+}
+
+func (w *JSONTestWriter) Close() error {
+ if !w.firstRowWritten {
+ return nil
+ }
+
+ _, err := io.WriteString(w.writer, "\n]\n")
+ return err
+}
+
+func fetchPrevOutputScripts(client *rpcclient.Client, block *wire.MsgBlock) ([][]byte, error) {
+ var prevScripts [][]byte
+
+ txCache := make(map[chainhash.Hash]*wire.MsgTx)
+ for _, tx := range block.Transactions {
+ if blockchain.IsCoinBaseTx(tx) {
+ continue
}
- err = writeCSVRow(
- file,
- 0, // Height
- *chaincfg.TestNet3Params.GenesisHash,
- blockBuf.Bytes(),
- chaincfg.TestNet3Params.GenesisBlock.Header.PrevBlock,
- chaincfg.TestNet3Params.GenesisBlock.Header.PrevBlock,
- bfBytes,
- efBytes,
- prevBasicHeaders[i],
- prevExtHeaders[i],
- )
- if err != nil {
- fmt.Println("Error writing to CSV file: ", err.Error())
- return
+
+ for _, txIn := range tx.TxIn {
+ prevOp := txIn.PreviousOutPoint
+
+ tx, ok := txCache[prevOp.Hash]
+ if !ok {
+ originTx, err := client.GetRawTransaction(
+ &prevOp.Hash,
+ )
+ if err != nil {
+ return nil, fmt.Errorf("unable to get "+
+ "txid=%v: %v", prevOp.Hash, err)
+ }
+
+ txCache[prevOp.Hash] = originTx.MsgTx()
+
+ tx = originTx.MsgTx()
+ }
+
+ index := prevOp.Index
+
+ prevScripts = append(
+ prevScripts, tx.TxOut[index].PkScript,
+ )
}
}
- cert, err := ioutil.ReadFile(
- path.Join(os.Getenv("HOME"), "/.btcd/rpc.cert"))
+
+ return prevScripts, nil
+}
+
+func main() {
+ var (
+ writerFile *JSONTestWriter
+ prevBasicHeader chainhash.Hash
+ )
+ fName := fmt.Sprintf("testnet-%02d.json", fp)
+ file, err := os.Create(fName)
+ if err != nil {
+ fmt.Println("Error creating output file: ", err.Error())
+ return
+ }
+ defer file.Close()
+
+ writer := &JSONTestWriter{
+ writer: file,
+ }
+ defer writer.Close()
+
+ err = writer.WriteComment("Block Height,Block Hash,Block," +
+ "[Prev Output Scripts for Block],Previous Basic Header," +
+ "Basic Filter,Basic Header,Notes")
+ if err != nil {
+ fmt.Println("Error writing to output file: ", err.Error())
+ return
+ }
+
+ writerFile = writer
+
+ cert, err := ioutil.ReadFile(defaultBtcdRPCCertFile)
if err != nil {
fmt.Println("Couldn't read RPC cert: ", err.Error())
return
}
+
conf := rpcclient.ConnConfig{
Host: "127.0.0.1:18334",
Endpoint: "ws",
@@ -126,18 +178,21 @@ func main() {
fmt.Println("Couldn't create a new client: ", err.Error())
return
}
- for height := 1; height < 988000; height++ {
- fmt.Printf("Height: %d\n", height)
+
+ var testBlockIndex int
+ for height := 0; testBlockIndex < len(testBlockHeights); height++ {
blockHash, err := client.GetBlockHash(int64(height))
if err != nil {
fmt.Println("Couldn't get block hash: ", err.Error())
return
}
+
block, err := client.GetBlock(blockHash)
if err != nil {
fmt.Println("Couldn't get block hash: ", err.Error())
return
}
+
var blockBuf bytes.Buffer
err = block.Serialize(&blockBuf)
if err != nil {
@@ -145,224 +200,98 @@ func main() {
return
}
blockBytes := blockBuf.Bytes()
- for i := 1; i <= 32; i++ {
- basicFilter, err := buildBasicFilter(block, uint8(i))
- if err != nil {
- fmt.Println("Error generating basic filter: ", err.Error())
- return
- }
- basicHeader, err := builder.MakeHeaderForFilter(basicFilter,
- prevBasicHeaders[i])
- if err != nil {
- fmt.Println("Error generating header for filter: ", err.Error())
- return
- }
- if basicFilter == nil {
- basicFilter = &gcs.Filter{}
- }
- extFilter, err := buildExtFilter(block, uint8(i))
- if err != nil {
- fmt.Println("Error generating ext filter: ", err.Error())
- return
- }
- extHeader, err := builder.MakeHeaderForFilter(extFilter,
- prevExtHeaders[i])
- if err != nil {
- fmt.Println("Error generating header for filter: ", err.Error())
- return
- }
- if extFilter == nil {
- extFilter = &gcs.Filter{}
- }
- if i == builder.DefaultP { // This is the default filter size so we can check against the server's info
- filter, err := client.GetCFilter(blockHash, wire.GCSFilterRegular)
- if err != nil {
- fmt.Println("Error getting basic filter: ", err.Error())
- return
- }
- nBytes, err := basicFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
- }
- if !bytes.Equal(filter.Data, nBytes) {
- // Don't error on empty filters
- fmt.Println("Basic filter doesn't match!\n", filter.Data, "\n", nBytes)
- return
- }
- filter, err = client.GetCFilter(blockHash, wire.GCSFilterExtended)
- if err != nil {
- fmt.Println("Error getting extended filter: ", err.Error())
- return
- }
- nBytes, err = extFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
- }
- if !bytes.Equal(filter.Data, nBytes) {
- fmt.Println("Extended filter doesn't match!")
- return
- }
- header, err := client.GetCFilterHeader(blockHash, wire.GCSFilterRegular)
- if err != nil {
- fmt.Println("Error getting basic header: ", err.Error())
- return
- }
- if !bytes.Equal(header.PrevFilterHeader[:], basicHeader[:]) {
- fmt.Println("Basic header doesn't match!")
- return
- }
- header, err = client.GetCFilterHeader(blockHash, wire.GCSFilterExtended)
- if err != nil {
- fmt.Println("Error getting extended header: ", err.Error())
- return
- }
- if !bytes.Equal(header.PrevFilterHeader[:], extHeader[:]) {
- fmt.Println("Extended header doesn't match!")
- return
- }
- fmt.Println("Verified against server")
- }
- switch height {
- case 1, 2, 3, 926485, 987876: // Blocks for test cases
- var bfBytes []byte
- var efBytes []byte
- if basicFilter.N() > 0 {
- bfBytes, err = basicFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
- }
- }
- if extFilter.N() > 0 { // Exclude special case for block 987876
- efBytes, err = extFilter.NBytes()
- if err != nil {
- fmt.Println("Couldn't get NBytes(): ", err)
- return
- }
- }
- writeCSVRow(
- files[i],
- height,
- *blockHash,
- blockBytes,
- prevBasicHeaders[i],
- prevExtHeaders[i],
- bfBytes,
- efBytes,
- basicHeader,
- extHeader)
- }
- prevBasicHeaders[i] = basicHeader
- prevExtHeaders[i] = extHeader
+
+ prevOutputScripts, err := fetchPrevOutputScripts(client, block)
+ if err != nil {
+ fmt.Println("Couldn't fetch prev output scripts: ", err)
+ return
}
- }
-}
-// writeCSVRow writes a test vector to a CSV file.
-func writeCSVRow(file *os.File, height int, blockHash chainhash.Hash,
- blockBytes []byte, prevBasicHeader, prevExtHeader chainhash.Hash,
- basicFilter, extFilter []byte, basicHeader, extHeader chainhash.Hash) error {
- row := fmt.Sprintf("%d,%s,%s,%s,%s,%s,%s,%s,%s\n",
- height,
- blockHash.String(),
- hex.EncodeToString(blockBytes),
- prevBasicHeader.String(),
- prevExtHeader.String(),
- hex.EncodeToString(basicFilter),
- hex.EncodeToString(extFilter),
- basicHeader.String(),
- extHeader.String(),
- )
- _, err := file.WriteString(row)
- if err != nil {
- return err
- }
- return nil
-}
+ basicFilter, err := builder.BuildBasicFilter(block, prevOutputScripts)
+ if err != nil {
+ fmt.Println("Error generating basic filter: ", err.Error())
+ return
+ }
+ basicHeader, err := builder.MakeHeaderForFilter(basicFilter, prevBasicHeader)
+ if err != nil {
+ fmt.Println("Error generating header for filter: ", err.Error())
+ return
+ }
-// buildBasicFilter builds a basic GCS filter from a block. A basic GCS filter
-// will contain all the previous outpoints spent within a block, as well as the
-// data pushes within all the outputs created within a block. p is specified as
-// an argument in order to create test vectors with various values for p.
-func buildBasicFilter(block *wire.MsgBlock, p uint8) (*gcs.Filter, error) {
- blockHash := block.BlockHash()
- b := builder.WithKeyHashP(&blockHash, p)
-
- // If the filter had an issue with the specified key, then we force it
- // to bubble up here by calling the Key() function.
- _, err := b.Key()
- if err != nil {
- return nil, err
- }
+ // We'll now ensure that we've constructed the same filter as
+ // the chain server we're fetching blocks from.
+ filter, err := client.GetCFilter(
+ blockHash, wire.GCSFilterRegular,
+ )
+ if err != nil {
+ fmt.Println("Error getting basic filter: ",
+ err.Error())
+ return
+ }
- // In order to build a basic filter, we'll range over the entire block,
- // adding the outpoint data as well as the data pushes within the
- // pkScript.
- for i, tx := range block.Transactions {
- // First we'll compute the bash of the transaction and add that
- // directly to the filter.
- txHash := tx.TxHash()
- b.AddHash(&txHash)
-
- // Skip the inputs for the coinbase transaction
- if i != 0 {
- // Each each txin, we'll add a serialized version of
- // the txid:index to the filters data slices.
- for _, txIn := range tx.TxIn {
- b.AddOutPoint(txIn.PreviousOutPoint)
- }
+ nBytes, err := basicFilter.NBytes()
+ if err != nil {
+ fmt.Println("Couldn't get NBytes(): ", err)
+ return
+ }
+ if !bytes.Equal(filter.Data, nBytes) {
+ // Don't error on empty filters
+ fmt.Printf("basic filter doesn't match: generated "+
+ "%x, rpc returns %x, block %v", nBytes,
+ filter.Data, spew.Sdump(block))
+ return
}
- // For each output in a transaction, we'll add each of the
- // individual data pushes within the script.
- for _, txOut := range tx.TxOut {
- b.AddEntry(txOut.PkScript)
+ header, err := client.GetCFilterHeader(
+ blockHash, wire.GCSFilterRegular,
+ )
+ if err != nil {
+ fmt.Println("Error getting basic header: ", err.Error())
+ return
+ }
+ if !bytes.Equal(header.PrevFilterHeader[:], basicHeader[:]) {
+ fmt.Println("Basic header doesn't match!")
+ return
}
- }
- return b.Build()
-}
+ if height%1000 == 0 {
+ fmt.Printf("Verified height %v against server\n", height)
+ }
-// buildExtFilter builds an extended GCS filter from a block. An extended
-// filter supplements a regular basic filter by include all the _witness_ data
-// found within a block. This includes all the data pushes within any signature
-// scripts as well as each element of an input's witness stack. Additionally,
-// the _hashes_ of each transaction are also inserted into the filter. p is
-// specified as an argument in order to create test vectors with various values
-// for p.
-func buildExtFilter(block *wire.MsgBlock, p uint8) (*gcs.Filter, error) {
- blockHash := block.BlockHash()
- b := builder.WithKeyHashP(&blockHash, p)
-
- // If the filter had an issue with the specified key, then we force it
- // to bubble up here by calling the Key() function.
- _, err := b.Key()
- if err != nil {
- return nil, err
- }
+ if uint32(height) == testBlockHeights[testBlockIndex].height {
+ var bfBytes []byte
+ bfBytes, err = basicFilter.NBytes()
+ if err != nil {
+ fmt.Println("Couldn't get NBytes(): ", err)
+ return
+ }
- // In order to build an extended filter, we add the hash of each
- // transaction as well as each piece of witness data included in both
- // the sigScript and the witness stack of an input.
- for i, tx := range block.Transactions {
- // Skip the inputs for the coinbase transaction
- if i != 0 {
- // Next, for each input, we'll add the sigScript (if
- // it's present), and also the witness stack (if it's
- // present)
- for _, txIn := range tx.TxIn {
- if txIn.SignatureScript != nil {
- b.AddScript(txIn.SignatureScript)
- }
+ prevScriptStrings := make([]string, len(prevOutputScripts))
+ for i, prevScript := range prevOutputScripts {
+ prevScriptStrings[i] = hex.EncodeToString(prevScript)
+ }
- if len(txIn.Witness) != 0 {
- b.AddWitness(txIn.Witness)
- }
+ row := []interface{}{
+ height,
+ blockHash.String(),
+ hex.EncodeToString(blockBytes),
+ prevScriptStrings,
+ prevBasicHeader.String(),
+ hex.EncodeToString(bfBytes),
+ basicHeader.String(),
+ testBlockHeights[testBlockIndex].comment,
+ }
+ err = writerFile.WriteTestCase(row)
+ if err != nil {
+ fmt.Println("Error writing test case to output: ", err.Error())
+ return
}
}
- }
- return b.Build()
+ prevBasicHeader = basicHeader
+
+ if uint32(height) == testBlockHeights[testBlockIndex].height {
+ testBlockIndex++
+ }
+ }
}