Diffstat (limited to 'contrib')
-rw-r--r--  contrib/builder-keys/README.md | 6
-rw-r--r--  contrib/builder-keys/keys.txt | 1
-rw-r--r--  contrib/devtools/README.md | 15
-rwxr-xr-x  contrib/devtools/copyright_header.py | 11
-rwxr-xr-x  contrib/devtools/gen-bitcoin-conf.sh | 83
-rw-r--r--  contrib/devtools/iwyu/bitcoin.core.imp | 6
-rwxr-xr-x  contrib/devtools/symbol-check.py | 5
-rwxr-xr-x  contrib/devtools/test-symbol-check.py | 2
-rwxr-xr-x  contrib/guix/guix-build | 2
-rwxr-xr-x  contrib/guix/libexec/build.sh | 25
-rwxr-xr-x  contrib/guix/libexec/codesign.sh | 7
-rw-r--r--  contrib/guix/libexec/prelude.bash | 2
-rw-r--r--  contrib/guix/manifest.scm | 92
-rw-r--r--  contrib/guix/patches/elfsteem-value-error-python-39.patch | 13
-rw-r--r--  contrib/guix/patches/gcc-10-remap-guix-store.patch | 25
-rw-r--r--  contrib/guix/patches/gcc-broken-longjmp.patch | 68
-rw-r--r--  contrib/guix/patches/glibc-2.24-fcommon.patch | 32
-rw-r--r--  contrib/guix/patches/glibc-2.24-guix-prefix.patch | 25
-rw-r--r--  contrib/guix/patches/glibc-2.27-dont-redefine-nss-database.patch | 87
-rw-r--r--  contrib/guix/patches/glibc-2.27-guix-prefix.patch | 25
-rw-r--r--  contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include-to-include-asm-syscalls.h.patch (renamed from contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include__-to-include-asm-syscalls.h.patch) | 6
-rw-r--r--  contrib/guix/patches/lief-fix-ppc64-nx-default.patch | 29
-rwxr-xr-x  contrib/linearize/linearize-data.py | 37
-rwxr-xr-x  contrib/linearize/linearize-hashes.py | 25
-rw-r--r--  contrib/macdeploy/README.md | 25
-rwxr-xr-x  contrib/macdeploy/gen-sdk | 24
-rwxr-xr-x  contrib/macdeploy/macdeployqtplus | 2
-rwxr-xr-x  contrib/message-capture/message-capture-parser.py | 3
-rw-r--r--  contrib/seeds/.gitignore | 1
-rw-r--r--  contrib/seeds/README.md | 18
-rw-r--r--  contrib/seeds/asmap.py | 815
-rwxr-xr-x  contrib/seeds/makeseeds.py | 135
-rw-r--r--  contrib/seeds/suspicious_hosts.txt | 16
-rwxr-xr-x  contrib/signet/miner | 12
-rw-r--r--  contrib/testgen/README.md | 2
-rwxr-xr-x  contrib/testgen/gen_key_io_test_vectors.py | 22
-rw-r--r--  contrib/verify-commits/trusted-git-root | 2
-rw-r--r--  contrib/verify-commits/trusted-keys | 3
-rwxr-xr-x  contrib/verify-commits/verify-commits.py | 15
-rw-r--r--  contrib/windeploy/win-codesign.cert | 189
40 files changed, 1560 insertions, 353 deletions
diff --git a/contrib/builder-keys/README.md b/contrib/builder-keys/README.md
index 56bd87d0af..a6179d6012 100644
--- a/contrib/builder-keys/README.md
+++ b/contrib/builder-keys/README.md
@@ -19,9 +19,15 @@ gpg --refresh-keys
To fetch keys of builders and active developers, feed the list of fingerprints
of the primary keys into gpg:
+On \*NIX:
```sh
while read fingerprint keyholder_name; do gpg --keyserver hkps://keys.openpgp.org --recv-keys ${fingerprint}; done < ./keys.txt
```
+On Windows (requires Gpg4win >= 4.0.0):
+```
+FOR /F "tokens=1" %i IN (keys.txt) DO gpg --keyserver hkps://keys.openpgp.org --recv-keys %i
+```
+
Add your key to the list if you provided Guix attestations for two major or
minor releases of Bitcoin Core.
diff --git a/contrib/builder-keys/keys.txt b/contrib/builder-keys/keys.txt
index c70069b440..f8377cce33 100644
--- a/contrib/builder-keys/keys.txt
+++ b/contrib/builder-keys/keys.txt
@@ -19,6 +19,7 @@ BF6273FAEF7CC0BA1F562E50989F6B3048A116B5 Dev Random (devrandom)
D35176BE9264832E4ACA8986BF0792FBE95DC863 fivepiece (fivepiece)
6F993B250557E7B016ADE5713BDCDA2D87A881D9 Fuzzbawls (Fuzzbawls)
01CDF4627A3B88AAE4A571C87588242FBE38D3A8 Gavin Andresen (gavinandresen)
+6B002C6EA3F91B1B0DF0C9BC8F617F1200A6D25C Gloria Zhao (glozow)
D1DBF2C4B96F2DEBF4C16654410108112E7EA81F Hennadii Stepanov (hebasto)
A2FD494D0021AA9B4FA58F759102B7AE654A4A5A Ilyas Ridhuan (IlyasRidhuan)
2688F5A9A4BE0F295E921E8A25F27A38A47AD566 James O'Beirne (jamesob)
diff --git a/contrib/devtools/README.md b/contrib/devtools/README.md
index 79b0134adc..54b1a85588 100644
--- a/contrib/devtools/README.md
+++ b/contrib/devtools/README.md
@@ -90,6 +90,21 @@ example:
BUILDDIR=$PWD/build contrib/devtools/gen-manpages.py
```
+gen-bitcoin-conf.sh
+===================
+
+Generates a bitcoin.conf file in `share/examples/` by parsing the output from `bitcoind --help`. This script is run during the
+release process to include a bitcoin.conf with the release binaries and can also be run by users to generate a file locally.
+When generating a file as part of the release process, make sure to commit the changes after running the script.
+
+With in-tree builds this tool can be run from any directory within the
+repository. To use this tool with out-of-tree builds set `BUILDDIR`. For
+example:
+
+```bash
+BUILDDIR=$PWD/build contrib/devtools/gen-bitcoin-conf.sh
+```
+
security-check.py and test-security-check.py
============================================
diff --git a/contrib/devtools/copyright_header.py b/contrib/devtools/copyright_header.py
index 38f3df77c9..680de1f1b3 100755
--- a/contrib/devtools/copyright_header.py
+++ b/contrib/devtools/copyright_header.py
@@ -35,7 +35,6 @@ EXCLUDE_DIRS = [
"src/leveldb/",
"src/minisketch",
"src/secp256k1/",
- "src/univalue/",
"src/crc32c/",
]
@@ -320,15 +319,13 @@ def get_most_recent_git_change_year(filename):
################################################################################
def read_file_lines(filename):
- f = open(filename, 'r', encoding="utf8")
- file_lines = f.readlines()
- f.close()
+ with open(filename, 'r', encoding="utf8") as f:
+ file_lines = f.readlines()
return file_lines
def write_file_lines(filename, file_lines):
- f = open(filename, 'w', encoding="utf8")
- f.write(''.join(file_lines))
- f.close()
+ with open(filename, 'w', encoding="utf8") as f:
+ f.write(''.join(file_lines))
################################################################################
# update header years execution
diff --git a/contrib/devtools/gen-bitcoin-conf.sh b/contrib/devtools/gen-bitcoin-conf.sh
new file mode 100755
index 0000000000..2ebbd42022
--- /dev/null
+++ b/contrib/devtools/gen-bitcoin-conf.sh
@@ -0,0 +1,83 @@
+#!/usr/bin/env bash
+# Copyright (c) 2021 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+export LC_ALL=C
+TOPDIR=${TOPDIR:-$(git rev-parse --show-toplevel)}
+BUILDDIR=${BUILDDIR:-$TOPDIR}
+BINDIR=${BINDIR:-$BUILDDIR/src}
+BITCOIND=${BITCOIND:-$BINDIR/bitcoind}
+SHARE_EXAMPLES_DIR=${SHARE_EXAMPLES_DIR:-$TOPDIR/share/examples}
+EXAMPLE_CONF_FILE=${EXAMPLE_CONF_FILE:-$SHARE_EXAMPLES_DIR/bitcoin.conf}
+
+[ ! -x "$BITCOIND" ] && echo "$BITCOIND not found or not executable." && exit 1
+
+DIRTY=""
+VERSION_OUTPUT=$($BITCOIND --version)
+if [[ $VERSION_OUTPUT == *"dirty"* ]]; then
+ DIRTY="${DIRTY}${BITCOIND}\n"
+fi
+
+if [ -n "$DIRTY" ]
+then
+ echo -e "WARNING: $BITCOIND was built from a dirty tree.\n"
+ echo -e "To safely generate a bitcoin.conf file, please commit your changes to $BITCOIND, rebuild, then run this script again.\n"
+fi
+
+echo 'Generating example bitcoin.conf file in share/examples/'
+
+# create the directory, if it doesn't exist
+mkdir -p "${SHARE_EXAMPLES_DIR}"
+
+# create the header text
+cat > "${EXAMPLE_CONF_FILE}" << 'EOF'
+##
+## bitcoin.conf configuration file.
+## Generated by contrib/devtools/gen-bitcoin-conf.sh.
+##
+## Lines beginning with # are comments.
+## All possible configuration options are provided. To use, copy this file
+## to your data directory (default or specified by -datadir), uncomment
+## options you would like to change, and save the file.
+##
+
+
+### Options
+EOF
+
+# parse the output from bitcoind --help
+# adding newlines is a bit funky to ensure portability for BSD
+# see here for more details: https://stackoverflow.com/a/24575385
+${BITCOIND} --help \
+ | sed '1,/Print this help message and exit/d' \
+ | sed -E 's/^[[:space:]]{2}\-/#/' \
+ | sed -E 's/^[[:space:]]{7}/# /' \
+ | sed -E '/[=[:space:]]/!s/#.*$/&=1/' \
+ | awk '/^#[a-z]/{x=$0;next}{if (NF==0) print x"\n",x="";else print}' \
+ | sed 's,\(^[[:upper:]].*\)\:$,\
+### \1,' \
+ | sed 's/[[:space:]]*$//' >> "${EXAMPLE_CONF_FILE}"
+
+# create the footer text
+cat >> "${EXAMPLE_CONF_FILE}" << 'EOF'
+
+# [Sections]
+# Most options will apply to all networks. To confine an option to a specific
+# network, add it under the relevant section below.
+#
+# Note: If not specified under a network section, the options addnode, connect,
+# port, bind, rpcport, rpcbind, and wallet will only apply to mainnet.
+
+# Options for mainnet
+[main]
+
+# Options for testnet
+[test]
+
+# Options for signet
+[signet]
+
+# Options for regtest
+[regtest]
+EOF
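
An editorial sketch, not part of the committed script: the sed pipeline above turns two-space-indented option names from `bitcoind --help` into commented-out settings, turns seven-space-indented help text into `#` comments, and appends `=1` to options that take no value. Roughly the same intent in Python (the helper name `conf_line` and the sample lines are illustrative only):

```python
import re

def conf_line(help_line: str) -> str:
    """Approximate what the sed expressions above do to a single help line."""
    # "  -daemon" (two-space indented option) -> commented-out setting "#daemon"
    line = re.sub(r"^ {2}-", "#", help_line)
    # "       Run in the background ..." (seven-space indented help text) -> "# Run in the background ..."
    line = re.sub(r"^ {7}", "# ", line)
    # options that take no value (no '=' and no whitespace left) get "=1" appended
    if not re.search(r"[=\s]", line):
        line += "=1"
    return line

assert conf_line("  -daemon") == "#daemon=1"
assert conf_line("  -maxconnections=<n>") == "#maxconnections=<n>"
```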
diff --git a/contrib/devtools/iwyu/bitcoin.core.imp b/contrib/devtools/iwyu/bitcoin.core.imp
new file mode 100644
index 0000000000..ce7786f58c
--- /dev/null
+++ b/contrib/devtools/iwyu/bitcoin.core.imp
@@ -0,0 +1,6 @@
+# Fixups / upstreamed changes
+[
+ { include: [ "<bits/termios-c_lflag.h>", private, "<termios.h>", public ] },
+ { include: [ "<bits/termios-struct.h>", private, "<termios.h>", public ] },
+ { include: [ "<bits/termios-tcflow.h>", private, "<termios.h>", public ] },
+]
diff --git a/contrib/devtools/symbol-check.py b/contrib/devtools/symbol-check.py
index a419e392ee..23d29af3f1 100755
--- a/contrib/devtools/symbol-check.py
+++ b/contrib/devtools/symbol-check.py
@@ -50,9 +50,8 @@ MAX_VERSIONS = {
# Ignore symbols that are exported as part of every executable
IGNORE_EXPORTS = {
-'_edata', '_end', '__end__', '_init', '__bss_start', '__bss_start__', '_bss_end__',
-'__bss_end__', '_fini', '_IO_stdin_used', 'stdin', 'stdout', 'stderr',
-'environ', '_environ', '__environ',
+'environ', '_environ', '__environ', '_fini', '_init', 'stdin',
+'stdout', 'stderr',
}
# Expected linker-loader names can be found here:
diff --git a/contrib/devtools/test-symbol-check.py b/contrib/devtools/test-symbol-check.py
index b4c112b266..2881e3efac 100755
--- a/contrib/devtools/test-symbol-check.py
+++ b/contrib/devtools/test-symbol-check.py
@@ -39,7 +39,7 @@ class TestSymbolChecks(unittest.TestCase):
cc = determine_wellknown_cmd('CC', 'gcc')
# there's no way to do this test for RISC-V at the moment; we build for
- # RISC-V in a glibc 2.27 envinonment and we allow all symbols from 2.27.
+ # RISC-V in a glibc 2.27 environment and we allow all symbols from 2.27.
if 'riscv' in get_machine(cc):
self.skipTest("test not available for RISC-V")
diff --git a/contrib/guix/guix-build b/contrib/guix/guix-build
index c8f2e40f0a..74b24b9612 100755
--- a/contrib/guix/guix-build
+++ b/contrib/guix/guix-build
@@ -132,7 +132,7 @@ for host in $HOSTS; do
echo "Found macOS SDK at '${OSX_SDK}', using..."
break
else
- echo "macOS SDK does not exist at '${OSX_SDK}', please place the extracted, untarred SDK there to perform darwin builds, exiting..."
+ echo "macOS SDK does not exist at '${OSX_SDK}', please place the extracted, untarred SDK there to perform darwin builds, or define SDK_PATH environment variable. Exiting..."
exit 1
fi
;;
diff --git a/contrib/guix/libexec/build.sh b/contrib/guix/libexec/build.sh
index 4eeb360603..28cad05013 100755
--- a/contrib/guix/libexec/build.sh
+++ b/contrib/guix/libexec/build.sh
@@ -79,19 +79,6 @@ prepend_to_search_env_var() {
export "${1}=${2}${!1:+:}${!1}"
}
-case "$HOST" in
- *darwin*)
- # When targeting darwin, zlib is required by native_libdmg-hfsplus.
- zlib_store_path=$(store_path "zlib")
- zlib_static_store_path=$(store_path "zlib" static)
-
- prepend_to_search_env_var LIBRARY_PATH "${zlib_static_store_path}/lib:${zlib_store_path}/lib"
- prepend_to_search_env_var C_INCLUDE_PATH "${zlib_store_path}/include"
- prepend_to_search_env_var CPLUS_INCLUDE_PATH "${zlib_store_path}/include"
- prepend_to_search_env_var OBJC_INCLUDE_PATH "${zlib_store_path}/include"
- prepend_to_search_env_var OBJCPLUS_INCLUDE_PATH "${zlib_store_path}/include"
-esac
-
# Set environment variables to point the CROSS toolchain to the right
# includes/libs for $HOST
case "$HOST" in
@@ -236,6 +223,7 @@ CONFIGFLAGS="--enable-reduce-exports --disable-bench --disable-gui-tests --disab
# CFLAGS
HOST_CFLAGS="-O2 -g"
+HOST_CFLAGS+=$(find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -ffile-prefix-map={}=/usr" \;)
case "$HOST" in
*linux*) HOST_CFLAGS+=" -ffile-prefix-map=${PWD}=." ;;
*mingw*) HOST_CFLAGS+=" -fno-ident" ;;
@@ -262,10 +250,6 @@ case "$HOST" in
*powerpc64*) HOST_LDFLAGS="${HOST_LDFLAGS} -Wl,--no-tls-get-addr-optimize" ;;
esac
-case "$HOST" in
- powerpc64-linux-*|riscv64-linux-*) HOST_LDFLAGS="${HOST_LDFLAGS} -Wl,-z,noexecstack" ;;
-esac
-
# Make $HOST-specific native binaries from depends available in $PATH
export PATH="${BASEPREFIX}/${HOST}/native/bin:${PATH}"
mkdir -p "$DISTSRC"
@@ -332,8 +316,7 @@ mkdir -p "$DISTSRC"
mkdir -p "unsigned-app-${HOST}"
cp --target-directory="unsigned-app-${HOST}" \
osx_volname \
- contrib/macdeploy/detached-sig-create.sh \
- "${BASEPREFIX}/${HOST}"/native/bin/dmg
+ contrib/macdeploy/detached-sig-create.sh
mv --target-directory="unsigned-app-${HOST}" dist
(
cd "unsigned-app-${HOST}"
@@ -382,6 +365,10 @@ mkdir -p "$DISTSRC"
;;
esac
+ # copy over the example bitcoin.conf file. if contrib/devtools/gen-bitcoin-conf.sh
+ # has not been run before building, this file will be a stub
+ cp "${DISTSRC}/share/examples/bitcoin.conf" "${DISTNAME}/"
+
# Finally, deterministically produce {non-,}debug binary tarballs ready
# for release
case "$HOST" in
diff --git a/contrib/guix/libexec/codesign.sh b/contrib/guix/libexec/codesign.sh
index 6ede95f42b..9a5d3a1ce5 100755
--- a/contrib/guix/libexec/codesign.sh
+++ b/contrib/guix/libexec/codesign.sh
@@ -84,14 +84,11 @@ mkdir -p "$DISTSRC"
# Apply detached codesignatures to dist/ (in-place)
signapple apply dist/Bitcoin-Qt.app codesignatures/osx/dist
- # Make an uncompressed DMG from dist/
+ # Make a DMG from dist/
xorrisofs -D -l -V "$(< osx_volname)" -no-pad -r -dir-mode 0755 \
- -o uncompressed.dmg \
+ -o "${OUTDIR}/${DISTNAME}-${HOST}.dmg" \
dist \
-- -volume_date all_file_dates ="$SOURCE_DATE_EPOCH"
-
- # Compress uncompressed.dmg and output to OUTDIR
- ./dmg dmg uncompressed.dmg "${OUTDIR}/${DISTNAME}-${HOST}.dmg"
;;
*)
exit 1
diff --git a/contrib/guix/libexec/prelude.bash b/contrib/guix/libexec/prelude.bash
index f24c120863..3eb8fc02da 100644
--- a/contrib/guix/libexec/prelude.bash
+++ b/contrib/guix/libexec/prelude.bash
@@ -51,7 +51,7 @@ fi
time-machine() {
# shellcheck disable=SC2086
guix time-machine --url=https://git.savannah.gnu.org/git/guix.git \
- --commit=34e9eae68c9583acce5abc4100add3d88932a5ae \
+ --commit=998eda3067c7d21e0d9bb3310d2f5a14b8f1c681 \
--cores="$JOBS" \
--keep-failed \
--fallback \
diff --git a/contrib/guix/manifest.scm b/contrib/guix/manifest.scm
index fcec592c2c..12b563eac2 100644
--- a/contrib/guix/manifest.scm
+++ b/contrib/guix/manifest.scm
@@ -130,26 +130,13 @@ chain for " target " development."))
(license (package-license xgcc)))))
(define base-gcc gcc-10)
-
-;; Building glibc with stack smashing protector first landed in glibc 2.25, use
-;; this function to disable for older glibcs
-;;
-;; From glibc 2.25 changelog:
-;;
-;; * Most of glibc can now be built with the stack smashing protector enabled.
-;; It is recommended to build glibc with --enable-stack-protector=strong.
-;; Implemented by Nick Alcock (Oracle).
-(define (make-glibc-without-ssp xglibc)
- (package-with-extra-configure-variable
- (package-with-extra-configure-variable
- xglibc "libc_cv_ssp" "no")
- "libc_cv_ssp_strong" "no"))
+(define base-linux-kernel-headers linux-libre-headers-5.15)
(define* (make-bitcoin-cross-toolchain target
#:key
- (base-gcc-for-libc gcc-7)
- (base-kernel-headers linux-libre-headers-4.9)
- (base-libc (make-glibc-without-ssp glibc-2.24))
+ (base-gcc-for-libc base-gcc)
+ (base-kernel-headers base-linux-kernel-headers)
+ (base-libc (make-glibc-without-werror glibc-2.24))
(base-gcc (make-gcc-rpath-link base-gcc)))
"Convenience wrapper around MAKE-CROSS-TOOLCHAIN with default values
desirable for building Bitcoin Core release binaries."
@@ -160,11 +147,15 @@ desirable for building Bitcoin Core release binaries."
base-gcc))
(define (make-gcc-with-pthreads gcc)
- (package-with-extra-configure-variable gcc "--enable-threads" "posix"))
+ (package-with-extra-configure-variable
+ (package-with-extra-patches gcc
+ (search-our-patches "gcc-10-remap-guix-store.patch"))
+ "--enable-threads" "posix"))
-(define (make-mingw-w64-cross-gcc-vmov-alignment cross-gcc)
+(define (make-mingw-w64-cross-gcc cross-gcc)
(package-with-extra-patches cross-gcc
- (search-our-patches "vmov-alignment.patch")))
+ (search-our-patches "vmov-alignment.patch"
+ "gcc-broken-longjmp.patch")))
(define (make-mingw-pthreads-cross-toolchain target)
"Create a cross-compilation toolchain package for TARGET"
@@ -172,7 +163,7 @@ desirable for building Bitcoin Core release binaries."
(pthreads-xlibc mingw-w64-x86_64-winpthreads)
(pthreads-xgcc (make-gcc-with-pthreads
(cross-gcc target
- #:xgcc (make-ssp-fixed-gcc (make-mingw-w64-cross-gcc-vmov-alignment base-gcc))
+ #:xgcc (make-ssp-fixed-gcc (make-mingw-w64-cross-gcc base-gcc))
#:xbinutils xbinutils
#:libc pthreads-xlibc))))
;; Define a meta-package that propagates the resulting XBINUTILS, XLIBC, and
@@ -198,10 +189,14 @@ chain for " target " development."))
(package-with-extra-patches base-nsis
(search-our-patches "nsis-gcc-10-memmove.patch")))
+(define (fix-ppc64-nx-default lief)
+ (package-with-extra-patches lief
+ (search-our-patches "lief-fix-ppc64-nx-default.patch")))
+
(define-public lief
(package
(name "python-lief")
- (version "0.12.0")
+ (version "0.12.1")
(source
(origin
(method git-fetch)
@@ -211,8 +206,15 @@ chain for " target " development."))
(file-name (git-file-name name version))
(sha256
(base32
- "026jchj56q25v6gc0754dj9cj5hz5zaza8ij93y5ga94w20kzm9q"))))
+ "1xzbh3bxy4rw1yamnx68da1v5s56ay4g081cyamv67256g0qy2i1"))))
(build-system python-build-system)
+ (arguments
+ `(#:phases
+ (modify-phases %standard-phases
+ (add-after 'unpack 'parallel-jobs
+ ;; build with multiple cores
+ (lambda _
+ (substitute* "setup.py" (("self.parallel if self.parallel else 1") (number->string (parallel-job-count)))))))))
(native-inputs
`(("cmake" ,cmake)))
(home-page "https://github.com/lief-project/LIEF")
@@ -254,7 +256,7 @@ thus should be able to compile on most platforms where these exist.")
(license license:gpl3+))) ; license is with openssl exception
(define-public python-elfesteem
- (let ((commit "87bbd79ab7e361004c98cc8601d4e5f029fd8bd5"))
+ (let ((commit "2eb1e5384ff7a220fd1afacd4a0170acff54fe56"))
(package
(name "python-elfesteem")
(version (git-version "0.1" "1" commit))
@@ -267,8 +269,7 @@ thus should be able to compile on most platforms where these exist.")
(file-name (git-file-name name commit))
(sha256
(base32
- "1nyvjisvyxyxnd0023xjf5846xd03lwawp5pfzr8vrky7wwm5maz"))
- (patches (search-our-patches "elfsteem-value-error-python-39.patch"))))
+ "07x6p8clh11z8s1n2kdxrqwqm2almgc5qpkcr9ckb6y5ivjdr5r6"))))
(build-system python-build-system)
;; There are no tests, but attempting to run python setup.py test leads to
;; PYTHONPATH problems, just disable the test
@@ -516,6 +517,9 @@ and endian independent.")
inspecting signatures in Mach-O binaries.")
(license license:expat))))
+(define (make-glibc-without-werror glibc)
+ (package-with-extra-configure-variable glibc "enable_werror" "no"))
+
(define-public glibc-2.24
(package
(inherit glibc-2.31)
@@ -532,7 +536,9 @@ inspecting signatures in Mach-O binaries.")
(patches (search-our-patches "glibc-ldd-x86_64.patch"
"glibc-versioned-locpath.patch"
"glibc-2.24-elfm-loadaddr-dynamic-rewrite.patch"
- "glibc-2.24-no-build-time-cxx-header-run.patch"))))))
+ "glibc-2.24-no-build-time-cxx-header-run.patch"
+ "glibc-2.24-fcommon.patch"
+ "glibc-2.24-guix-prefix.patch"))))))
(define-public glibc-2.27/bitcoin-patched
(package
@@ -548,7 +554,9 @@ inspecting signatures in Mach-O binaries.")
(base32
"1b2n1gxv9f4fd5yy68qjbnarhf8mf4vmlxk10i3328c1w5pmp0ca"))
(patches (search-our-patches "glibc-ldd-x86_64.patch"
- "glibc-2.27-riscv64-Use-__has_include__-to-include-asm-syscalls.h.patch"))))))
+ "glibc-2.27-riscv64-Use-__has_include-to-include-asm-syscalls.h.patch"
+ "glibc-2.27-dont-redefine-nss-database.patch"
+ "glibc-2.27-guix-prefix.patch"))))))
(packages->manifest
(append
@@ -572,45 +580,37 @@ inspecting signatures in Mach-O binaries.")
bzip2
gzip
xz
- zlib
- (list zlib "static")
;; Build tools
gnu-make
- libtool
+ libtool-2.4.7
autoconf-2.71
automake
pkg-config
bison
+ ;; Native GCC 10 toolchain
+ gcc-toolchain-10
+ (list gcc-toolchain-10 "static")
;; Scripting
perl
python-3
;; Git
git
;; Tests
- lief)
+ (fix-ppc64-nx-default lief))
(let ((target (getenv "HOST")))
(cond ((string-suffix? "-mingw32" target)
;; Windows
- (list ;; Native GCC 10 toolchain
- gcc-toolchain-10
- (list gcc-toolchain-10 "static")
- zip
+ (list zip
(make-mingw-pthreads-cross-toolchain "x86_64-w64-mingw32")
(make-nsis-for-gcc-10 nsis-x86_64)
osslsigncode))
((string-contains target "-linux-")
- (list ;; Native GCC 7 toolchain
- gcc-toolchain-7
- (list gcc-toolchain-7 "static")
- (cond ((string-contains target "riscv64-")
+ (list (cond ((string-contains target "riscv64-")
(make-bitcoin-cross-toolchain target
- #:base-libc glibc-2.27/bitcoin-patched
- #:base-kernel-headers linux-libre-headers-4.19))
+ #:base-libc (make-glibc-without-werror glibc-2.27/bitcoin-patched)
+ #:base-kernel-headers base-linux-kernel-headers))
(else
(make-bitcoin-cross-toolchain target)))))
((string-contains target "darwin")
- (list ;; Native GCC 10 toolchain
- gcc-toolchain-10
- (list gcc-toolchain-10 "static")
- clang-toolchain-10 binutils cmake xorriso python-signapple))
+ (list clang-toolchain-10 binutils cmake xorriso python-signapple))
(else '())))))
diff --git a/contrib/guix/patches/elfsteem-value-error-python-39.patch b/contrib/guix/patches/elfsteem-value-error-python-39.patch
deleted file mode 100644
index 21e1228afd..0000000000
--- a/contrib/guix/patches/elfsteem-value-error-python-39.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/examples/otool.py b/examples/otool.py
-index 2b8efc0..d797b2e 100755
---- a/examples/otool.py
-+++ b/examples/otool.py
-@@ -342,7 +342,7 @@ if __name__ == '__main__':
- try:
- e = macho_init.MACHO(raw,
- parseSymbols = False)
-- except ValueError, err:
-+ except ValueError as err:
- print("%s:" %file)
- print(" %s" % err)
- continue
diff --git a/contrib/guix/patches/gcc-10-remap-guix-store.patch b/contrib/guix/patches/gcc-10-remap-guix-store.patch
new file mode 100644
index 0000000000..a47ef7a2df
--- /dev/null
+++ b/contrib/guix/patches/gcc-10-remap-guix-store.patch
@@ -0,0 +1,25 @@
+From aad25427e74f387412e8bc9a9d7bbc6c496c792f Mon Sep 17 00:00:00 2001
+From: Andrew Chow <achow101-github@achow101.com>
+Date: Wed, 6 Jul 2022 16:49:41 -0400
+Subject: [PATCH] guix: remap guix store paths to /usr
+
+---
+ libgcc/Makefile.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/libgcc/Makefile.in b/libgcc/Makefile.in
+index 851e7657d07..476c2becd1c 100644
+--- a/libgcc/Makefile.in
++++ b/libgcc/Makefile.in
+@@ -854,7 +854,7 @@ endif
+ # libgcc_eh.a, only LIB2ADDEH matters. If we do, only LIB2ADDEHSTATIC and
+ # LIB2ADDEHSHARED matter. (Usually all three are identical.)
+
+-c_flags := -fexceptions
++c_flags := -fexceptions $(shell find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -ffile-prefix-map={}=/usr" \;)
+
+ ifeq ($(enable_shared),yes)
+
+--
+2.37.0
+
diff --git a/contrib/guix/patches/gcc-broken-longjmp.patch b/contrib/guix/patches/gcc-broken-longjmp.patch
new file mode 100644
index 0000000000..1cfc0918b0
--- /dev/null
+++ b/contrib/guix/patches/gcc-broken-longjmp.patch
@@ -0,0 +1,68 @@
+commit eb5698897c52702498938592d7f76e67d126451f
+Author: Eric Botcazou <ebotcazou@adacore.com>
+Date: Wed May 5 22:48:51 2021 +0200
+
+ Fix PR target/100402
+
+ This is a regression for 64-bit Windows present from mainline down to the 9
+ branch and introduced by the fix for PR target/99234. Again SEH, but with
+ a twist related to the way MinGW implements setjmp/longjmp, which turns out
+ to be piggybacked on SEH with recent versions of MinGW, i.e. the longjmp
+ performs a bona-fide unwinding of the stack, because it calls RtlUnwindEx
+ with the second argument initially passed to setjmp, which is the result of
+ __builtin_frame_address (0) in the MinGW header file:
+
+ define setjmp(BUF) _setjmp((BUF), __builtin_frame_address (0))
+
+ This means that we directly expose the frame pointer to the SEH machinery
+ here (unlike with regular exception handling where we use an intermediate
+ CFA) and thus that we cannot do whatever we want with it. The old code
+ would leave it unaligned, i.e. not multiple of 16, whereas the new code
+ aligns it, but this breaks for some reason; at least it appears that a
+ .seh_setframe directive with 0 as second argument always works, so the
+ fix aligns it this way.
+
+ gcc/
+ PR target/100402
+ * config/i386/i386.c (ix86_compute_frame_layout): For a SEH target,
+ always return the establisher frame for __builtin_frame_address (0).
+ gcc/testsuite/
+ * gcc.c-torture/execute/20210505-1.c: New test.
+
+diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
+index 2f838840e96..06ad1b2274e 100644
+--- a/gcc/config/i386/i386.c
++++ b/gcc/config/i386/i386.c
+@@ -6356,12 +6356,29 @@ ix86_compute_frame_layout (void)
+ area, see the SEH code in config/i386/winnt.c for the rationale. */
+ frame->hard_frame_pointer_offset = frame->sse_reg_save_offset;
+
+- /* If we can leave the frame pointer where it is, do so. Also, return
++ /* If we can leave the frame pointer where it is, do so; however return
+ the establisher frame for __builtin_frame_address (0) or else if the
+- frame overflows the SEH maximum frame size. */
++ frame overflows the SEH maximum frame size.
++
++ Note that the value returned by __builtin_frame_address (0) is quite
++ constrained, because setjmp is piggybacked on the SEH machinery with
++ recent versions of MinGW:
++
++ # elif defined(__SEH__)
++ # if defined(__aarch64__) || defined(_ARM64_)
++ # define setjmp(BUF) _setjmp((BUF), __builtin_sponentry())
++ # elif (__MINGW_GCC_VERSION < 40702)
++ # define setjmp(BUF) _setjmp((BUF), mingw_getsp())
++ # else
++ # define setjmp(BUF) _setjmp((BUF), __builtin_frame_address (0))
++ # endif
++
++ and the second argument passed to _setjmp, if not null, is forwarded
++ to the TargetFrame parameter of RtlUnwindEx by longjmp (after it has
++ built an ExceptionRecord on the fly describing the setjmp buffer). */
+ const HOST_WIDE_INT diff
+ = frame->stack_pointer_offset - frame->hard_frame_pointer_offset;
+- if (diff <= 255)
++ if (diff <= 255 && !crtl->accesses_prior_frames)
+ {
+ /* The resulting diff will be a multiple of 16 lower than 255,
+ i.e. at most 240 as required by the unwind data structure. */
diff --git a/contrib/guix/patches/glibc-2.24-fcommon.patch b/contrib/guix/patches/glibc-2.24-fcommon.patch
new file mode 100644
index 0000000000..2bc32ede90
--- /dev/null
+++ b/contrib/guix/patches/glibc-2.24-fcommon.patch
@@ -0,0 +1,32 @@
+commit 264a4a0dbe1f4369db315080034b500bed66016c
+Author: fanquake <fanquake@gmail.com>
+Date: Fri May 6 11:03:04 2022 +0100
+
+ build: use -fcommon to retain legacy behaviour with GCC 10
+
+ GCC 10 started using -fno-common by default, which causes issues with
+ the powerpc builds using glibc 2.24. A patch was committed to glibc to fix
+ the issue, 18363b4f010da9ba459b13310b113ac0647c2fcc but is non-trivial
+ to backport, and was broken in at least one way, see the followup in
+ commit 7650321ce037302bfc2f026aa19e0213b8d02fe6.
+
+ For now, retain the legacy GCC behaviour by passing -fcommon when
+ building glibc.
+
+ https://gcc.gnu.org/onlinedocs/gcc/Code-Gen-Options.html.
+ https://sourceware.org/git/?p=glibc.git;a=commit;h=18363b4f010da9ba459b13310b113ac0647c2fcc
+ https://sourceware.org/git/?p=glibc.git;a=commit;h=7650321ce037302bfc2f026aa19e0213b8d02fe6
+
+diff --git a/Makeconfig b/Makeconfig
+index ee379f5852..63c4a2f234 100644
+--- a/Makeconfig
++++ b/Makeconfig
+@@ -824,7 +824,7 @@ ifeq "$(strip $(+cflags))" ""
+ +cflags := $(default_cflags)
+ endif # $(+cflags) == ""
+
+-+cflags += $(cflags-cpu) $(+gccwarn) $(+merge-constants) $(+math-flags)
+++cflags += $(cflags-cpu) $(+gccwarn) $(+merge-constants) $(+math-flags) -fcommon
+ +gcc-nowarn := -w
+
+ # Don't duplicate options if we inherited variables from the parent.
diff --git a/contrib/guix/patches/glibc-2.24-guix-prefix.patch b/contrib/guix/patches/glibc-2.24-guix-prefix.patch
new file mode 100644
index 0000000000..cba2f59a8d
--- /dev/null
+++ b/contrib/guix/patches/glibc-2.24-guix-prefix.patch
@@ -0,0 +1,25 @@
+Without ffile-prefix-map, the debug symbols will contain paths for the
+guix store which will include the hashes of each package. However, the
+hash for the same package will differ when on different architectures.
+In order to be reproducible regardless of the architecture used to build
+the package, map all guix store prefixes to something fixed, e.g. /usr.
+
+We might be able to drop this in favour of using --with-nonshared-cflags
+when we begin using newer versions of glibc.
+
+--- a/Makeconfig
++++ b/Makeconfig
+@@ -950,6 +950,10 @@ object-suffixes-for-libc += .oS
+ # shared objects. We don't want to use CFLAGS-os because users may, for
+ # example, make that processor-specific.
+ CFLAGS-.oS = $(CFLAGS-.o) $(PIC-ccflag)
++
++# Map Guix store paths to /usr
++CFLAGS-.oS += `find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -fdebug-prefix-map={}=/usr" \;`
++
+ CPPFLAGS-.oS = $(CPPFLAGS-.o) -DPIC -DLIBC_NONSHARED=1
+ libtype.oS = lib%_nonshared.a
+ endif
+--
+2.35.1
+
diff --git a/contrib/guix/patches/glibc-2.27-dont-redefine-nss-database.patch b/contrib/guix/patches/glibc-2.27-dont-redefine-nss-database.patch
new file mode 100644
index 0000000000..16a595d613
--- /dev/null
+++ b/contrib/guix/patches/glibc-2.27-dont-redefine-nss-database.patch
@@ -0,0 +1,87 @@
+commit 78a90c2f74a2012dd3eff302189e47ff6779a757
+Author: Andreas Schwab <schwab@linux-m68k.org>
+Date: Fri Mar 2 23:07:14 2018 +0100
+
+ Fix multiple definitions of __nss_*_database (bug 22918)
+
+ (cherry picked from commit eaf6753f8aac33a36deb98c1031d1bad7b593d2d)
+
+diff --git a/nscd/gai.c b/nscd/gai.c
+index d081747797..576fd0045b 100644
+--- a/nscd/gai.c
++++ b/nscd/gai.c
+@@ -45,3 +45,6 @@
+ #ifdef HAVE_LIBIDN
+ # include <libidn/idn-stub.c>
+ #endif
++
++/* Some variables normally defined in libc. */
++service_user *__nss_hosts_database attribute_hidden;
+diff --git a/nss/nsswitch.c b/nss/nsswitch.c
+index d5e655974f..b0f0c11a3e 100644
+--- a/nss/nsswitch.c
++++ b/nss/nsswitch.c
+@@ -62,7 +62,7 @@ static service_library *nss_new_service (name_database *database,
+
+ /* Declare external database variables. */
+ #define DEFINE_DATABASE(name) \
+- extern service_user *__nss_##name##_database attribute_hidden; \
++ service_user *__nss_##name##_database attribute_hidden; \
+ weak_extern (__nss_##name##_database)
+ #include "databases.def"
+ #undef DEFINE_DATABASE
+diff --git a/nss/nsswitch.h b/nss/nsswitch.h
+index eccb535ef5..63573b9ebc 100644
+--- a/nss/nsswitch.h
++++ b/nss/nsswitch.h
+@@ -226,10 +226,10 @@ libc_hidden_proto (__nss_hostname_digits_dots)
+ #define MAX_NR_ADDRS 48
+
+ /* Prototypes for __nss_*_lookup2 functions. */
+-#define DEFINE_DATABASE(arg) \
+- service_user *__nss_##arg##_database attribute_hidden; \
+- int __nss_##arg##_lookup2 (service_user **, const char *, \
+- const char *, void **); \
++#define DEFINE_DATABASE(arg) \
++ extern service_user *__nss_##arg##_database attribute_hidden; \
++ int __nss_##arg##_lookup2 (service_user **, const char *, \
++ const char *, void **); \
+ libc_hidden_proto (__nss_##arg##_lookup2)
+ #include "databases.def"
+ #undef DEFINE_DATABASE
+diff --git a/posix/tst-rfc3484-2.c b/posix/tst-rfc3484-2.c
+index f509534ca9..8c64ac59ff 100644
+--- a/posix/tst-rfc3484-2.c
++++ b/posix/tst-rfc3484-2.c
+@@ -58,6 +58,7 @@ _res_hconf_init (void)
+ #undef USE_NSCD
+ #include "../sysdeps/posix/getaddrinfo.c"
+
++service_user *__nss_hosts_database attribute_hidden;
+
+ /* This is the beginning of the real test code. The above defines
+ (among other things) the function rfc3484_sort. */
+diff --git a/posix/tst-rfc3484-3.c b/posix/tst-rfc3484-3.c
+index ae44087a10..1c61aaf844 100644
+--- a/posix/tst-rfc3484-3.c
++++ b/posix/tst-rfc3484-3.c
+@@ -58,6 +58,7 @@ _res_hconf_init (void)
+ #undef USE_NSCD
+ #include "../sysdeps/posix/getaddrinfo.c"
+
++service_user *__nss_hosts_database attribute_hidden;
+
+ /* This is the beginning of the real test code. The above defines
+ (among other things) the function rfc3484_sort. */
+diff --git a/posix/tst-rfc3484.c b/posix/tst-rfc3484.c
+index 7f191abbbc..8f45848e44 100644
+--- a/posix/tst-rfc3484.c
++++ b/posix/tst-rfc3484.c
+@@ -58,6 +58,7 @@ _res_hconf_init (void)
+ #undef USE_NSCD
+ #include "../sysdeps/posix/getaddrinfo.c"
+
++service_user *__nss_hosts_database attribute_hidden;
+
+ /* This is the beginning of the real test code. The above defines
+ (among other things) the function rfc3484_sort. */
diff --git a/contrib/guix/patches/glibc-2.27-guix-prefix.patch b/contrib/guix/patches/glibc-2.27-guix-prefix.patch
new file mode 100644
index 0000000000..cdb3971f7a
--- /dev/null
+++ b/contrib/guix/patches/glibc-2.27-guix-prefix.patch
@@ -0,0 +1,25 @@
+Without ffile-prefix-map, the debug symbols will contain paths for the
+guix store which will include the hashes of each package. However, the
+hash for the same package will differ when on different architectures.
+In order to be reproducible regardless of the architecture used to build
+the package, map all guix store prefixes to something fixed, e.g. /usr.
+
+We might be able to drop this in favour of using --with-nonshared-cflags
+when we begin using newer versions of glibc.
+
+--- a/Makeconfig
++++ b/Makeconfig
+@@ -992,6 +992,10 @@ object-suffixes :=
+ CPPFLAGS-.o = $(pic-default)
+ # libc.a must be compiled with -fPIE/-fpie for static PIE.
+ CFLAGS-.o = $(filter %frame-pointer,$(+cflags)) $(pie-default)
++
++# Map Guix store paths to /usr
++CFLAGS-.o += `find /gnu/store -maxdepth 1 -mindepth 1 -type d -exec echo -n " -fdebug-prefix-map={}=/usr" \;`
++
+ libtype.o := lib%.a
+ object-suffixes += .o
+ ifeq (yes,$(build-shared))
+--
+2.35.1
+
diff --git a/contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include__-to-include-asm-syscalls.h.patch b/contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include-to-include-asm-syscalls.h.patch
index d6217157ee..c0f8495c41 100644
--- a/contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include__-to-include-asm-syscalls.h.patch
+++ b/contrib/guix/patches/glibc-2.27-riscv64-Use-__has_include-to-include-asm-syscalls.h.patch
@@ -1,3 +1,7 @@
+Note that this has been modified from the original commit, to use __has_include
+instead of __has_include__, as the latter was causing build failures with GCC 10.
+See also: http://lists.busybox.net/pipermail/buildroot/2020-July/590376.html.
+
https://sourceware.org/git/?p=glibc.git;a=commit;h=0b9c84906f653978fb8768c7ebd0ee14a47e662e
From 562c52cc81a4e456a62e6455feb32732049e9070 Mon Sep 17 00:00:00 2001
@@ -59,7 +63,7 @@ index d612ef4c6c..0b2042620b 100644
#include <atomic.h>
#include <sys/cachectl.h>
-#include <asm/syscalls.h>
-+#if __has_include__ (<asm/syscalls.h>)
++#if __has_include (<asm/syscalls.h>)
+# include <asm/syscalls.h>
+#else
+# include <asm/unistd.h>
diff --git a/contrib/guix/patches/lief-fix-ppc64-nx-default.patch b/contrib/guix/patches/lief-fix-ppc64-nx-default.patch
new file mode 100644
index 0000000000..101bc1ddc0
--- /dev/null
+++ b/contrib/guix/patches/lief-fix-ppc64-nx-default.patch
@@ -0,0 +1,29 @@
+Correct default for Binary::has_nx on ppc64
+
+From the Linux kernel source:
+
+ * This is the default if a program doesn't have a PT_GNU_STACK
+ * program header entry. The PPC64 ELF ABI has a non executable stack
+ * stack by default, so in the absence of a PT_GNU_STACK program header
+ * we turn execute permission off.
+
+This patch can be dropped the next time we update LIEF.
+
+diff --git a/src/ELF/Binary.cpp b/src/ELF/Binary.cpp
+index a90be1ab..fd2d9764 100644
+--- a/src/ELF/Binary.cpp
++++ b/src/ELF/Binary.cpp
+@@ -1084,7 +1084,12 @@ bool Binary::has_nx() const {
+ return segment->type() == SEGMENT_TYPES::PT_GNU_STACK;
+ });
+ if (it_stack == std::end(segments_)) {
+- return false;
++ if (header().machine_type() == ARCH::EM_PPC64) {
++ // The PPC64 ELF ABI has a non-executable stack by default.
++ return true;
++ } else {
++ return false;
++ }
+ }
+
+ return !(*it_stack)->has(ELF_SEGMENT_FLAGS::PF_X);
diff --git a/contrib/linearize/linearize-data.py b/contrib/linearize/linearize-data.py
index 7510204bb1..b72c7b0d08 100755
--- a/contrib/linearize/linearize-data.py
+++ b/contrib/linearize/linearize-data.py
@@ -34,12 +34,12 @@ def get_blk_dt(blk_hdr):
# When getting the list of block hashes, undo any byte reversals.
def get_block_hashes(settings):
blkindex = []
- f = open(settings['hashlist'], "r", encoding="utf8")
- for line in f:
- line = line.rstrip()
- if settings['rev_hash_bytes'] == 'true':
- line = bytes.fromhex(line)[::-1].hex()
- blkindex.append(line)
+ with open(settings['hashlist'], "r", encoding="utf8") as f:
+ for line in f:
+ line = line.rstrip()
+ if settings['rev_hash_bytes'] == 'true':
+ line = bytes.fromhex(line)[::-1].hex()
+ blkindex.append(line)
print("Read " + str(len(blkindex)) + " hashes")
@@ -249,19 +249,18 @@ if __name__ == '__main__':
print("Usage: linearize-data.py CONFIG-FILE")
sys.exit(1)
- f = open(sys.argv[1], encoding="utf8")
- for line in f:
- # skip comment lines
- m = re.search(r'^\s*#', line)
- if m:
- continue
-
- # parse key=value lines
- m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
- if m is None:
- continue
- settings[m.group(1)] = m.group(2)
- f.close()
+ with open(sys.argv[1], encoding="utf8") as f:
+ for line in f:
+ # skip comment lines
+ m = re.search(r'^\s*#', line)
+ if m:
+ continue
+
+ # parse key=value lines
+ m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
+ if m is None:
+ continue
+ settings[m.group(1)] = m.group(2)
# Force hash byte format setting to be lowercase to make comparisons easier.
# Also place upfront in case any settings need to know about it.
diff --git a/contrib/linearize/linearize-hashes.py b/contrib/linearize/linearize-hashes.py
index 0a316eb818..5959300e74 100755
--- a/contrib/linearize/linearize-hashes.py
+++ b/contrib/linearize/linearize-hashes.py
@@ -98,19 +98,18 @@ if __name__ == '__main__':
print("Usage: linearize-hashes.py CONFIG-FILE")
sys.exit(1)
- f = open(sys.argv[1], encoding="utf8")
- for line in f:
- # skip comment lines
- m = re.search(r'^\s*#', line)
- if m:
- continue
-
- # parse key=value lines
- m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
- if m is None:
- continue
- settings[m.group(1)] = m.group(2)
- f.close()
+ with open(sys.argv[1], encoding="utf8") as f:
+ for line in f:
+ # skip comment lines
+ m = re.search(r'^\s*#', line)
+ if m:
+ continue
+
+ # parse key=value lines
+ m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
+ if m is None:
+ continue
+ settings[m.group(1)] = m.group(2)
if 'host' not in settings:
settings['host'] = '127.0.0.1'
diff --git a/contrib/macdeploy/README.md b/contrib/macdeploy/README.md
index ce69079e29..599a0bfa6c 100644
--- a/contrib/macdeploy/README.md
+++ b/contrib/macdeploy/README.md
@@ -15,13 +15,16 @@ When complete, it will have produced `Bitcoin-Core.dmg`.
A free Apple Developer Account is required to proceed.
Our current macOS SDK
-(`Xcode-12.2-12B45b-extracted-SDK-with-libcxx-headers.tar.gz`) can be
-extracted from
+(`Xcode-12.2-12B45b-extracted-SDK-with-libcxx-headers.tar.gz`)
+can be extracted from
[Xcode_12.2.xip](https://download.developer.apple.com/Developer_Tools/Xcode_12.2/Xcode_12.2.xip).
+
Alternatively, after logging in to your account go to 'Downloads', then 'More'
-and search for [`Xcode_12.2`](https://developer.apple.com/download/all/?q=Xcode%2012.2).
+and search for [`Xcode 12.2`](https://developer.apple.com/download/all/?q=Xcode%2012.2).
+
An Apple ID and cookies enabled for the hostname are needed to download this.
-The `sha256sum` of the archive should be `28d352f8c14a43d9b8a082ac6338dc173cb153f964c6e8fb6ba389e5be528bd0`.
+
+The `sha256sum` of the downloaded XIP archive should be `28d352f8c14a43d9b8a082ac6338dc173cb153f964c6e8fb6ba389e5be528bd0`.
After Xcode version 7.x, Apple started shipping the `Xcode.app` in a `.xip`
archive. This makes the SDK less-trivial to extract on non-macOS machines. One
@@ -55,7 +58,10 @@ previous stage) as the first argument.
./contrib/macdeploy/gen-sdk '/path/to/Xcode.app'
```
+The `sha256sum` of the generated TAR.GZ archive should be `df75d30ecafc429e905134333aeae56ac65fac67cb4182622398fd717df77619`.
+
## Deterministic macOS DMG Notes
+
Working macOS DMGs are created in Linux by combining a recent `clang`, the Apple
`binutils` (`ld`, `ar`, etc) and DMG authoring tools.
@@ -89,16 +95,7 @@ redistributed.
[`xorrisofs`](https://www.gnu.org/software/xorriso/) is used to create the DMG.
-`xorrisofs` cannot compress DMGs, so afterwards, the DMG tool from the
-`libdmg-hfsplus` project is used to compress it. There are several bugs in this
-tool and its maintainer has seemingly abandoned the project.
-
-The DMG tool has the ability to create DMGs from scratch as well, but this functionality is
-broken. Only the compression feature is currently used. Ideally, the creation could be fixed
-and `xorrisofs` would no longer be necessary.
-
-Background images and other features can be added to DMG files by inserting a
-`.DS_Store` during creation.
+A background image is added to DMG files by inserting a `.DS_Store` during creation.
As of OS X 10.9 Mavericks, using an Apple-blessed key to sign binaries is a requirement in
order to satisfy the new Gatekeeper requirements. Because this private key cannot be
diff --git a/contrib/macdeploy/gen-sdk b/contrib/macdeploy/gen-sdk
index ebef1d2db0..6efaaccb8e 100755
--- a/contrib/macdeploy/gen-sdk
+++ b/contrib/macdeploy/gen-sdk
@@ -8,6 +8,21 @@ import gzip
import os
import contextlib
+# monkey-patch Python 3.8 and older to fix wrong TAR header handling
+# see https://github.com/bitcoin/bitcoin/pull/24534
+# and https://github.com/python/cpython/pull/18080 for more info
+if sys.version_info < (3, 9):
+ _old_create_header = tarfile.TarInfo._create_header
+ def _create_header(info, format, encoding, errors):
+ buf = _old_create_header(info, format, encoding, errors)
+ # replace devmajor/devminor with binary zeroes
+ buf = buf[:329] + bytes(16) + buf[345:]
+ # recompute checksum
+ chksum = tarfile.calc_chksums(buf)[0]
+ buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
+ return buf
+ tarfile.TarInfo._create_header = staticmethod(_create_header)
+
@contextlib.contextmanager
def cd(path):
"""Context manager that restores PWD even if an exception was raised."""
@@ -75,14 +90,21 @@ def run():
tarinfo.name = str(pathlib.Path(alt_base_dir, tarinfo.name))
if tarinfo.linkname and tarinfo.linkname.startswith("./"):
tarinfo.linkname = str(pathlib.Path(alt_base_dir, tarinfo.linkname))
+ # make metadata deterministic
+ tarinfo.mtime = 0
+ tarinfo.uid, tarinfo.uname = 0, ''
+ tarinfo.gid, tarinfo.gname = 0, ''
+ # don't use isdir() as there are also executable files present
+ tarinfo.mode = 0o0755 if tarinfo.mode & 0o0100 else 0o0644
return tarinfo
with cd(dir_to_add):
+ # recursion already adds entries in sorted order
tarfp.add(".", recursive=True, filter=change_tarinfo_base)
print("Creating output .tar.gz file...")
with out_sdktgz_path.open("wb") as fp:
with gzip.GzipFile(fileobj=fp, mode='wb', compresslevel=9, mtime=0) as gzf:
- with tarfile.open(mode="w", fileobj=gzf) as tarfp:
+ with tarfile.open(mode="w", fileobj=gzf, format=tarfile.GNU_FORMAT) as tarfp:
print("Adding MacOSX SDK {} files...".format(sdk_version))
tarfp_add_with_base_change(tarfp, sdk_dir, out_name)
print("Adding libc++ headers...")
diff --git a/contrib/macdeploy/macdeployqtplus b/contrib/macdeploy/macdeployqtplus
index cc24e0317b..2420539b7c 100755
--- a/contrib/macdeploy/macdeployqtplus
+++ b/contrib/macdeploy/macdeployqtplus
@@ -211,7 +211,7 @@ def getFrameworks(binaryPath: str, verbose: int) -> List[FrameworkInfo]:
return libraries
def runInstallNameTool(action: str, *args):
- installnametoolbin=os.getenv("INSTALLNAMETOOL", "install_name_tool")
+ installnametoolbin=os.getenv("INSTALL_NAME_TOOL", "install_name_tool")
run([installnametoolbin, "-"+action] + list(args), check=True)
def changeInstallName(oldName: str, newName: str, binaryPath: str, verbose: int):
diff --git a/contrib/message-capture/message-capture-parser.py b/contrib/message-capture/message-capture-parser.py
index 9988478f1b..eefd22a60e 100755
--- a/contrib/message-capture/message-capture-parser.py
+++ b/contrib/message-capture/message-capture-parser.py
@@ -79,7 +79,8 @@ def to_jsonable(obj: Any) -> Any:
val = getattr(obj, slot, None)
if slot in HASH_INTS and isinstance(val, int):
ret[slot] = ser_uint256(val).hex()
- elif slot in HASH_INT_VECTORS and isinstance(val[0], int):
+ elif slot in HASH_INT_VECTORS:
+ assert all(isinstance(a, int) for a in val)
ret[slot] = [ser_uint256(a).hex() for a in val]
else:
ret[slot] = to_jsonable(val)
diff --git a/contrib/seeds/.gitignore b/contrib/seeds/.gitignore
index e4a39d6093..d9a2451f70 100644
--- a/contrib/seeds/.gitignore
+++ b/contrib/seeds/.gitignore
@@ -1 +1,2 @@
seeds_main.txt
+asmap-filled.dat
diff --git a/contrib/seeds/README.md b/contrib/seeds/README.md
index c53446bfb0..b2ea7522ac 100644
--- a/contrib/seeds/README.md
+++ b/contrib/seeds/README.md
@@ -8,21 +8,11 @@ and remove old versions as necessary (at a minimum when GetDesirableServiceFlags
changes its default return value, as those are the services which seeds are added
to addrman with).
-The seeds compiled into the release are created from sipa's DNS seed data, like this:
+The seeds compiled into the release are created from sipa's DNS seed and AS map
+data. Run the following commands from the `/contrib/seeds` directory:
curl https://bitcoin.sipa.be/seeds.txt.gz | gzip -dc > seeds_main.txt
- python3 makeseeds.py < seeds_main.txt > nodes_main.txt
+ curl https://bitcoin.sipa.be/asmap-filled.dat > asmap-filled.dat
+ python3 makeseeds.py -a asmap-filled.dat < seeds_main.txt > nodes_main.txt
cat nodes_main_manual.txt >> nodes_main.txt
python3 generate-seeds.py . > ../../src/chainparamsseeds.h
-
-## Dependencies
-
-Ubuntu, Debian:
-
- sudo apt-get install python3-dnspython
-
-and/or for other operating systems:
-
- pip install dnspython
-
-See https://dnspython.readthedocs.io/en/latest/installation.html for more information.
diff --git a/contrib/seeds/asmap.py b/contrib/seeds/asmap.py
new file mode 100644
index 0000000000..e28e5cf532
--- /dev/null
+++ b/contrib/seeds/asmap.py
@@ -0,0 +1,815 @@
+# Copyright (c) 2022 Pieter Wuille
+# Distributed under the MIT software license, see the accompanying
+# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
+
+"""
+This module provides the ASNEntry and ASMap classes.
+"""
+
+import copy
+import ipaddress
+import random
+import unittest
+from enum import Enum
+from functools import total_ordering
+from typing import Callable, Dict, Iterable, List, Optional, Tuple, Union, overload
+
+def net_to_prefix(net: Union[ipaddress.IPv4Network,ipaddress.IPv6Network]) -> List[bool]:
+ """
+ Convert an IPv4 or IPv6 network to a prefix represented as a list of bits.
+
+ IPv4 ranges are remapped to their IPv4-mapped IPv6 range (::ffff:0:0/96).
+ """
+ num_bits = net.prefixlen
+ netrange = int.from_bytes(net.network_address.packed, 'big')
+
+ # Map an IPv4 prefix into IPv6 space.
+ if isinstance(net, ipaddress.IPv4Network):
+ num_bits += 96
+ netrange += 0xffff00000000
+
+ # Strip unused bottom bits.
+ assert (netrange & ((1 << (128 - num_bits)) - 1)) == 0
+ return [((netrange >> (127 - i)) & 1) != 0 for i in range(num_bits)]
+
+def prefix_to_net(prefix: List[bool]) -> Union[ipaddress.IPv4Network,ipaddress.IPv6Network]:
+ """The reverse operation of net_to_prefix."""
+ # Convert to number
+ netrange = sum(b << (127 - i) for i, b in enumerate(prefix))
+ num_bits = len(prefix)
+ assert num_bits <= 128
+
+ # Return IPv4 range if in ::ffff:0:0/96
+ if num_bits >= 96 and (netrange >> 32) == 0xffff:
+ return ipaddress.IPv4Network((netrange & 0xffffffff, num_bits - 96), True)
+
+ # Return IPv6 range otherwise.
+ return ipaddress.IPv6Network((netrange, num_bits), True)
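
An editorial aside, not part of the diff: a quick round trip through the two helpers above. An IPv4 /24 maps into ::ffff:0:0/96, giving a 96 + 24 = 120-bit prefix, and converts back to the same network (the sample network 192.0.2.0/24 is a documentation range, chosen only for illustration):

```python
import ipaddress

bits = net_to_prefix(ipaddress.ip_network("192.0.2.0/24"))
assert len(bits) == 120                       # 96 bits of ::ffff:0:0/96 plus the /24
assert prefix_to_net(bits) == ipaddress.ip_network("192.0.2.0/24")
```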
+
+# Shortcut for (prefix, ASN) entries.
+ASNEntry = Tuple[List[bool], int]
+
+# Shortcut for (prefix, old ASN, new ASN) entries.
+ASNDiff = Tuple[List[bool], int, int]
+
+class _VarLenCoder:
+ """
+ A class representing a custom variable-length binary encoder/decoder for
+ integers. Each object represents a different coder, with different parameters
+ minval and clsbits.
+
+ The encoding is easiest to describe using an example. Let's say minval=100 and
+ clsbits=[4,2,2,3]. In that case:
+ - x in [100..115]: encoded as [0] + [4-bit BE encoding of (x-100)].
+ - x in [116..119]: encoded as [1,0] + [2-bit BE encoding of (x-116)].
+ - x in [120..123]: encoded as [1,1,0] + [2-bit BE encoding of (x-120)].
+ - x in [124..131]: encoded as [1,1,1] + [3-bit BE encoding of (x-124)].
+
+ In general, every number is encoded as:
+ - First, k "1"-bits, where k is the class the number falls in (there is one class
+ per element of clsbits).
+ - Then, a "0"-bit, unless k is the highest class, in which case there is nothing.
+ - Lastly, clsbits[k] bits encoding in big endian the position in its class that
+ number falls into.
+ - Every class k consists of 2^clsbits[k] consecutive integers. k=0 starts at minval,
+ other classes start one past the last element of the class before it.
+ """
+
+ def __init__(self, minval: int, clsbits: List[int]):
+ """Construct a new _VarLenCoder."""
+ self._minval = minval
+ self._clsbits = clsbits
+ self._maxval = minval + sum(1 << b for b in clsbits) - 1
+
+ def can_encode(self, val: int) -> bool:
+ """Check whether value val is in the range this coder supports."""
+ return self._minval <= val <= self._maxval
+
+ def encode(self, val: int, ret: List[int]) -> None:
+ """Append encoding of val onto integer list ret."""
+
+ assert self._minval <= val <= self._maxval
+ val -= self._minval
+ bits = 0
+ for k, bits in enumerate(self._clsbits):
+ if val >> bits:
+ # If the value will not fit in class k, subtract its range from v,
+ # emit a "1" bit and continue with the next class.
+ val -= 1 << bits
+ ret.append(1)
+ else:
+ if k + 1 < len(self._clsbits):
+ # Unless we're in the last class, emit a "0" bit.
+ ret.append(0)
+ break
+ # And then encode v (now the position within the class) in big endian.
+ ret.extend((val >> (bits - 1 - b)) & 1 for b in range(bits))
+
+ def encode_size(self, val: int) -> int:
+ """Compute how many bits are needed to encode val."""
+ assert self._minval <= val <= self._maxval
+ val -= self._minval
+ ret = 0
+ bits = 0
+ for k, bits in enumerate(self._clsbits):
+ if val >> bits:
+ val -= 1 << bits
+ ret += 1
+ else:
+ ret += k + 1 < len(self._clsbits)
+ break
+ return ret + bits
+
+ def decode(self, stream, bitpos) -> Tuple[int,int]:
+ """Decode a number starting at bitpos in stream, returning value and new bitpos."""
+ val = self._minval
+ bits = 0
+ for k, bits in enumerate(self._clsbits):
+ bit = 0
+ if k + 1 < len(self._clsbits):
+ bit = stream[bitpos]
+ bitpos += 1
+ if not bit:
+ break
+ val += 1 << bits
+ for i in range(bits):
+ bit = stream[bitpos]
+ bitpos += 1
+ val += bit << (bits - 1 - i)
+ return val, bitpos
+
+# Variable-length encoders used in the binary asmap format.
+_CODER_INS = _VarLenCoder(0, [0, 0, 1])
+_CODER_ASN = _VarLenCoder(1, list(range(15, 25)))
+_CODER_MATCH = _VarLenCoder(2, list(range(1, 9)))
+_CODER_JUMP = _VarLenCoder(17, list(range(5, 31)))
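
An editorial worked example, not part of the diff: using the docstring's parameters (minval=100, clsbits=[4,2,2,3]), the value 118 falls in class 1 ([116..119]), so its encoding is a "1" bit, a "0" bit, and the 2-bit big-endian encoding of 118-116 = 2:

```python
coder = _VarLenCoder(100, [4, 2, 2, 3])
bits = []                                  # encoded bits are appended here
coder.encode(118, bits)
assert bits == [1, 0, 1, 0]
assert coder.encode_size(118) == 4
assert coder.decode(bits, 0) == (118, 4)   # value 118, next read position 4
```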
+
+class _Instruction(Enum):
+ """One instruction in the binary asmap format."""
+ # A return instruction, encoded as [0], returns a constant ASN. It is followed by
+ # an integer using the ASN encoding.
+ RETURN = 0
+ # A jump instruction, encoded as [1,0] inspects the next unused bit in the input
+ # and either continues execution (if 0), or skips a specified number of bits (if 1).
+ # It is followed by an integer, and then two subprograms. The integer uses jump encoding
+ # and corresponds to the length of the first subprogram (so it can be skipped).
+ JUMP = 1
+ # A match instruction, encoded as [1,1,0] inspects 1 or more of the next unused bits
+ # in the input with its argument. If they all match, execution continues. If they do
+ # not, failure is returned. If a default instruction has been executed before, instead
+ # of failure the default instruction's argument is returned. It is followed by an
+ # integer in match encoding, and a subprogram. That value is at least 2 bits and at
+ # most 9 bits. An n-bit value signifies matching (n-1) bits in the input with the lower
+ # (n-1) bits in the match value.
+ MATCH = 2
+ # A default instruction, encoded as [1,1,1] sets the default variable to its argument,
+ # and continues execution. It is followed by an integer in ASN encoding, and a subprogram.
+ DEFAULT = 3
+ # Not an actual instruction, but a way to encode the empty program that fails. In the
+ # encoder, it is used more generally to represent the failure case inside MATCH instructions,
+ # which may (if used inside the context of a DEFAULT instruction) actually correspond to
+ # a successful return. In this usage, they're always converted to an actual MATCH or RETURN
+ # before the top level is reached (see make_default below).
+ END = 4
+
+class _BinNode:
+ """A class representing a (node of) the parsed binary asmap format."""
+
+ @overload
+ def __init__(self, ins: _Instruction): ...
+ @overload
+ def __init__(self, ins: _Instruction, arg1: int): ...
+ @overload
+ def __init__(self, ins: _Instruction, arg1: "_BinNode", arg2: "_BinNode"): ...
+ @overload
+ def __init__(self, ins: _Instruction, arg1: int, arg2: "_BinNode"): ...
+
+ def __init__(self, ins: _Instruction, arg1=None, arg2=None):
+ """
+ Construct a new asmap node. Possibilities are:
+ - _BinNode(_Instruction.RETURN, asn)
+ - _BinNode(_Instruction.JUMP, node_0, node_1)
+ - _BinNode(_Instruction.MATCH, val, node)
+ - _BinNode(_Instruction.DEFAULT, asn, node)
+ - _BinNode(_Instruction.END)
+ """
+ self.ins = ins
+ self.arg1 = arg1
+ self.arg2 = arg2
+ if ins == _Instruction.RETURN:
+ assert isinstance(arg1, int)
+ assert arg2 is None
+ self.size = _CODER_INS.encode_size(ins.value) + _CODER_ASN.encode_size(arg1)
+ elif ins == _Instruction.JUMP:
+ assert isinstance(arg1, _BinNode)
+ assert isinstance(arg2, _BinNode)
+ self.size = (_CODER_INS.encode_size(ins.value) + _CODER_JUMP.encode_size(arg1.size) +
+ arg1.size + arg2.size)
+ elif ins == _Instruction.DEFAULT:
+ assert isinstance(arg1, int)
+ assert isinstance(arg2, _BinNode)
+ self.size = _CODER_INS.encode_size(ins.value) + _CODER_ASN.encode_size(arg1) + arg2.size
+ elif ins == _Instruction.MATCH:
+ assert isinstance(arg1, int)
+ assert isinstance(arg2, _BinNode)
+ self.size = (_CODER_INS.encode_size(ins.value) + _CODER_MATCH.encode_size(arg1)
+ + arg2.size)
+ elif ins == _Instruction.END:
+ assert arg1 is None
+ assert arg2 is None
+ self.size = 0
+ else:
+ assert False
+
+ @staticmethod
+ def make_end() -> "_BinNode":
+ """Constructor for a _BinNode with just an END instruction."""
+ return _BinNode(_Instruction.END)
+
+ @staticmethod
+ def make_leaf(val: int) -> "_BinNode":
+ """Constructor for a _BinNode of just a RETURN instruction."""
+ assert val is not None and val > 0
+ return _BinNode(_Instruction.RETURN, val)
+
+ @staticmethod
+ def make_branch(node0: "_BinNode", node1: "_BinNode") -> "_BinNode":
+ """
+ Construct a _BinNode corresponding to running either the node0 or node1 subprogram,
+ based on the next input bit. It exploits shortcuts that are possible in the encoding,
+ and uses either a JUMP, MATCH, or END instruction.
+ """
+ if node0.ins == _Instruction.END and node1.ins == _Instruction.END:
+ return node0
+ if node0.ins == _Instruction.END:
+ if node1.ins == _Instruction.MATCH and node1.arg1 <= 0xFF:
+ return _BinNode(node1.ins, node1.arg1 + (1 << node1.arg1.bit_length()), node1.arg2)
+ return _BinNode(_Instruction.MATCH, 3, node1)
+ if node1.ins == _Instruction.END:
+ if node0.ins == _Instruction.MATCH and node0.arg1 <= 0xFF:
+ return _BinNode(node0.ins, node0.arg1 + (1 << (node0.arg1.bit_length() - 1)),
+ node0.arg2)
+ return _BinNode(_Instruction.MATCH, 2, node0)
+ return _BinNode(_Instruction.JUMP, node0, node1)
+
+ @staticmethod
+ def make_default(val: int, sub: "_BinNode") -> "_BinNode":
+ """
+ Construct a _BinNode that corresponds to the specified subprogram, with the specified
+ default value. It exploits shortcuts that are possible in the encoding, and will use
+ either a DEFAULT or a RETURN instruction."""
+ assert val is not None and val > 0
+ if sub.ins == _Instruction.END:
+ return _BinNode(_Instruction.RETURN, val)
+ if sub.ins in (_Instruction.RETURN, _Instruction.DEFAULT):
+ return sub
+ return _BinNode(_Instruction.DEFAULT, val, sub)
+
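
A minimal standalone sketch (illustrative only, not part of asmap.py) of the MATCH-value arithmetic that make_branch relies on: a MATCH argument encodes a bit path as a sentinel 1 followed by the path bits, so prepending a path bit is plain integer arithmetic. The helper names below are hypothetical.

def encode_path(bits):
    """Encode a list of bools as a sentinel-prefixed MATCH value (1, then the path bits)."""
    val = 1
    for bit in bits:
        val = (val << 1) | bit
    return val

def prepend_bit(val, bit):
    """Prepend one path bit, mirroring the shortcuts in make_branch."""
    if bit:
        # The old sentinel becomes the prepended 1 bit; a new sentinel is added above it.
        return val + (1 << val.bit_length())
    # Insert a 0 bit just below a new sentinel one position higher.
    return val + (1 << (val.bit_length() - 1))

assert encode_path([True]) == 3 and encode_path([False]) == 2   # the MATCH 3 / MATCH 2 cases above
assert prepend_bit(encode_path([False, True]), True) == encode_path([True, False, True])
assert prepend_bit(encode_path([False, True]), False) == encode_path([False, False, True])
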
+@total_ordering
+class ASMap:
+ """
+ A class whose objects represent a mapping from subnets to ASNs.
+
+ Internally the mapping is stored as a binary trie, but can be converted
+ from/to a list of ASNEntry objects, and from/to the binary asmap file format.
+
+ In the trie representation, nodes are represented as bare lists for efficiency
+ and ease of manipulation:
+ - [0] means an unassigned subnet (no ASN mapping for it is present)
+ - [int] means a subnet mapped entirely to the specified ASN.
+ - [node,node] means a subnet whose lower half and upper half have different
+ mappings, represented by new trie nodes.
+ """
+
+ def update(self, prefix: List[bool], asn: int) -> None:
+ """Update this ASMap object to map prefix to the specified asn."""
+ assert asn == 0 or _CODER_ASN.can_encode(asn)
+
+ def recurse(node: List, offset: int) -> None:
+ if offset == len(prefix):
+ # Reached the end of prefix; overwrite this node.
+ node.clear()
+ node.append(asn)
+ return
+ if len(node) == 1:
+ # Need to descend into a leaf node; split it up.
+ oldasn = node[0]
+ node.clear()
+ node.append([oldasn])
+ node.append([oldasn])
+ # Descend into the node.
+ recurse(node[prefix[offset]], offset + 1)
+ # If the result is two identical leaf children, merge them.
+ if len(node[0]) == 1 and len(node[1]) == 1 and node[0] == node[1]:
+ oldasn = node[0][0]
+ node.clear()
+ node.append(oldasn)
+ recurse(self._trie, 0)
+
+ def update_multi(self, entries: List[Tuple[List[bool], int]]) -> None:
+ """Apply multiple update operations, where longer prefixes take precedence."""
+ entries.sort(key=lambda entry: len(entry[0]))
+ for prefix, asn in entries:
+ self.update(prefix, asn)
+
+ def _set_trie(self, trie) -> None:
+ """Set trie directly. Internal use only."""
+ def recurse(node: List) -> None:
+ if len(node) < 2:
+ return
+ recurse(node[0])
+ recurse(node[1])
+ if len(node[0]) == 2:
+ return
+ if node[0] == node[1]:
+ if len(node[0]) == 0:
+ node.clear()
+ else:
+ asn = node[0][0]
+ node.clear()
+ node.append(asn)
+ recurse(trie)
+ self._trie = trie
+
+ def __init__(self, entries: Optional[Iterable[ASNEntry]] = None) -> None:
+ """Construct an ASMap object from an optional list of entries."""
+ self._trie = [0]
+ if entries is not None:
+ def entry_key(entry):
+ """Sort function that places shorter prefixes first."""
+ prefix, asn = entry
+ return len(prefix), prefix, asn
+ for prefix, asn in sorted(entries, key=entry_key):
+ self.update(prefix, asn)
+
+ def lookup(self, prefix: List[bool]) -> Optional[int]:
+ """Look up a prefix. Returns ASN, or 0 if unassigned, or None if indeterminate."""
+ node = self._trie
+ for bit in prefix:
+ if len(node) == 1:
+ break
+ node = node[bit]
+ if len(node) == 1:
+ return node[0]
+ return None
+
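
A small usage sketch of the bare-list trie described above (assuming this module is importable as asmap from contrib/seeds): prefixes are lists of bools, False selects the lower half, and lookup returns the ASN, 0 for unassigned space, or None when the prefix is too short to be determinate.

from asmap import ASMap

m = ASMap()
m.update([False], 1)         # lower half of the address space -> AS1
m.update([True, True], 2)    # upper quarter -> AS2
assert m.lookup([False, True]) == 1
assert m.lookup([True, True, False]) == 2
assert m.lookup([True, False]) == 0   # unassigned
assert m.lookup([True]) is None       # indeterminate: covers both assigned and unassigned space
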
+ def _to_entries_flat(self, fill: bool = False) -> List[ASNEntry]:
+ """Convert an ASMap object to a list of non-overlapping (prefix, asn) objects."""
+ prefix : List[bool] = []
+
+ def recurse(node: List) -> List[ASNEntry]:
+ ret = []
+ if len(node) == 1:
+ if node[0] > 0:
+ ret = [(list(prefix), node[0])]
+ elif len(node) == 2:
+ prefix.append(False)
+ ret = recurse(node[0])
+ prefix[-1] = True
+ ret += recurse(node[1])
+ prefix.pop()
+ if fill and len(ret) > 1:
+ asns = set(x[1] for x in ret)
+ if len(asns) == 1:
+ ret = [(list(prefix), list(asns)[0])]
+ return ret
+ return recurse(self._trie)
+
+ def _to_entries_minimal(self, fill: bool = False) -> List[ASNEntry]:
+ """Convert a trie to a minimal list of ASNEntry objects, exploiting overlap."""
+ prefix : List[bool] = []
+
+ def recurse(node: List) -> (Tuple[Dict[Optional[int], List[ASNEntry]], bool]):
+ if len(node) == 1 and node[0] == 0:
+ return {None if fill else 0: []}, True
+ if len(node) == 1:
+ return {node[0]: [], None: [(list(prefix), node[0])]}, False
+ ret: Dict[Optional[int], List[ASNEntry]] = {}
+ prefix.append(False)
+ left, lhole = recurse(node[0])
+ prefix[-1] = True
+ right, rhole = recurse(node[1])
+ prefix.pop()
+ hole = not fill and (lhole or rhole)
+ def candidate(ctx: Optional[int], res0: Optional[List[ASNEntry]],
+ res1: Optional[List[ASNEntry]]):
+ if res0 is not None and res1 is not None:
+ if ctx not in ret or len(res0) + len(res1) < len(ret[ctx]):
+ ret[ctx] = res0 + res1
+ for ctx in set(left) | set(right):
+ candidate(ctx, left.get(ctx), right.get(ctx))
+ candidate(ctx, left.get(None), right.get(ctx))
+ candidate(ctx, left.get(ctx), right.get(None))
+ if not hole:
+ for ctx in list(ret):
+ if ctx is not None:
+ candidate(None, [(list(prefix), ctx)], ret[ctx])
+ if None in ret:
+ ret = {ctx:entries for ctx, entries in ret.items()
+ if ctx is None or len(entries) < len(ret[None])}
+ if hole:
+ ret = {ctx:entries for ctx, entries in ret.items() if ctx is None or ctx == 0}
+ return ret, hole
+ res, _ = recurse(self._trie)
+ return res[0] if 0 in res else res[None]
+
+ def __str__(self) -> str:
+ """Convert this ASMap object to a string containing Python code constructing it."""
+ return f"ASMap({self._trie})"
+
+ def to_entries(self, overlapping: bool = True, fill: bool = False) -> List[ASNEntry]:
+ """
+ Convert the mappings in this ASMap object to a list of ASNEntry objects.
+
+ Arguments:
+ overlapping: Permit the subnets in the resulting ASNEntry to overlap.
+ Setting this can result in a shorter list.
+ fill: Permit the resulting ASNEntry objects to cover subnets that
+ are unassigned in this ASMap object. Setting this can
+ result in a shorter list.
+ """
+ if overlapping:
+ return self._to_entries_minimal(fill)
+ return self._to_entries_flat(fill)
+
+ @staticmethod
+ def from_random(num_leaves: int = 10, max_asn: int = 6,
+ unassigned_prob: float = 0.5) -> "ASMap":
+ """
+ Construct a random ASMap object, with specified:
+ - Number of leaves in its trie (at least 1)
+ - Maximum ASN value (at least 1)
+ - Probability for leaf nodes to be unassigned
+
+ The number of leaves in the resulting object may be less than what is
+ requested. This method is mostly intended for testing.
+ """
+ assert num_leaves >= 1
+ assert max_asn >= 1 or unassigned_prob == 1
+ assert _CODER_ASN.can_encode(max_asn)
+ assert 0.0 <= unassigned_prob <= 1.0
+ trie: List = []
+ leaves = [trie]
+ ret = ASMap()
+ for i in range(1, num_leaves):
+ idx = random.randrange(i)
+ leaf = leaves[idx]
+ lastleaf = leaves.pop()
+ if idx + 1 < i:
+ leaves[idx] = lastleaf
+ leaf.append([])
+ leaf.append([])
+ leaves.append(leaf[0])
+ leaves.append(leaf[1])
+ for leaf in leaves:
+ if random.random() >= unassigned_prob:
+ leaf.append(random.randrange(1, max_asn + 1))
+ else:
+ leaf.append(0)
+ #pylint: disable=protected-access
+ ret._set_trie(trie)
+ return ret
+
+ def _to_binnode(self, fill: bool = False) -> _BinNode:
+ """Convert a trie to a _BinNode object."""
+ def recurse(node: List) -> Tuple[Dict[Optional[int], _BinNode], bool]:
+ if len(node) == 1 and node[0] == 0:
+ return {(None if fill else 0): _BinNode.make_end()}, True
+ if len(node) == 1:
+ return {None: _BinNode.make_leaf(node[0]), node[0]: _BinNode.make_end()}, False
+ ret: Dict[Optional[int], _BinNode] = {}
+ left, lhole = recurse(node[0])
+ right, rhole = recurse(node[1])
+ hole = (lhole or rhole) and not fill
+
+ def candidate(ctx: Optional[int], arg1, arg2, func: Callable):
+ if arg1 is not None and arg2 is not None:
+ cand = func(arg1, arg2)
+ if ctx not in ret or cand.size < ret[ctx].size:
+ ret[ctx] = cand
+
+ for ctx in set(left) | set(right):
+ candidate(ctx, left.get(ctx), right.get(ctx), _BinNode.make_branch)
+ candidate(ctx, left.get(None), right.get(ctx), _BinNode.make_branch)
+ candidate(ctx, left.get(ctx), right.get(None), _BinNode.make_branch)
+ if not hole:
+ for ctx in set(ret) - set([None]):
+ candidate(None, ctx, ret[ctx], _BinNode.make_default)
+ if None in ret:
+ ret = {ctx:enc for ctx, enc in ret.items()
+ if ctx is None or enc.size < ret[None].size}
+ if hole:
+ ret = {ctx:enc for ctx, enc in ret.items() if ctx is None or ctx == 0}
+ return ret, hole
+ res, _ = recurse(self._trie)
+ return res[0] if 0 in res else res[None]
+
+ @staticmethod
+ def _from_binnode(binnode: _BinNode) -> "ASMap":
+ """Construct an ASMap object from a _BinNode. Internal use only."""
+ def recurse(node: _BinNode, default: int) -> List:
+ if node.ins == _Instruction.RETURN:
+ return [node.arg1]
+ if node.ins == _Instruction.JUMP:
+ return [recurse(node.arg1, default), recurse(node.arg2, default)]
+ if node.ins == _Instruction.MATCH:
+ val = node.arg1
+ sub = recurse(node.arg2, default)
+ while val >= 2:
+ bit = val & 1
+ val >>= 1
+ if bit:
+ sub = [[default], sub]
+ else:
+ sub = [sub, [default]]
+ return sub
+ assert node.ins == _Instruction.DEFAULT
+ return recurse(node.arg2, node.arg1)
+ ret = ASMap()
+ if binnode.ins != _Instruction.END:
+ #pylint: disable=protected-access
+ ret._set_trie(recurse(binnode, 0))
+ return ret
+
+ def to_binary(self, fill: bool = False) -> bytes:
+ """
+ Convert this ASMap object to binary.
+
+ Argument:
+ fill: permit the resulting binary encoding to contain mappings for
+ unassigned subnets in this ASMap object. Doing so may
+ reduce the size of the encoding.
+ Returns:
+ A bytes object with the encoding of this ASMap object.
+ """
+ bits: List[int] = []
+
+ def recurse(node: _BinNode) -> None:
+ _CODER_INS.encode(node.ins.value, bits)
+ if node.ins == _Instruction.RETURN:
+ _CODER_ASN.encode(node.arg1, bits)
+ elif node.ins == _Instruction.JUMP:
+ _CODER_JUMP.encode(node.arg1.size, bits)
+ recurse(node.arg1)
+ recurse(node.arg2)
+ elif node.ins == _Instruction.DEFAULT:
+ _CODER_ASN.encode(node.arg1, bits)
+ recurse(node.arg2)
+ else:
+ assert node.ins == _Instruction.MATCH
+ _CODER_MATCH.encode(node.arg1, bits)
+ recurse(node.arg2)
+
+ binnode = self._to_binnode(fill)
+ if binnode.ins != _Instruction.END:
+ recurse(binnode)
+
+ val = 0
+ nbits = 0
+ ret = []
+ for bit in bits:
+ val += (bit << nbits)
+ nbits += 1
+ if nbits == 8:
+ ret.append(val)
+ val = 0
+ nbits = 0
+ if nbits:
+ ret.append(val)
+ return bytes(ret)
+
+ @staticmethod
+ def from_binary(bindata: bytes) -> Optional["ASMap"]:
+ """Decode an ASMap object from the provided binary encoding."""
+
+ bits: List[int] = []
+ for byte in bindata:
+ bits.extend((byte >> i) & 1 for i in range(8))
+
+ def recurse(bitpos: int) -> Tuple[_BinNode, int]:
+ insval, bitpos = _CODER_INS.decode(bits, bitpos)
+ ins = _Instruction(insval)
+ if ins == _Instruction.RETURN:
+ asn, bitpos = _CODER_ASN.decode(bits, bitpos)
+ return _BinNode(ins, asn), bitpos
+ if ins == _Instruction.JUMP:
+ jump, bitpos = _CODER_JUMP.decode(bits, bitpos)
+ left, bitpos1 = recurse(bitpos)
+ if bitpos1 != bitpos + jump:
+ raise ValueError("Inconsistent jump")
+ right, bitpos = recurse(bitpos1)
+ return _BinNode(ins, left, right), bitpos
+ if ins == _Instruction.MATCH:
+ match, bitpos = _CODER_MATCH.decode(bits, bitpos)
+ sub, bitpos = recurse(bitpos)
+ return _BinNode(ins, match, sub), bitpos
+ assert ins == _Instruction.DEFAULT
+ asn, bitpos = _CODER_ASN.decode(bits, bitpos)
+ sub, bitpos = recurse(bitpos)
+ return _BinNode(ins, asn, sub), bitpos
+
+ if len(bits) == 0:
+ binnode = _BinNode(_Instruction.END)
+ else:
+ try:
+ binnode, bitpos = recurse(0)
+ except (ValueError, IndexError):
+ return None
+ if bitpos < len(bits) - 7:
+ return None
+ if not all(bit == 0 for bit in bits[bitpos:]):
+ return None
+
+ return ASMap._from_binnode(binnode)
+
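
A standalone sketch of the LSB-first bit packing that to_binary() and from_binary() perform at the byte boundary (the helper names below are illustrative, not part of this module):

def pack_bits(bits):
    """Pack a list of 0/1 values into bytes, least significant bit first."""
    out, val, nbits = [], 0, 0
    for bit in bits:
        val += bit << nbits
        nbits += 1
        if nbits == 8:
            out.append(val)
            val, nbits = 0, 0
    if nbits:
        out.append(val)  # final partial byte, padded with 0 bits
    return bytes(out)

def unpack_bits(data):
    """Inverse of pack_bits, up to the zero padding in the last byte."""
    return [(byte >> i) & 1 for byte in data for i in range(8)]

bits = [1, 0, 1, 1, 0, 0, 1, 0, 1]
assert pack_bits(bits) == b'\x4d\x01'
assert unpack_bits(pack_bits(bits))[:len(bits)] == bits
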
+ def __lt__(self, other: "ASMap") -> bool:
+ return self._trie < other._trie
+
+ def __eq__(self, other: object) -> bool:
+ if isinstance(other, ASMap):
+ return self._trie == other._trie
+ return False
+
+ def extends(self, req: "ASMap") -> bool:
+ """Determine whether this matches req for all subranges where req is assigned."""
+ def recurse(actual: List, require: List) -> bool:
+ if len(require) == 1 and require[0] == 0:
+ return True
+ if len(require) == 1:
+ if len(actual) == 1:
+ return bool(require[0] == actual[0])
+ return recurse(actual[0], require) and recurse(actual[1], require)
+ if len(actual) == 2:
+ return recurse(actual[0], require[0]) and recurse(actual[1], require[1])
+ return recurse(actual, require[0]) and recurse(actual, require[1])
+ assert isinstance(req, ASMap)
+ #pylint: disable=protected-access
+ return recurse(self._trie, req._trie)
+
+ def diff(self, other: "ASMap") -> List[ASNDiff]:
+ """Compute the diff from self to other."""
+ prefix: List[bool] = []
+ ret: List[ASNDiff] = []
+
+ def recurse(old_node: List, new_node: List):
+ if len(old_node) == 1 and len(new_node) == 1:
+ if old_node[0] != new_node[0]:
+ ret.append((list(prefix), old_node[0], new_node[0]))
+ else:
+ old_left: List = old_node if len(old_node) == 1 else old_node[0]
+ old_right: List = old_node if len(old_node) == 1 else old_node[1]
+ new_left: List = new_node if len(new_node) == 1 else new_node[0]
+ new_right: List = new_node if len(new_node) == 1 else new_node[1]
+ prefix.append(False)
+ recurse(old_left, new_left)
+ prefix[-1] = True
+ recurse(old_right, new_right)
+ prefix.pop()
+ assert isinstance(other, ASMap)
+ #pylint: disable=protected-access
+ recurse(self._trie, other._trie)
+ return ret
+
+ def __copy__(self) -> "ASMap":
+ """Construct a copy of this ASMap object. Its state will not be shared."""
+ ret = ASMap()
+ #pylint: disable=protected-access
+ ret._set_trie(copy.deepcopy(self._trie))
+ return ret
+
+ def __deepcopy__(self, _) -> "ASMap":
+ # ASMap objects do not allow sharing of the _trie member, so we don't need the memoization.
+ return self.__copy__()
+
+
+class TestASMap(unittest.TestCase):
+ """Unit tests for this module."""
+
+ def test_ipv6_prefix_roundtrips(self) -> None:
+ """Test that random IPv6 network ranges roundtrip through prefix encoding."""
+ for _ in range(20):
+ net_bits = random.getrandbits(128)
+ for prefix_len in range(0, 129):
+ masked_bits = (net_bits >> (128 - prefix_len)) << (128 - prefix_len)
+ net = ipaddress.IPv6Network((masked_bits.to_bytes(16, 'big'), prefix_len))
+ prefix = net_to_prefix(net)
+ self.assertTrue(len(prefix) <= 128)
+ net2 = prefix_to_net(prefix)
+ self.assertEqual(net, net2)
+
+ def test_ipv4_prefix_roundtrips(self) -> None:
+ """Test that random IPv4 network ranges roundtrip through prefix encoding."""
+ for _ in range(100):
+ net_bits = random.getrandbits(32)
+ for prefix_len in range(0, 33):
+ masked_bits = (net_bits >> (32 - prefix_len)) << (32 - prefix_len)
+ net = ipaddress.IPv4Network((masked_bits.to_bytes(4, 'big'), prefix_len))
+ prefix = net_to_prefix(net)
+ self.assertTrue(32 <= len(prefix) <= 128)
+ net2 = prefix_to_net(prefix)
+ self.assertEqual(net, net2)
+
+ def test_asmap_roundtrips(self) -> None:
+ """Test case that verifies random ASMap objects roundtrip to/from entries/binary."""
+ # Iterate over the number of leaves the random test ASMap objects have.
+ for leaves in range(1, 20):
+ # Iterate over the number of bits in the AS numbers used.
+ for asnbits in range(0, 24):
+ # Iterate over the probability that leaves are unassigned.
+ for pct in range(101):
+ # Construct a random ASMap object according to the above parameters.
+ asmap = ASMap.from_random(num_leaves=leaves, max_asn=1 + (1 << asnbits),
+ unassigned_prob=0.01 * pct)
+ # Run tests for to_entries and construction from those entries, both
+ # for overlapping and non-overlapping ones.
+ for overlapping in [False, True]:
+ entries = asmap.to_entries(overlapping=overlapping, fill=False)
+ random.shuffle(entries)
+ asmap2 = ASMap(entries)
+ assert asmap2 is not None
+ self.assertEqual(asmap2, asmap)
+ entries = asmap.to_entries(overlapping=overlapping, fill=True)
+ random.shuffle(entries)
+ asmap2 = ASMap(entries)
+ assert asmap2 is not None
+ self.assertTrue(asmap2.extends(asmap))
+
+ # Run tests for to_binary and construction from binary.
+ enc = asmap.to_binary(fill=False)
+ asmap3 = ASMap.from_binary(enc)
+ assert asmap3 is not None
+ self.assertEqual(asmap3, asmap)
+ enc = asmap.to_binary(fill=True)
+ asmap3 = ASMap.from_binary(enc)
+ assert asmap3 is not None
+ self.assertTrue(asmap3.extends(asmap))
+
+ def test_patching(self) -> None:
+ """Test behavior of update, lookup, extends, and diff."""
+ #pylint: disable=too-many-locals,too-many-nested-blocks
+ # Iterate over the number of leaves the random test ASMap objects have.
+ for leaves in range(1, 20):
+ # Iterate over the number of bits in the AS numbers used.
+ for asnbits in range(0, 10):
+ # Iterate over the probability that leaves are unassigned.
+ for pct in range(0, 101):
+ # Construct a random ASMap object according to the above parameters.
+ asmap = ASMap.from_random(num_leaves=leaves, max_asn=1 + (1 << asnbits),
+ unassigned_prob=0.01 * pct)
+ # Make a copy of that asmap object to which patches will be applied.
+ # It starts off being equal to asmap.
+ patched = copy.copy(asmap)
+ # Keep a list of patches performed.
+ patches: List[ASNEntry] = []
+ # Initially there cannot be any difference.
+ self.assertEqual(asmap.diff(patched), [])
+ # Make 5 patches, each building on top of the previous ones.
+ for _ in range(0, 5):
+ # Construct a random path and new ASN to assign it to, apply it to patched,
+ # and remember it in patches.
+ pathlen = random.randrange(5)
+ path = [random.getrandbits(1) != 0 for _ in range(pathlen)]
+ newasn = random.randrange(1 + (1 << asnbits))
+ patched.update(path, newasn)
+ patches = [(path, newasn)] + patches
+
+ # Compute the diff, and whether asmap extends patched, and the other way
+ # around.
+ diff = asmap.diff(patched)
+ self.assertEqual(asmap == patched, len(diff) == 0)
+ extends = asmap.extends(patched)
+ back_extends = patched.extends(asmap)
+ # Determine whether those extends results are consistent with the diff
+ # result.
+ self.assertEqual(extends, all(d[2] == 0 for d in diff))
+ self.assertEqual(back_extends, all(d[1] == 0 for d in diff))
+ # For every diff found:
+ for path, old_asn, new_asn in diff:
+ # Verify asmap and patched actually differ there.
+ self.assertTrue(old_asn != new_asn)
+ self.assertEqual(asmap.lookup(path), old_asn)
+ self.assertEqual(patched.lookup(path), new_asn)
+ for _ in range(2):
+ # Extend the path far enough that it's smaller than any mapped
+ # range, and check the lookup holds there too.
+ spec_path = list(path)
+ while len(spec_path) < 32:
+ spec_path.append(random.getrandbits(1) != 0)
+ self.assertEqual(asmap.lookup(spec_path), old_asn)
+ self.assertEqual(patched.lookup(spec_path), new_asn)
+ # Search through the list of performed patches to find the last one
+ # applying to the extended path (note that patches is in reverse
+ # order, so the first match should work).
+ found = False
+ for patch_path, patch_asn in patches:
+ if spec_path[:len(patch_path)] == patch_path:
+ # When found, it must match whatever the result was patched
+ # to.
+ self.assertEqual(new_asn, patch_asn)
+ found = True
+ break
+ # And such a patch must exist.
+ self.assertTrue(found)
+
+if __name__ == '__main__':
+ unittest.main()
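
Taken together, an end-to-end usage sketch of this module (assuming it is importable as asmap; the prefixes use documentation ranges and the ASNs are made-up example values):

import ipaddress
from asmap import ASMap, net_to_prefix

# Build a map from (prefix, ASN) entries.
entries = [
    (net_to_prefix(ipaddress.ip_network("192.0.2.0/24")), 64496),
    (net_to_prefix(ipaddress.ip_network("2001:db8::/32")), 64497),
]
asmap = ASMap(entries)

# Look up an address by converting it to a prefix of bools first.
assert asmap.lookup(net_to_prefix(ipaddress.ip_network("192.0.2.1/32"))) == 64496

# Serialize to the binary asmap format and back; the roundtrip preserves the mapping.
blob = asmap.to_binary(fill=False)
assert ASMap.from_binary(blob) == asmap

# With fill=True the encoding may also claim unassigned space, so the decoded
# map only "extends" the original rather than equalling it.
filled = ASMap.from_binary(asmap.to_binary(fill=True))
assert filled is not None and filled.extends(asmap)
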
diff --git a/contrib/seeds/makeseeds.py b/contrib/seeds/makeseeds.py
index 2b377f6c01..37c6f5fd7c 100755
--- a/contrib/seeds/makeseeds.py
+++ b/contrib/seeds/makeseeds.py
@@ -6,22 +6,23 @@
# Generate seeds.txt from Pieter's DNS seeder
#
+import argparse
+import ipaddress
import re
import sys
-import dns.resolver
import collections
+from typing import List, Dict, Tuple, Union
-NSEEDS=512
-
-MAX_SEEDS_PER_ASN=2
+from asmap import ASMap, net_to_prefix
-MIN_BLOCKS = 337600
+NSEEDS=512
-# These are hosts that have been observed to be behaving strangely (e.g.
-# aggressively connecting to every node).
-with open("suspicious_hosts.txt", mode="r", encoding="utf-8") as f:
- SUSPICIOUS_HOSTS = {s.strip() for s in f if s.strip()}
+MAX_SEEDS_PER_ASN = {
+ 'ipv4': 2,
+ 'ipv6': 10,
+}
+MIN_BLOCKS = 730000
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
@@ -40,10 +41,14 @@ PATTERN_AGENT = re.compile(
r"23.99"
r")")
-def parseline(line):
+def parseline(line: str) -> Union[dict, None]:
+ """ Parses a line from `seeds_main.txt` into a dictionary of details for that line.
+ or `None`, if the line could not be parsed.
+ """
sline = line.split()
if len(sline) < 11:
- return None
+ # line too short to be valid, skip it.
+ return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
@@ -107,98 +112,95 @@ def parseline(line):
'sortkey': sortkey,
}
-def dedup(ips):
- '''deduplicate by address,port'''
+def dedup(ips: List[Dict]) -> List[Dict]:
+ """ Remove duplicates from `ips` where multiple ips share address and port. """
d = {}
for ip in ips:
d[ip['ip'],ip['port']] = ip
return list(d.values())
-def filtermultiport(ips):
- '''Filter out hosts with more nodes per IP'''
+def filtermultiport(ips: List[Dict]) -> List[Dict]:
+ """ Filter out hosts with more nodes per IP"""
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
-def lookup_asn(net, ip):
- '''
- Look up the asn for an IP (4 or 6) address by querying cymru.com, or None
- if it could not be found.
- '''
- try:
- if net == 'ipv4':
- ipaddr = ip
- prefix = '.origin'
- else: # http://www.team-cymru.com/IP-ASN-mapping.html
- res = str() # 2001:4860:b002:23::68
- for nb in ip.split(':')[:4]: # pick the first 4 nibbles
- for c in nb.zfill(4): # right padded with '0'
- res += c + '.' # 2001 4860 b002 0023
- ipaddr = res.rstrip('.') # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3
- prefix = '.origin6'
-
- asn = int([x.to_text() for x in dns.resolver.resolve('.'.join(
- reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com',
- 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
- return asn
- except Exception as e:
- sys.stderr.write(f'ERR: Could not resolve ASN for "{ip}": {e}\n')
- return None
-
# Based on Greg Maxwell's seed_filter.py
-def filterbyasn(ips, max_per_asn, max_per_net):
+def filterbyasn(asmap: ASMap, ips: List[Dict], max_per_asn: Dict, max_per_net: int) -> List[Dict]:
+ """ Prunes `ips` by
+ (a) trimming ips to have at most `max_per_net` ips from each net (e.g. ipv4, ipv6); and
+ (b) trimming ips to have at most `max_per_asn` ips from each asn in each net.
+ """
# Sift out ips by type
ips_ipv46 = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']]
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv46 by ASN, and limit to max_per_net per network
result = []
- net_count = collections.defaultdict(int)
- asn_count = collections.defaultdict(int)
- for ip in ips_ipv46:
+ net_count: Dict[str, int] = collections.defaultdict(int)
+ asn_count: Dict[Tuple[str, int], int] = collections.defaultdict(int)
+
+ for i, ip in enumerate(ips_ipv46):
if net_count[ip['net']] == max_per_net:
+ # do not add this ip as we already have too many
+ # ips from this network
continue
- asn = lookup_asn(ip['net'], ip['ip'])
- if asn is None or asn_count[asn] == max_per_asn:
+ asn = asmap.lookup(net_to_prefix(ipaddress.ip_network(ip['ip'])))
+ if not asn or asn_count[ip['net'], asn] == max_per_asn[ip['net']]:
+ # do not add this ip as we already have too many
+ # ips from this ASN on this network
continue
- asn_count[asn] += 1
+ asn_count[ip['net'], asn] += 1
net_count[ip['net']] += 1
+ ip['asn'] = asn
result.append(ip)
# Add back Onions (up to max_per_net)
result.extend(ips_onion[0:max_per_net])
return result
-def ip_stats(ips):
- hist = collections.defaultdict(int)
+def ip_stats(ips: List[Dict]) -> str:
+ """ Format and return pretty string from `ips`. """
+ hist: Dict[str, int] = collections.defaultdict(int)
for ip in ips:
if ip is not None:
hist[ip['net']] += 1
- return '%6d %6d %6d' % (hist['ipv4'], hist['ipv6'], hist['onion'])
+ return f"{hist['ipv4']:6d} {hist['ipv6']:6d} {hist['onion']:6d}"
+
+def parse_args():
+ argparser = argparse.ArgumentParser(description='Generate a list of bitcoin node seed ip addresses.')
+ argparser.add_argument("-a","--asmap", help='the location of the asmap asn database file (required)', required=True)
+ return argparser.parse_args()
def main():
+ args = parse_args()
+
+ print(f'Loading asmap database "{args.asmap}"…', end='', file=sys.stderr, flush=True)
+ with open(args.asmap, 'rb') as f:
+ asmap = ASMap.from_binary(f.read())
+ print('Done.', file=sys.stderr)
+
+ print('Loading and parsing DNS seeds…', end='', file=sys.stderr, flush=True)
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
+ print('Done.', file=sys.stderr)
print('\x1b[7m IPv4 IPv6 Onion Pass \x1b[0m', file=sys.stderr)
- print('%s Initial' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Initial', file=sys.stderr)
# Skip entries with invalid address.
ips = [ip for ip in ips if ip is not None]
- print('%s Skip entries with invalid address' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Skip entries with invalid address', file=sys.stderr)
# Skip duplicates (in case multiple seeds files were concatenated)
ips = dedup(ips)
- print('%s After removing duplicates' % (ip_stats(ips)), file=sys.stderr)
- # Skip entries from suspicious hosts.
- ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
- print('%s Skip entries from suspicious hosts' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} After removing duplicates', file=sys.stderr)
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
- print('%s Enforce minimal number of blocks' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Enforce minimal number of blocks', file=sys.stderr)
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
- print('%s Require service bit 1' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Require service bit 1', file=sys.stderr)
# Require at least 50% 30-day uptime for clearnet, 10% for onion.
req_uptime = {
'ipv4': 50,
@@ -206,25 +208,28 @@ def main():
'onion': 10,
}
ips = [ip for ip in ips if ip['uptime'] > req_uptime[ip['net']]]
- print('%s Require minimum uptime' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Require minimum uptime', file=sys.stderr)
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
- print('%s Require a known and recent user agent' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Require a known and recent user agent', file=sys.stderr)
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
- print('%s Filter out hosts with multiple bitcoin ports' % (ip_stats(ips)), file=sys.stderr)
+ print(f'{ip_stats(ips):s} Filter out hosts with multiple bitcoin ports', file=sys.stderr)
# Look up ASNs and limit results, both per ASN and globally.
- ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
- print('%s Look up ASNs and limit results per ASN and per net' % (ip_stats(ips)), file=sys.stderr)
+ ips = filterbyasn(asmap, ips, MAX_SEEDS_PER_ASN, NSEEDS)
+ print(f'{ip_stats(ips):s} Look up ASNs and limit results per ASN and per net', file=sys.stderr)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
- print('[%s]:%i' % (ip['ip'], ip['port']))
+ print(f"[{ip['ip']}]:{ip['port']}", end="")
else:
- print('%s:%i' % (ip['ip'], ip['port']))
+ print(f"{ip['ip']}:{ip['port']}", end="")
+ if 'asn' in ip:
+ print(f" # AS{ip['asn']}", end="")
+ print()
if __name__ == '__main__':
main()
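
The per-ASN / per-network capping in filterbyasn can be illustrated standalone (a sketch with arbitrary example ASNs, not the script itself): counts are keyed by (net, asn) tuples, so the ipv4 and ipv6 caps are tracked independently.

import collections

MAX_PER_ASN = {'ipv4': 2, 'ipv6': 10}
MAX_PER_NET = 4

def cap(candidates):
    """candidates: iterable of (net, asn) pairs, best first. Returns the kept indices."""
    net_count = collections.defaultdict(int)
    asn_count = collections.defaultdict(int)
    kept = []
    for idx, (net, asn) in enumerate(candidates):
        if net_count[net] == MAX_PER_NET or asn_count[net, asn] == MAX_PER_ASN[net]:
            continue
        net_count[net] += 1
        asn_count[net, asn] += 1
        kept.append(idx)
    return kept

print(cap([('ipv4', 13335), ('ipv4', 13335), ('ipv4', 13335), ('ipv4', 24940)]))
# -> [0, 1, 3]: the third AS13335 entry exceeds the per-ASN cap of 2

In the script itself the ASN comes from asmap.lookup() on each candidate address, the per-ASN caps are MAX_SEEDS_PER_ASN, and the per-network cap is NSEEDS.
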
diff --git a/contrib/seeds/suspicious_hosts.txt b/contrib/seeds/suspicious_hosts.txt
deleted file mode 100644
index 13385cc816..0000000000
--- a/contrib/seeds/suspicious_hosts.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-130.211.129.106
-148.251.238.178
-176.9.46.6
-178.63.107.226
-54.173.72.127
-54.174.10.182
-54.183.64.54
-54.194.231.211
-54.66.214.167
-54.66.220.137
-54.67.33.14
-54.77.251.214
-54.94.195.96
-54.94.200.247
-83.81.130.26
-88.198.17.7
\ No newline at end of file
diff --git a/contrib/signet/miner b/contrib/signet/miner
index b366b98e2d..61415cb2dd 100755
--- a/contrib/signet/miner
+++ b/contrib/signet/miner
@@ -21,8 +21,8 @@ PATH_BASE_CONTRIB_SIGNET = os.path.abspath(os.path.dirname(os.path.realpath(__fi
PATH_BASE_TEST_FUNCTIONAL = os.path.abspath(os.path.join(PATH_BASE_CONTRIB_SIGNET, "..", "..", "test", "functional"))
sys.path.insert(0, PATH_BASE_TEST_FUNCTIONAL)
-from test_framework.blocktools import WITNESS_COMMITMENT_HEADER, script_BIP34_coinbase_height # noqa: E402
-from test_framework.messages import CBlock, CBlockHeader, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, from_hex, deser_string, hash256, ser_compact_size, ser_string, ser_uint256, tx_from_hex, uint256_from_str # noqa: E402
+from test_framework.blocktools import get_witness_script, script_BIP34_coinbase_height # noqa: E402
+from test_framework.messages import CBlock, CBlockHeader, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, from_hex, deser_string, ser_compact_size, ser_string, ser_uint256, tx_from_hex # noqa: E402
from test_framework.script import CScriptOp # noqa: E402
logging.basicConfig(
@@ -123,10 +123,6 @@ def create_coinbase(height, value, spk):
cb.vout = [CTxOut(value, spk)]
return cb
-def get_witness_script(witness_root, witness_nonce):
- commitment = uint256_from_str(hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce)))
- return b"\x6a" + CScriptOp.encode_op_pushdata(WITNESS_COMMITMENT_HEADER + ser_uint256(commitment))
-
def signet_txs(block, challenge):
# assumes signet solution has not been added yet so does not need
# to be removed
@@ -222,7 +218,7 @@ def generate_psbt(tmpl, reward_spk, *, blocktime=None):
cbwit = CTxInWitness()
cbwit.scriptWitness.stack = [ser_uint256(witnonce)]
block.vtx[0].wit.vtxinwit = [cbwit]
- block.vtx[0].vout.append(CTxOut(0, get_witness_script(witroot, witnonce)))
+ block.vtx[0].vout.append(CTxOut(0, bytes(get_witness_script(witroot, witnonce))))
signme, spendme = signet_txs(block, signet_spk_bin)
@@ -627,5 +623,3 @@ def main():
if __name__ == "__main__":
main()
-
-
diff --git a/contrib/testgen/README.md b/contrib/testgen/README.md
index 66276ec9dd..2f0288df16 100644
--- a/contrib/testgen/README.md
+++ b/contrib/testgen/README.md
@@ -2,7 +2,7 @@
Utilities to generate test vectors for the data-driven Bitcoin tests.
-Usage:
+To use inside a scripted-diff (or just execute directly):
./gen_key_io_test_vectors.py valid 70 > ../../src/test/data/key_io_valid.json
./gen_key_io_test_vectors.py invalid 70 > ../../src/test/data/key_io_invalid.json
diff --git a/contrib/testgen/gen_key_io_test_vectors.py b/contrib/testgen/gen_key_io_test_vectors.py
index 4aa7dc200b..7bfb1d76a8 100755
--- a/contrib/testgen/gen_key_io_test_vectors.py
+++ b/contrib/testgen/gen_key_io_test_vectors.py
@@ -4,10 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Generate valid and invalid base58/bech32(m) address and private key test vectors.
-
-Usage:
- ./gen_key_io_test_vectors.py valid 70 > ../../src/test/data/key_io_valid.json
- ./gen_key_io_test_vectors.py invalid 70 > ../../src/test/data/key_io_invalid.json
'''
from itertools import islice
@@ -131,7 +127,7 @@ def is_valid_bech32(v):
def gen_valid_base58_vector(template):
'''Generate valid base58 vector'''
prefix = bytearray(template[0])
- payload = bytearray(os.urandom(template[1]))
+ payload = rand_bytes(size=template[1])
suffix = bytearray(template[2])
dst_prefix = bytearray(template[4])
dst_suffix = bytearray(template[5])
@@ -143,7 +139,7 @@ def gen_valid_bech32_vector(template):
'''Generate valid bech32 vector'''
hrp = template[0]
witver = template[1]
- witprog = bytearray(os.urandom(template[2]))
+ witprog = rand_bytes(size=template[2])
encoding = template[4]
dst_prefix = bytearray(template[5])
rv = bech32_encode(encoding, hrp, [witver] + convertbits(witprog, 8, 5))
@@ -173,17 +169,17 @@ def gen_invalid_base58_vector(template):
corrupt_suffix = randbool(0.2)
if corrupt_prefix:
- prefix = os.urandom(1)
+ prefix = rand_bytes(size=1)
else:
prefix = bytearray(template[0])
if randomize_payload_size:
- payload = os.urandom(max(int(random.expovariate(0.5)), 50))
+ payload = rand_bytes(size=max(int(random.expovariate(0.5)), 50))
else:
- payload = os.urandom(template[1])
+ payload = rand_bytes(size=template[1])
if corrupt_suffix:
- suffix = os.urandom(len(template[2]))
+ suffix = rand_bytes(size=len(template[2]))
else:
suffix = bytearray(template[2])
@@ -204,7 +200,7 @@ def gen_invalid_bech32_vector(template):
to_upper = randbool(0.1)
hrp = template[0]
witver = template[1]
- witprog = bytearray(os.urandom(template[2]))
+ witprog = rand_bytes(size=template[2])
encoding = template[3]
if no_data:
@@ -234,6 +230,9 @@ def randbool(p = 0.5):
'''Return True with P(p)'''
return random.random() < p
+def rand_bytes(*, size):
+ return bytearray(random.getrandbits(8) for _ in range(size))
+
def gen_invalid_vectors():
'''Generate invalid test vectors'''
# start with some manual edge-cases
@@ -250,6 +249,7 @@ def gen_invalid_vectors():
if __name__ == '__main__':
import json
iters = {'valid':gen_valid_vectors, 'invalid':gen_invalid_vectors}
+ random.seed(42)
try:
uiter = iters[sys.argv[1]]
except IndexError:
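
A quick sketch of why the random.seed(42) call matters here: with the generator seeded, rand_bytes() is reproducible, so the emitted test vectors are deterministic across runs.

import random

def rand_bytes(*, size):
    return bytearray(random.getrandbits(8) for _ in range(size))

random.seed(42)
first = bytes(rand_bytes(size=4))
random.seed(42)
assert bytes(rand_bytes(size=4)) == first
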
diff --git a/contrib/verify-commits/trusted-git-root b/contrib/verify-commits/trusted-git-root
index c60f8ab695..1c42195961 100644
--- a/contrib/verify-commits/trusted-git-root
+++ b/contrib/verify-commits/trusted-git-root
@@ -1 +1 @@
-82bcf405f6db1d55b684a1f63a4aabad376cdad7
+577bd51a4b8de066466a445192c1c653872657e2
diff --git a/contrib/verify-commits/trusted-keys b/contrib/verify-commits/trusted-keys
index e83bfd7345..046589a583 100644
--- a/contrib/verify-commits/trusted-keys
+++ b/contrib/verify-commits/trusted-keys
@@ -1,8 +1,7 @@
71A3B16735405025D447E8F274810B012346C9A6
133EAC179436F14A5CF1B794860FEB804E669320
-32EE5C4C3FA15CCADB46ABE529D4BCB6416F53EC
B8B3F1C0E58C15DB6A81D30C3648A882F4316B9B
-CA03882CB1FC067B5D3ACFE4D300116E1C875A3D
E777299FC265DD04793070EB944D35F9AC3DB76A
D1DBF2C4B96F2DEBF4C16654410108112E7EA81F
152812300785C96444D3334D17565732E08E5E41
+6B002C6EA3F91B1B0DF0C9BC8F617F1200A6D25C
diff --git a/contrib/verify-commits/verify-commits.py b/contrib/verify-commits/verify-commits.py
index 7e46c6fd47..2ff14c1f86 100755
--- a/contrib/verify-commits/verify-commits.py
+++ b/contrib/verify-commits/verify-commits.py
@@ -82,11 +82,16 @@ def main():
# get directory of this program and read data files
dirname = os.path.dirname(os.path.abspath(__file__))
print("Using verify-commits data from " + dirname)
- verified_root = open(dirname + "/trusted-git-root", "r", encoding="utf8").read().splitlines()[0]
- verified_sha512_root = open(dirname + "/trusted-sha512-root-commit", "r", encoding="utf8").read().splitlines()[0]
- revsig_allowed = open(dirname + "/allow-revsig-commits", "r", encoding="utf-8").read().splitlines()
- unclean_merge_allowed = open(dirname + "/allow-unclean-merge-commits", "r", encoding="utf-8").read().splitlines()
- incorrect_sha512_allowed = open(dirname + "/allow-incorrect-sha512-commits", "r", encoding="utf-8").read().splitlines()
+ with open(dirname + "/trusted-git-root", "r", encoding="utf8") as f:
+ verified_root = f.read().splitlines()[0]
+ with open(dirname + "/trusted-sha512-root-commit", "r", encoding="utf8") as f:
+ verified_sha512_root = f.read().splitlines()[0]
+ with open(dirname + "/allow-revsig-commits", "r", encoding="utf8") as f:
+ revsig_allowed = f.read().splitlines()
+ with open(dirname + "/allow-unclean-merge-commits", "r", encoding="utf8") as f:
+ unclean_merge_allowed = f.read().splitlines()
+ with open(dirname + "/allow-incorrect-sha512-commits", "r", encoding="utf8") as f:
+ incorrect_sha512_allowed = f.read().splitlines()
# Set commit and branch and set variables
current_commit = args.commit
diff --git a/contrib/windeploy/win-codesign.cert b/contrib/windeploy/win-codesign.cert
index e763df5847..22f17296b6 100644
--- a/contrib/windeploy/win-codesign.cert
+++ b/contrib/windeploy/win-codesign.cert
@@ -1,89 +1,112 @@
+-----BEGIN CERTIFICATE-----
+MIIHfDCCBWSgAwIBAgIQCmVvdQal72U2QxbUTT3SRTANBgkqhkiG9w0BAQsFADBp
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xQTA/BgNVBAMT
+OERpZ2lDZXJ0IFRydXN0ZWQgRzQgQ29kZSBTaWduaW5nIFJTQTQwOTYgU0hBMzg0
+IDIwMjEgQ0ExMB4XDTIyMDUyNDAwMDAwMFoXDTI0MDUyOTIzNTk1OVowgYAxCzAJ
+BgNVBAYTAlVTMREwDwYDVQQIEwhEZWxhd2FyZTEOMAwGA1UEBxMFTGV3ZXMxJjAk
+BgNVBAoTHUJpdGNvaW4gQ29yZSBDb2RlIFNpZ25pbmcgTExDMSYwJAYDVQQDEx1C
+aXRjb2luIENvcmUgQ29kZSBTaWduaW5nIExMQzCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBALewxfjztuRTDNAGf7zkqqWNEt28CZmVJHoYltVRxtE1BP45
+BfmptH5eM1JC/XosTPytHRFeOkO4YVAtiELxK9S/82OZlKA7Mx7PW6vv1184u8+m
+P3WpTN/KAZTaW9fB0ELTSCuqsvXq2crM2T7NudJnSyWh2VBjLfPPCAcYwzyGKQbl
+jQWjFEJDJWFK83t9mK/v0WQgA3jGJeaz+V6CYXMS7UgpdG8dUhg9o63gYJZAW5pY
+RIsNRcJCM5LHhwEMW5329UsTmYCfP7/53emepbQ0n8ijVZjgaJ+LZ8NspBLSeCiF
+9UPCKX82uWiQAUTbYHCfSi3I0f3wQidXL9ZY+PXmalM7BMuQ+c2xEcl97CnhrDzx
+EBwZvvOC9wGoG+8+epV4TjUZWf+7QN1ZYeg1rai7c7c8u9ILogE8su2xVoz333TH
+CDvScIgnQXmk+cbKMBtg9kM0F+aLWsN2xVf0uAj3U7sdXLrfJeW0DZIktWtTBQzX
+O/OE4Ka+1WFnDg0HJIih0cTjl9YYvfe53L4pCGy+qGt/XGBRqCMfXp3g+H9FGR5r
+pensVVcsrv3GbTfYdlpdmp9OHH5G57GTAZueobCZg7r7RKK0zPU9EiTLJxzyXuai
+v/Ksd8eIhHRjewMaQuAtQM1tO+oKAbLF0v2M7v7/aVT76X32JllYAizm3zjvAgMB
+AAGjggIGMIICAjAfBgNVHSMEGDAWgBRoN+Drtjv4XxGG+/5hewiIZfROQjAdBgNV
+HQ4EFgQUvCpU58PIuofv0kHJ3Ty0YDKEy3cwDgYDVR0PAQH/BAQDAgeAMBMGA1Ud
+JQQMMAoGCCsGAQUFBwMDMIG1BgNVHR8Ega0wgaowU6BRoE+GTWh0dHA6Ly9jcmwz
+LmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydFRydXN0ZWRHNENvZGVTaWduaW5nUlNBNDA5
+NlNIQTM4NDIwMjFDQTEuY3JsMFOgUaBPhk1odHRwOi8vY3JsNC5kaWdpY2VydC5j
+b20vRGlnaUNlcnRUcnVzdGVkRzRDb2RlU2lnbmluZ1JTQTQwOTZTSEEzODQyMDIx
+Q0ExLmNybDA+BgNVHSAENzA1MDMGBmeBDAEEATApMCcGCCsGAQUFBwIBFhtodHRw
+Oi8vd3d3LmRpZ2ljZXJ0LmNvbS9DUFMwgZQGCCsGAQUFBwEBBIGHMIGEMCQGCCsG
+AQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wXAYIKwYBBQUHMAKGUGh0
+dHA6Ly9jYWNlcnRzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydFRydXN0ZWRHNENvZGVT
+aWduaW5nUlNBNDA5NlNIQTM4NDIwMjFDQTEuY3J0MAwGA1UdEwEB/wQCMAAwDQYJ
+KoZIhvcNAQELBQADggIBABhpTZufRws1vrtI0xB1/UWrSEJxdPHivfpXE708dzum
+Jh3TFzpsEUCQX5BJJet1l7x92sKNeAL7votA+8O8YvMD64Kim7VKA2BB8AOHKQbp
+r1c2iZBwwofInviRYvsrvQta6KBy2KOe1L/l0KnpUazL9Tv4VKvuWAw/Qc0/eTQr
+NZRsmADORxnZ1qW+SpF+/WbazIYjod/Oqb1U3on+PzyiGD3SjzNhsdFRptqzrIaY
+UVV+2XHG4fN6A8wkyQL5NIVXGiK7rqS5VrRAv58Lf1ZZTghdAL+5SySE0OsR9t0K
+W73ZB9pxbuZZ6Zfxjotjw+IilCEm3ADbc7Eb2ijI4x8mix0XWMUrhL34s7/jRyDi
+P+30aSgjWp611tp/EYRW5kpIaFR8AesDdM0DSSCCRXOMwQG2Tq2+CnqItB5oLNPp
+2XySwlIWvmjbzsREfIpE3yh3bxmHY+vFIc2R0nNkbWNIT6AGtaEQ7oWkgpK8YMkA
+QCf4EUC4Qa7qHiH6YSmYJhjApBLC7UDwevgwxuDrwimWAj+tDkzdnENMcBp4SAy6
+LwUuDi2IU6HRSXWdh2YEkDbc3FdwknnnEWaB4dlRL85YjHyLXN0KiE7SKTj1LfR4
+dGeDqVUlDj9D5+X4a7F89wLP/um40/52HUQv5t5WcNr/47r9aVkx9DHs1b8oUnLg
+-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIGQzCCBSugAwIBAgIQBSN7Cm16Z0UT9p7lA2jiKDANBgkqhkiG9w0BAQsFADBy
+MIIGsDCCBJigAwIBAgIQCK1AsmDSnEyfXs2pvZOu2TANBgkqhkiG9w0BAQwFADBi
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMTEwLwYDVQQDEyhEaWdpQ2VydCBTSEEyIEFzc3VyZWQg
-SUQgQ29kZSBTaWduaW5nIENBMB4XDTIxMDUyMTAwMDAwMFoXDTIyMDUyNjIzNTk1
-OVowgYAxCzAJBgNVBAYTAlVTMREwDwYDVQQIEwhEZWxhd2FyZTEOMAwGA1UEBxMF
-TGV3ZXMxJjAkBgNVBAoTHUJpdGNvaW4gQ29yZSBDb2RlIFNpZ25pbmcgTExDMSYw
-JAYDVQQDEx1CaXRjb2luIENvcmUgQ29kZSBTaWduaW5nIExMQzCCAiIwDQYJKoZI
-hvcNAQEBBQADggIPADCCAgoCggIBAKe6xtFgKAQ68MvxwCjNtpgPobfDQCLKvCAN
-uBKGYuub6ufQB5dhCLN9fjMgfg33AyauvU3PcEUDUWD3/k925bPqgxHC3E7YqoB+
-11b/2Y7a86okqUgcGgvKhaKoHmXxElpM9EjQHjJ0yL4QAR1Lp+9CMMW3wIulBYKt
-wLIArFvbuQhMO/6rxL8frpK049v//WfQzB16GXuFnzN/6fDK7oOt5IrKTg4H6EY2
-fj4+QaUj0lNX7aHnZ6Ki45h2RUPDgN1ipRIuhM67npyZ/tdzPPjI3PUgfXCccN6D
-+qWWnbbbvPuOht4ziPciVnPd57PqJmAOnLI86gisDfd7VKlcpOSEaagdUGvMbU6f
-uAps818GwnJzwCGllxlKASCgXDAckLLvMuit4RfYAhhdhw5R0AsaWK0HW88oHOqi
-U7eWlMCbSGk34x9hBrxYl7tvcNcLPWIPYrrhFWNFpkV8bVVIoV5rUNRgWvBcdOq1
-CCPTfsJp3nEH2WCoBghZquDZLSW12wMw2UsQyEojBeGhrR1inn8uK93wSnVCC8F4
-21yWNRMNe/LQVhmZDgFOen9r/WijBsBdQw1bL8N4zGdYv8+soqkrWzW417FfSx81
-pj4j5FEXYXXV5k/4/eBpIARXVRR8xya0nGkhNJmBk0jjDGD8fPW2gFQbqnUwAQ34
-vOr8NUqHAgMBAAGjggHEMIIBwDAfBgNVHSMEGDAWgBRaxLl7KgqjpepxA8Bg+S32
-ZXUOWDAdBgNVHQ4EFgQUVSLtZnifEHvd8z3E7AyLYNuDiaMwDgYDVR0PAQH/BAQD
-AgeAMBMGA1UdJQQMMAoGCCsGAQUFBwMDMHcGA1UdHwRwMG4wNaAzoDGGL2h0dHA6
-Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9zaGEyLWFzc3VyZWQtY3MtZzEuY3JsMDWgM6Ax
-hi9odHRwOi8vY3JsNC5kaWdpY2VydC5jb20vc2hhMi1hc3N1cmVkLWNzLWcxLmNy
-bDBLBgNVHSAERDBCMDYGCWCGSAGG/WwDATApMCcGCCsGAQUFBwIBFhtodHRwOi8v
-d3d3LmRpZ2ljZXJ0LmNvbS9DUFMwCAYGZ4EMAQQBMIGEBggrBgEFBQcBAQR4MHYw
-JAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRpZ2ljZXJ0LmNvbTBOBggrBgEFBQcw
-AoZCaHR0cDovL2NhY2VydHMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0U0hBMkFzc3Vy
-ZWRJRENvZGVTaWduaW5nQ0EuY3J0MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQEL
-BQADggEBAOaJneI91NJgqghUxgc0AWQ01SAJTgN4z7xMQ3W0ZAtwGbA0byT7YRlj
-j7h+j+hMX/JYkRJETTh8Nalq2tPWJBiMMEPOGFVttFER1pwouHkK9pSKyp4xRvNU
-L0LPh7fE4EYMJoynys6ZTpMCHLku+X3jFat1+1moh9TJRvK5+ETZYGl0seFNU3mJ
-dZzusObm4scffIGgi40kmmISKd5ZRuooRTu9FFR/3vpfbA+7Vg4RSH3CcQPo9bfk
-+h/qRQhSfQInTBn7obRpIlvEcK782qivqseJGdtnTmcdVRShD5ckTVza1yv25uQz
-l/yTqmG2LXlYjl5iMSdF0C1xYq6IsOA=
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMjEwNDI5MDAwMDAwWhcNMzYwNDI4MjM1OTU5WjBpMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xQTA/BgNVBAMTOERpZ2lDZXJ0IFRy
+dXN0ZWQgRzQgQ29kZSBTaWduaW5nIFJTQTQwOTYgU0hBMzg0IDIwMjEgQ0ExMIIC
+IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA1bQvQtAorXi3XdU5WRuxiEL1
+M4zrPYGXcMW7xIUmMJ+kjmjYXPXrNCQH4UtP03hD9BfXHtr50tVnGlJPDqFX/IiZ
+wZHMgQM+TXAkZLON4gh9NH1MgFcSa0OamfLFOx/y78tHWhOmTLMBICXzENOLsvsI
+8IrgnQnAZaf6mIBJNYc9URnokCF4RS6hnyzhGMIazMXuk0lwQjKP+8bqHPNlaJGi
+TUyCEUhSaN4QvRRXXegYE2XFf7JPhSxIpFaENdb5LpyqABXRN/4aBpTCfMjqGzLm
+ysL0p6MDDnSlrzm2q2AS4+jWufcx4dyt5Big2MEjR0ezoQ9uo6ttmAaDG7dqZy3S
+vUQakhCBj7A7CdfHmzJawv9qYFSLScGT7eG0XOBv6yb5jNWy+TgQ5urOkfW+0/tv
+k2E0XLyTRSiDNipmKF+wc86LJiUGsoPUXPYVGUztYuBeM/Lo6OwKp7ADK5GyNnm+
+960IHnWmZcy740hQ83eRGv7bUKJGyGFYmPV8AhY8gyitOYbs1LcNU9D4R+Z1MI3s
+MJN2FKZbS110YU0/EpF23r9Yy3IQKUHw1cVtJnZoEUETWJrcJisB9IlNWdt4z4FK
+PkBHX8mBUHOFECMhWWCKZFTBzCEa6DgZfGYczXg4RTCZT/9jT0y7qg0IU0F8WD1H
+s/q27IwyCQLMbDwMVhECAwEAAaOCAVkwggFVMBIGA1UdEwEB/wQIMAYBAf8CAQAw
+HQYDVR0OBBYEFGg34Ou2O/hfEYb7/mF7CIhl9E5CMB8GA1UdIwQYMBaAFOzX44LS
+cV1kTN8uZz/nupiuHA9PMA4GA1UdDwEB/wQEAwIBhjATBgNVHSUEDDAKBggrBgEF
+BQcDAzB3BggrBgEFBQcBAQRrMGkwJAYIKwYBBQUHMAGGGGh0dHA6Ly9vY3NwLmRp
+Z2ljZXJ0LmNvbTBBBggrBgEFBQcwAoY1aHR0cDovL2NhY2VydHMuZGlnaWNlcnQu
+Y29tL0RpZ2lDZXJ0VHJ1c3RlZFJvb3RHNC5jcnQwQwYDVR0fBDwwOjA4oDagNIYy
+aHR0cDovL2NybDMuZGlnaWNlcnQuY29tL0RpZ2lDZXJ0VHJ1c3RlZFJvb3RHNC5j
+cmwwHAYDVR0gBBUwEzAHBgVngQwBAzAIBgZngQwBBAEwDQYJKoZIhvcNAQEMBQAD
+ggIBADojRD2NCHbuj7w6mdNW4AIapfhINPMstuZ0ZveUcrEAyq9sMCcTEp6QRJ9L
+/Z6jfCbVN7w6XUhtldU/SfQnuxaBRVD9nL22heB2fjdxyyL3WqqQz/WTauPrINHV
+UHmImoqKwba9oUgYftzYgBoRGRjNYZmBVvbJ43bnxOQbX0P4PpT/djk9ntSZz0rd
+KOtfJqGVWEjVGv7XJz/9kNF2ht0csGBc8w2o7uCJob054ThO2m67Np375SFTWsPK
+6Wrxoj7bQ7gzyE84FJKZ9d3OVG3ZXQIUH0AzfAPilbLCIXVzUstG2MQ0HKKlS43N
+b3Y3LIU/Gs4m6Ri+kAewQ3+ViCCCcPDMyu/9KTVcH4k4Vfc3iosJocsL6TEa/y4Z
+XDlx4b6cpwoG1iZnt5LmTl/eeqxJzy6kdJKt2zyknIYf48FWGysj/4+16oh7cGvm
+oLr9Oj9FpsToFpFSi0HASIRLlk2rREDjjfAVKM7t8RhWByovEMQMCGQ8M4+uKIw8
+y4+ICw2/O/TOHnuO77Xry7fwdxPm5yg/rBKupS8ibEH5glwVZsxsDsrFhsP2JjMM
+B0ug0wcCampAMEhLNKhRILutG4UI4lkNbcoFUCvqShyepf2gpx8GdOfy1lKQ/a+F
+SCH5Vzu0nAPthkX0tGFuv2jiJmCG6sivqf6UHedjGzqGVnhO
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
-MIIFMDCCBBigAwIBAgIQBAkYG1/Vu2Z1U0O1b5VQCDANBgkqhkiG9w0BAQsFADBl
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMTMxMDIyMTIwMDAwWhcNMjgxMDIyMTIwMDAwWjByMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMTEwLwYDVQQDEyhEaWdpQ2VydCBTSEEyIEFzc3VyZWQgSUQgQ29kZSBT
-aWduaW5nIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+NOzHH8O
-Ea9ndwfTCzFJGc/Q+0WZsTrbRPV/5aid2zLXcep2nQUut4/6kkPApfmJ1DcZ17aq
-8JyGpdglrA55KDp+6dFn08b7KSfH03sjlOSRI5aQd4L5oYQjZhJUM1B0sSgmuyRp
-wsJS8hRniolF1C2ho+mILCCVrhxKhwjfDPXiTWAYvqrEsq5wMWYzcT6scKKrzn/p
-fMuSoeU7MRzP6vIK5Fe7SrXpdOYr/mzLfnQ5Ng2Q7+S1TqSp6moKq4TzrGdOtcT3
-jNEgJSPrCGQ+UpbB8g8S9MWOD8Gi6CxR93O8vYWxYoNzQYIH5DiLanMg0A9kczye
-n6Yzqf0Z3yWT0QIDAQABo4IBzTCCAckwEgYDVR0TAQH/BAgwBgEB/wIBADAOBgNV
-HQ8BAf8EBAMCAYYwEwYDVR0lBAwwCgYIKwYBBQUHAwMweQYIKwYBBQUHAQEEbTBr
-MCQGCCsGAQUFBzABhhhodHRwOi8vb2NzcC5kaWdpY2VydC5jb20wQwYIKwYBBQUH
-MAKGN2h0dHA6Ly9jYWNlcnRzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEFzc3VyZWRJ
-RFJvb3RDQS5jcnQwgYEGA1UdHwR6MHgwOqA4oDaGNGh0dHA6Ly9jcmw0LmRpZ2lj
-ZXJ0LmNvbS9EaWdpQ2VydEFzc3VyZWRJRFJvb3RDQS5jcmwwOqA4oDaGNGh0dHA6
-Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEFzc3VyZWRJRFJvb3RDQS5jcmww
-TwYDVR0gBEgwRjA4BgpghkgBhv1sAAIEMCowKAYIKwYBBQUHAgEWHGh0dHBzOi8v
-d3d3LmRpZ2ljZXJ0LmNvbS9DUFMwCgYIYIZIAYb9bAMwHQYDVR0OBBYEFFrEuXsq
-CqOl6nEDwGD5LfZldQ5YMB8GA1UdIwQYMBaAFEXroq/0ksuCMS1Ri6enIZ3zbcgP
-MA0GCSqGSIb3DQEBCwUAA4IBAQA+7A1aJLPzItEVyCx8JSl2qB1dHC06GsTvMGHX
-fgtg/cM9D8Svi/3vKt8gVTew4fbRknUPUbRupY5a4l4kgU4QpO4/cY5jDhNLrddf
-RHnzNhQGivecRk5c/5CxGwcOkRX7uq+1UcKNJK4kxscnKqEpKBo6cSgCPC6Ro8Al
-EeKcFEehemhor5unXCBc2XGxDI+7qPjFEmifz0DLQESlE/DmZAwlCEIysjaKJAL+
-L3J+HNdJRZboWR3p+nRka7LrZkPas7CM1ekN3fYBIM6ZMWM9CBoYs4GbT8aTEAb8
-B4H6i9r5gkn3Ym6hU/oSlBiFLpKR6mhsRDKyZqHnGKSaZFHv
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
-----END CERTIFICATE-----
------BEGIN CERTIFICATE-----
-MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
-MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
-d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
-b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
-EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
-cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
-MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
-JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
-mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
-wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
-VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
-AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
-AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
-BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
-pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
-dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
-fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
-H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
-+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
------END CERTIFICATE-----
-