Diffstat (limited to 'contrib')
 -rw-r--r--  contrib/README.md                         |   8
 -rw-r--r--  contrib/bitcoin-qt.pro                    |   1
 -rw-r--r--  contrib/debian/copyright                  |  19
 -rw-r--r--  contrib/devtools/README.md                |  72
 -rwxr-xr-x  contrib/devtools/clang-format-diff.py     |   4
 -rwxr-xr-x  contrib/devtools/copyright_header.py      |  30
 -rwxr-xr-x  contrib/devtools/github-merge.py          | 413
 -rwxr-xr-x  contrib/devtools/symbol-check.py          |   4
 -rwxr-xr-x  contrib/devtools/update-translations.py   | 215
 -rw-r--r--  contrib/gitian-keys/keys.txt              |   1
 -rw-r--r--  contrib/guix/libexec/build.sh             |  42
 -rw-r--r--  contrib/init/bitcoind.service             |  12
 -rwxr-xr-x  contrib/linearize/linearize-data.py       |   4
 -rwxr-xr-x  contrib/linearize/linearize-hashes.py     |   4
 -rw-r--r--  contrib/macdeploy/README.md               |   2
 -rwxr-xr-x  contrib/macdeploy/macdeployqtplus         | 150
 -rwxr-xr-x  contrib/seeds/generate-seeds.py           |   3
 -rw-r--r--  contrib/testgen/README.md                 |   2
 -rwxr-xr-x  contrib/verify-commits/verify-commits.py  |   2
19 files changed, 167 insertions, 821 deletions
diff --git a/contrib/README.md b/contrib/README.md
index 8915919766..361975baa4 100644
--- a/contrib/README.md
+++ b/contrib/README.md
@@ -3,10 +3,10 @@ Repository Tools
### [Developer tools](/contrib/devtools) ###
Specific tools for developers working on this repository.
-Contains the script `github-merge.py` for merging GitHub pull requests securely and signing them using GPG.
+Additional tools, including the `github-merge.py` script, are available in the [maintainer-tools](https://github.com/bitcoin-core/bitcoin-maintainer-tools) repository.
### [Verify-Commits](/contrib/verify-commits) ###
-Tool to verify that every merge commit was signed by a developer using the above `github-merge.py` script.
+Tool to verify that every merge commit was signed by a developer using the `github-merge.py` script.
### [Linearize](/contrib/linearize) ###
Construct a linear, no-fork, best version of the blockchain.
@@ -33,12 +33,12 @@ Files used during the gitian build process. For more information about gitian, s
PGP keys used for signing Bitcoin Core [Gitian release](/doc/release-process.md) results.
### [MacDeploy](/contrib/macdeploy) ###
-Scripts and notes for Mac builds.
+Scripts and notes for Mac builds.
### [Gitian-build](/contrib/gitian-build.py) ###
Script for running full Gitian builds.
-Test and Verify Tools
+Test and Verify Tools
---------------------
### [TestGen](/contrib/testgen) ###
diff --git a/contrib/bitcoin-qt.pro b/contrib/bitcoin-qt.pro
index b8133bf789..0e4eeee0a7 100644
--- a/contrib/bitcoin-qt.pro
+++ b/contrib/bitcoin-qt.pro
@@ -16,6 +16,7 @@ FORMS += \
../src/qt/forms/sendcoinsentry.ui \
../src/qt/forms/signverifymessagedialog.ui \
../src/qt/forms/transactiondescdialog.ui \
+ ../src/qt/forms/createwalletdialog.ui
RESOURCES += \
../src/qt/bitcoin.qrc
diff --git a/contrib/debian/copyright b/contrib/debian/copyright
index 2d5b0188d2..0eccbacb96 100644
--- a/contrib/debian/copyright
+++ b/contrib/debian/copyright
@@ -26,21 +26,14 @@ License: GNU-All-permissive-License
Files: src/qt/res/icons/add.png
src/qt/res/icons/address-book.png
src/qt/res/icons/chevron.png
- src/qt/res/icons/configure.png
- src/qt/res/icons/debugwindow.png
src/qt/res/icons/edit.png
src/qt/res/icons/editcopy.png
src/qt/res/icons/editpaste.png
src/qt/res/icons/export.png
src/qt/res/icons/eye.png
- src/qt/res/icons/filesave.png
src/qt/res/icons/history.png
- src/qt/res/icons/info.png
- src/qt/res/icons/key.png
src/qt/res/icons/lock_*.png
- src/qt/res/icons/open.png
src/qt/res/icons/overview.png
- src/qt/res/icons/quit.png
src/qt/res/icons/receive.png
src/qt/res/icons/remove.png
src/qt/res/icons/send.png
@@ -60,7 +53,7 @@ Files: src/qt/res/icons/connect*.png
Copyright: Marco Falke
Luke Dashjr
License: Expat
-Comment: Inspired by Stephan Hutchings Typicons
+Comment: Inspired by Stephen Hutchings' Typicons
Files: src/qt/res/icons/tx_mined.png
src/qt/res/src/mine.svg
@@ -72,21 +65,17 @@ Files: src/qt/res/icons/tx_mined.png
src/qt/res/src/hd_enabled.svg
Copyright: Jonas Schnelli
License: Expat
-Comment:
Files: src/qt/res/icons/clock*.png
src/qt/res/icons/eye_*.png
src/qt/res/icons/tx_in*.png
- src/qt/res/icons/verify.png
src/qt/res/src/clock_*.svg
src/qt/res/src/tx_*.svg
- src/qt/res/src/verify.svg
-Copyright: Stephan Hutching, Jonas Schnelli
+Copyright: Stephen Hutchings, Jonas Schnelli
License: Expat
-Comment: Modifications of Stephan Hutchings Typicons
+Comment: Modifications of Stephen Hutchings' Typicons
-Files: src/qt/res/icons/about.png
- src/qt/res/icons/bitcoin.*
+Files: src/qt/res/icons/bitcoin.*
share/pixmaps/bitcoin*
src/qt/res/src/bitcoin.svg
Copyright: Bitboy, Jonas Schnelli
diff --git a/contrib/devtools/README.md b/contrib/devtools/README.md
index 4994d7f0a5..04fa02484f 100644
--- a/contrib/devtools/README.md
+++ b/contrib/devtools/README.md
@@ -89,66 +89,6 @@ example:
BUILDDIR=$PWD/build contrib/devtools/gen-manpages.sh
```
-github-merge.py
-===============
-
-A small script to automate merging pull-requests securely and sign them with GPG.
-
-For example:
-
- ./github-merge.py 3077
-
-(in any git repository) will help you merge pull request #3077 for the
-bitcoin/bitcoin repository.
-
-What it does:
-* Fetch master and the pull request.
-* Locally construct a merge commit.
-* Show the diff that merge results in.
-* Ask you to verify the resulting source tree (so you can do a make
-check or whatever).
-* Ask you whether to GPG sign the merge commit.
-* Ask you whether to push the result upstream.
-
-This means that there are no potential race conditions (where a
-pullreq gets updated while you're reviewing it, but before you click
-merge), and when using GPG signatures, that even a compromised GitHub
-couldn't mess with the sources.
-
-Setup
----------
-Configuring the github-merge tool for the bitcoin repository is done in the following way:
-
- git config githubmerge.repository bitcoin/bitcoin
- git config githubmerge.testcmd "make -j4 check" (adapt to whatever you want to use for testing)
- git config --global user.signingkey mykeyid
-
-Authentication (optional)
---------------------------
-
-The API request limit for unauthenticated requests is quite low, but the
-limit for authenticated requests is much higher. If you start running
-into rate limiting errors it can be useful to set an authentication token
-so that the script can authenticate requests.
-
-- First, go to [Personal access tokens](https://github.com/settings/tokens).
-- Click 'Generate new token'.
-- Fill in an arbitrary token description. No further privileges are needed.
-- Click the `Generate token` button at the bottom of the form.
-- Copy the generated token (should be a hexadecimal string)
-
-Then do:
-
- git config --global user.ghtoken "pasted token"
-
-Create and verify timestamps of merge commits
----------------------------------------------
-To create or verify timestamps on the merge commits, install the OpenTimestamps
-client via `pip3 install opentimestamps-client`. Then, download the gpg wrapper
-`ots-git-gpg-wrapper.sh` and set it as git's `gpg.program`. See
-[the ots git integration documentation](https://github.com/opentimestamps/opentimestamps-client/blob/master/doc/git-integration.md#usage)
-for further details.
-
optimize-pngs.py
================
@@ -180,18 +120,6 @@ If there are 'unsupported' symbols, the return value will be 1 a list like this
.../64/test_bitcoin: symbol std::out_of_range::~out_of_range() from unsupported version GLIBCXX_3.4.15
.../64/test_bitcoin: symbol _ZNSt8__detail15_List_nod from unsupported version GLIBCXX_3.4.15
-update-translations.py
-======================
-
-Run this script from the root of the repository to update all translations from transifex.
-It will do the following automatically:
-
-- fetch all translations
-- post-process them into valid and committable format
-- add missing translations to the build system (TODO)
-
-See doc/translation-process.md for more information.
-
circular-dependencies.py
========================
diff --git a/contrib/devtools/clang-format-diff.py b/contrib/devtools/clang-format-diff.py
index f322b3a880..98eee67f43 100755
--- a/contrib/devtools/clang-format-diff.py
+++ b/contrib/devtools/clang-format-diff.py
@@ -106,7 +106,7 @@ def main():
filename = None
lines_by_file = {}
for line in sys.stdin:
- match = re.search('^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
+ match = re.search(r'^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
if match:
filename = match.group(2)
if filename is None:
@@ -119,7 +119,7 @@ def main():
if not re.match('^%s$' % args.iregex, filename, re.IGNORECASE):
continue
- match = re.search('^@@.*\+(\d+)(,(\d+))?', line)
+ match = re.search(r'^@@.*\+(\d+)(,(\d+))?', line)
if match:
start_line = int(match.group(1))
line_count = 1
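
The two hunks above only switch the regex literals to raw strings; the patterns are unchanged. Recent Python releases warn about unrecognized escape sequences such as `\+` in ordinary string literals, which is why the `r''` prefix is preferred for regexes. A minimal sketch of what the file-header pattern extracts, assuming a strip level of one path component (`{1}` substituted for `{%s}`):

```
import re

# '+++ b/<path>' header from a unified diff; stripping one component drops 'b/'.
line = '+++ b/contrib/devtools/clang-format-diff.py'
match = re.search(r'^\+\+\+\ (.*?/){1}(\S*)', line)
print(match.group(2))  # contrib/devtools/clang-format-diff.py
```
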
diff --git a/contrib/devtools/copyright_header.py b/contrib/devtools/copyright_header.py
index fc01e570aa..67e77bc63d 100755
--- a/contrib/devtools/copyright_header.py
+++ b/contrib/devtools/copyright_header.py
@@ -71,7 +71,7 @@ def get_filenames_to_examine(base_directory):
################################################################################
-COPYRIGHT_WITH_C = 'Copyright \(c\)'
+COPYRIGHT_WITH_C = r'Copyright \(c\)'
COPYRIGHT_WITHOUT_C = 'Copyright'
ANY_COPYRIGHT_STYLE = '(%s|%s)' % (COPYRIGHT_WITH_C, COPYRIGHT_WITHOUT_C)
@@ -85,21 +85,21 @@ ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE = ("%s %s" % (ANY_COPYRIGHT_STYLE,
ANY_COPYRIGHT_COMPILED = re.compile(ANY_COPYRIGHT_STYLE_OR_YEAR_STYLE)
def compile_copyright_regex(copyright_style, year_style, name):
- return re.compile('%s %s,? %s' % (copyright_style, year_style, name))
+ return re.compile(r'%s %s,? %s( +\*)?\n' % (copyright_style, year_style, name))
EXPECTED_HOLDER_NAMES = [
- "Satoshi Nakamoto\n",
- "The Bitcoin Core developers\n",
- "BitPay Inc\.\n",
- "University of Illinois at Urbana-Champaign\.\n",
- "Pieter Wuille\n",
- "Wladimir J. van der Laan\n",
- "Jeff Garzik\n",
- "Jan-Klaas Kollhof\n",
- "ArtForz -- public domain half-a-node\n",
- "Intel Corporation",
- "The Zcash developers",
- "Jeremy Rubin",
+ r"Satoshi Nakamoto",
+ r"The Bitcoin Core developers",
+ r"BitPay Inc\.",
+ r"University of Illinois at Urbana-Champaign\.",
+ r"Pieter Wuille",
+ r"Wladimir J\. van der Laan",
+ r"Jeff Garzik",
+ r"Jan-Klaas Kollhof",
+ r"ArtForz -- public domain half-a-node",
+ r"Intel Corporation ?",
+ r"The Zcash developers",
+ r"Jeremy Rubin",
]
DOMINANT_STYLE_COMPILED = {}
@@ -329,7 +329,7 @@ def write_file_lines(filename, file_lines):
# update header years execution
################################################################################
-COPYRIGHT = 'Copyright \(c\)'
+COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin Core developers'
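
Here too the change is raw-string hygiene plus slightly looser holder-name patterns. For illustration only, combining the constants shown above in the same way as `compile_copyright_regex()` earlier in this diff yields a matcher for a typical header line (the exact join used elsewhere in the script is not shown in this hunk):

```
import re

COPYRIGHT = r'Copyright \(c\)'
YEAR = "20[0-9][0-9]"
YEAR_RANGE = '(%s)(-%s)?' % (YEAR, YEAR)
HOLDER = 'The Bitcoin Core developers'

pattern = re.compile('%s %s,? %s' % (COPYRIGHT, YEAR_RANGE, HOLDER))
print(bool(pattern.search('# Copyright (c) 2009-2019 The Bitcoin Core developers')))  # True
```
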
diff --git a/contrib/devtools/github-merge.py b/contrib/devtools/github-merge.py
deleted file mode 100755
index 78ac671bfe..0000000000
--- a/contrib/devtools/github-merge.py
+++ /dev/null
@@ -1,413 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2016-2017 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-
-# This script will locally construct a merge commit for a pull request on a
-# github repository, inspect it, sign it and optionally push it.
-
-# The following temporary branches are created/overwritten and deleted:
-# * pull/$PULL/base (the current master we're merging onto)
-# * pull/$PULL/head (the current state of the remote pull request)
-# * pull/$PULL/merge (github's merge)
-# * pull/$PULL/local-merge (our merge)
-
-# In case of a clean merge that is accepted by the user, the local branch with
-# name $BRANCH is overwritten with the merged result, and optionally pushed.
-import os
-from sys import stdin,stdout,stderr
-import argparse
-import hashlib
-import subprocess
-import sys
-import json
-import codecs
-from urllib.request import Request, urlopen
-from urllib.error import HTTPError
-
-# External tools (can be overridden using environment)
-GIT = os.getenv('GIT','git')
-BASH = os.getenv('BASH','bash')
-
-# OS specific configuration for terminal attributes
-ATTR_RESET = ''
-ATTR_PR = ''
-ATTR_NAME = ''
-ATTR_WARN = ''
-COMMIT_FORMAT = '%H %s (%an)%d'
-if os.name == 'posix': # if posix, assume we can use basic terminal escapes
- ATTR_RESET = '\033[0m'
- ATTR_PR = '\033[1;36m'
- ATTR_NAME = '\033[0;36m'
- ATTR_WARN = '\033[1;31m'
- COMMIT_FORMAT = '%C(bold blue)%H%Creset %s %C(cyan)(%an)%Creset%C(green)%d%Creset'
-
-def git_config_get(option, default=None):
- '''
- Get named configuration option from git repository.
- '''
- try:
- return subprocess.check_output([GIT,'config','--get',option]).rstrip().decode('utf-8')
- except subprocess.CalledProcessError:
- return default
-
-def get_response(req_url, ghtoken):
- req = Request(req_url)
- if ghtoken is not None:
- req.add_header('Authorization', 'token ' + ghtoken)
- return urlopen(req)
-
-def retrieve_json(req_url, ghtoken, use_pagination=False):
- '''
- Retrieve json from github.
- Return None if an error happens.
- '''
- try:
- reader = codecs.getreader('utf-8')
- if not use_pagination:
- return json.load(reader(get_response(req_url, ghtoken)))
-
- obj = []
- page_num = 1
- while True:
- req_url_page = '{}?page={}'.format(req_url, page_num)
- result = get_response(req_url_page, ghtoken)
- obj.extend(json.load(reader(result)))
-
- link = result.headers.get('link', None)
- if link is not None:
- link_next = [l for l in link.split(',') if 'rel="next"' in l]
- if len(link_next) > 0:
- page_num = int(link_next[0][link_next[0].find("page=")+5:link_next[0].find(">")])
- continue
- break
- return obj
- except HTTPError as e:
- error_message = e.read()
- print('Warning: unable to retrieve pull information from github: %s' % e)
- print('Detailed error: %s' % error_message)
- return None
- except Exception as e:
- print('Warning: unable to retrieve pull information from github: %s' % e)
- return None
-
-def retrieve_pr_info(repo,pull,ghtoken):
- req_url = "https://api.github.com/repos/"+repo+"/pulls/"+pull
- return retrieve_json(req_url,ghtoken)
-
-def retrieve_pr_comments(repo,pull,ghtoken):
- req_url = "https://api.github.com/repos/"+repo+"/issues/"+pull+"/comments"
- return retrieve_json(req_url,ghtoken,use_pagination=True)
-
-def retrieve_pr_reviews(repo,pull,ghtoken):
- req_url = "https://api.github.com/repos/"+repo+"/pulls/"+pull+"/reviews"
- return retrieve_json(req_url,ghtoken,use_pagination=True)
-
-def ask_prompt(text):
- print(text,end=" ",file=stderr)
- stderr.flush()
- reply = stdin.readline().rstrip()
- print("",file=stderr)
- return reply
-
-def get_symlink_files():
- files = sorted(subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', 'HEAD']).splitlines())
- ret = []
- for f in files:
- if (int(f.decode('utf-8').split(" ")[0], 8) & 0o170000) == 0o120000:
- ret.append(f.decode('utf-8').split("\t")[1])
- return ret
-
-def tree_sha512sum(commit='HEAD'):
- # request metadata for entire tree, recursively
- files = []
- blob_by_name = {}
- for line in subprocess.check_output([GIT, 'ls-tree', '--full-tree', '-r', commit]).splitlines():
- name_sep = line.index(b'\t')
- metadata = line[:name_sep].split() # perms, 'blob', blobid
- assert(metadata[1] == b'blob')
- name = line[name_sep+1:]
- files.append(name)
- blob_by_name[name] = metadata[2]
-
- files.sort()
- # open connection to git-cat-file in batch mode to request data for all blobs
- # this is much faster than launching it per file
- p = subprocess.Popen([GIT, 'cat-file', '--batch'], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
- overall = hashlib.sha512()
- for f in files:
- blob = blob_by_name[f]
- # request blob
- p.stdin.write(blob + b'\n')
- p.stdin.flush()
- # read header: blob, "blob", size
- reply = p.stdout.readline().split()
- assert(reply[0] == blob and reply[1] == b'blob')
- size = int(reply[2])
- # hash the blob data
- intern = hashlib.sha512()
- ptr = 0
- while ptr < size:
- bs = min(65536, size - ptr)
- piece = p.stdout.read(bs)
- if len(piece) == bs:
- intern.update(piece)
- else:
- raise IOError('Premature EOF reading git cat-file output')
- ptr += bs
- dig = intern.hexdigest()
- assert(p.stdout.read(1) == b'\n') # ignore LF that follows blob data
- # update overall hash with file hash
- overall.update(dig.encode("utf-8"))
- overall.update(" ".encode("utf-8"))
- overall.update(f)
- overall.update("\n".encode("utf-8"))
- p.stdin.close()
- if p.wait():
- raise IOError('Non-zero return value executing git cat-file')
- return overall.hexdigest()
-
-def get_acks_from_comments(head_commit, comments):
- # Look for abbreviated commit id, because not everyone wants to type/paste
- # the whole thing and the chance of collisions within a PR is small enough
- head_abbrev = head_commit[0:6]
- acks = []
- for c in comments:
- review = [l for l in c['body'].split('\r\n') if 'ACK' in l and head_abbrev in l]
- if review:
- acks.append((c['user']['login'], review[0]))
- return acks
-
-def make_acks_message(head_commit, acks):
- if acks:
- ack_str ='\n\nACKs for top commit:\n'.format(head_commit)
- for name, msg in acks:
- ack_str += ' {}:\n'.format(name)
- ack_str += ' {}\n'.format(msg)
- else:
- ack_str ='\n\nTop commit has no ACKs.\n'
- return ack_str
-
-def print_merge_details(pull, title, branch, base_branch, head_branch, acks):
- print('%s#%s%s %s %sinto %s%s' % (ATTR_RESET+ATTR_PR,pull,ATTR_RESET,title,ATTR_RESET+ATTR_PR,branch,ATTR_RESET))
- subprocess.check_call([GIT,'log','--graph','--topo-order','--pretty=format:'+COMMIT_FORMAT,base_branch+'..'+head_branch])
- if acks is not None:
- if acks:
- print('{}ACKs:{}'.format(ATTR_PR, ATTR_RESET))
- for (name, message) in acks:
- print('* {} {}({}){}'.format(message, ATTR_NAME, name, ATTR_RESET))
- else:
- print('{}Top commit has no ACKs!{}'.format(ATTR_WARN, ATTR_RESET))
-
-def parse_arguments():
- epilog = '''
- In addition, you can set the following git configuration variables:
- githubmerge.repository (mandatory),
- user.signingkey (mandatory),
- user.ghtoken (default: none).
- githubmerge.host (default: git@github.com),
- githubmerge.branch (no default),
- githubmerge.testcmd (default: none).
- '''
- parser = argparse.ArgumentParser(description='Utility to merge, sign and push github pull requests',
- epilog=epilog)
- parser.add_argument('pull', metavar='PULL', type=int, nargs=1,
- help='Pull request ID to merge')
- parser.add_argument('branch', metavar='BRANCH', type=str, nargs='?',
- default=None, help='Branch to merge against (default: githubmerge.branch setting, or base branch for pull, or \'master\')')
- return parser.parse_args()
-
-def main():
- # Extract settings from git repo
- repo = git_config_get('githubmerge.repository')
- host = git_config_get('githubmerge.host','git@github.com')
- opt_branch = git_config_get('githubmerge.branch',None)
- testcmd = git_config_get('githubmerge.testcmd')
- ghtoken = git_config_get('user.ghtoken')
- signingkey = git_config_get('user.signingkey')
- if repo is None:
- print("ERROR: No repository configured. Use this command to set:", file=stderr)
- print("git config githubmerge.repository <owner>/<repo>", file=stderr)
- sys.exit(1)
- if signingkey is None:
- print("ERROR: No GPG signing key set. Set one using:",file=stderr)
- print("git config --global user.signingkey <key>",file=stderr)
- sys.exit(1)
-
- if host.startswith(('https:','http:')):
- host_repo = host+"/"+repo+".git"
- else:
- host_repo = host+":"+repo
-
- # Extract settings from command line
- args = parse_arguments()
- pull = str(args.pull[0])
-
- # Receive pull information from github
- info = retrieve_pr_info(repo,pull,ghtoken)
- if info is None:
- sys.exit(1)
- title = info['title'].strip()
- body = info['body'].strip()
- # precedence order for destination branch argument:
- # - command line argument
- # - githubmerge.branch setting
- # - base branch for pull (as retrieved from github)
- # - 'master'
- branch = args.branch or opt_branch or info['base']['ref'] or 'master'
-
- # Initialize source branches
- head_branch = 'pull/'+pull+'/head'
- base_branch = 'pull/'+pull+'/base'
- merge_branch = 'pull/'+pull+'/merge'
- local_merge_branch = 'pull/'+pull+'/local-merge'
-
- devnull = open(os.devnull, 'w', encoding="utf8")
- try:
- subprocess.check_call([GIT,'checkout','-q',branch])
- except subprocess.CalledProcessError:
- print("ERROR: Cannot check out branch %s." % (branch), file=stderr)
- sys.exit(3)
- try:
- subprocess.check_call([GIT,'fetch','-q',host_repo,'+refs/pull/'+pull+'/*:refs/heads/pull/'+pull+'/*',
- '+refs/heads/'+branch+':refs/heads/'+base_branch])
- except subprocess.CalledProcessError:
- print("ERROR: Cannot find pull request #%s or branch %s on %s." % (pull,branch,host_repo), file=stderr)
- sys.exit(3)
- try:
- subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+head_branch], stdout=devnull, stderr=stdout)
- head_commit = subprocess.check_output([GIT,'log','-1','--pretty=format:%H',head_branch]).decode('utf-8')
- assert len(head_commit) == 40
- except subprocess.CalledProcessError:
- print("ERROR: Cannot find head of pull request #%s on %s." % (pull,host_repo), file=stderr)
- sys.exit(3)
- try:
- subprocess.check_call([GIT,'log','-q','-1','refs/heads/'+merge_branch], stdout=devnull, stderr=stdout)
- except subprocess.CalledProcessError:
- print("ERROR: Cannot find merge of pull request #%s on %s." % (pull,host_repo), file=stderr)
- sys.exit(3)
- subprocess.check_call([GIT,'checkout','-q',base_branch])
- subprocess.call([GIT,'branch','-q','-D',local_merge_branch], stderr=devnull)
- subprocess.check_call([GIT,'checkout','-q','-b',local_merge_branch])
-
- try:
- # Go up to the repository's root.
- toplevel = subprocess.check_output([GIT,'rev-parse','--show-toplevel']).strip()
- os.chdir(toplevel)
- # Create unsigned merge commit.
- if title:
- firstline = 'Merge #%s: %s' % (pull,title)
- else:
- firstline = 'Merge #%s' % (pull,)
- message = firstline + '\n\n'
- message += subprocess.check_output([GIT,'log','--no-merges','--topo-order','--pretty=format:%H %s (%an)',base_branch+'..'+head_branch]).decode('utf-8')
- message += '\n\nPull request description:\n\n ' + body.replace('\n', '\n ') + '\n'
- try:
- subprocess.check_call([GIT,'merge','-q','--commit','--no-edit','--no-ff','--no-gpg-sign','-m',message.encode('utf-8'),head_branch])
- except subprocess.CalledProcessError:
- print("ERROR: Cannot be merged cleanly.",file=stderr)
- subprocess.check_call([GIT,'merge','--abort'])
- sys.exit(4)
- logmsg = subprocess.check_output([GIT,'log','--pretty=format:%s','-n','1']).decode('utf-8')
- if logmsg.rstrip() != firstline.rstrip():
- print("ERROR: Creating merge failed (already merged?).",file=stderr)
- sys.exit(4)
-
- symlink_files = get_symlink_files()
- for f in symlink_files:
- print("ERROR: File %s was a symlink" % f)
- if len(symlink_files) > 0:
- sys.exit(4)
-
- # Compute SHA512 of git tree (to be able to detect changes before sign-off)
- try:
- first_sha512 = tree_sha512sum()
- except subprocess.CalledProcessError:
- print("ERROR: Unable to compute tree hash")
- sys.exit(4)
-
- print_merge_details(pull, title, branch, base_branch, head_branch, None)
- print()
-
- # Run test command if configured.
- if testcmd:
- if subprocess.call(testcmd,shell=True):
- print("ERROR: Running %s failed." % testcmd,file=stderr)
- sys.exit(5)
-
- # Show the created merge.
- diff = subprocess.check_output([GIT,'diff',merge_branch+'..'+local_merge_branch])
- subprocess.check_call([GIT,'diff',base_branch+'..'+local_merge_branch])
- if diff:
- print("WARNING: merge differs from github!",file=stderr)
- reply = ask_prompt("Type 'ignore' to continue.")
- if reply.lower() == 'ignore':
- print("Difference with github ignored.",file=stderr)
- else:
- sys.exit(6)
- else:
- # Verify the result manually.
- print("Dropping you on a shell so you can try building/testing the merged source.",file=stderr)
- print("Run 'git diff HEAD~' to show the changes being merged.",file=stderr)
- print("Type 'exit' when done.",file=stderr)
- if os.path.isfile('/etc/debian_version'): # Show pull number on Debian default prompt
- os.putenv('debian_chroot',pull)
- subprocess.call([BASH,'-i'])
-
- second_sha512 = tree_sha512sum()
- if first_sha512 != second_sha512:
- print("ERROR: Tree hash changed unexpectedly",file=stderr)
- sys.exit(8)
-
- # Retrieve PR comments and ACKs and add to commit message, store ACKs to print them with commit
- # description
- comments = retrieve_pr_comments(repo,pull,ghtoken) + retrieve_pr_reviews(repo,pull,ghtoken)
- if comments is None:
- print("ERROR: Could not fetch PR comments and reviews",file=stderr)
- sys.exit(1)
- acks = get_acks_from_comments(head_commit=head_commit, comments=comments)
- message += make_acks_message(head_commit=head_commit, acks=acks)
- # end message with SHA512 tree hash, then update message
- message += '\n\nTree-SHA512: ' + first_sha512
- try:
- subprocess.check_call([GIT,'commit','--amend','--no-gpg-sign','-m',message.encode('utf-8')])
- except subprocess.CalledProcessError:
- print("ERROR: Cannot update message.", file=stderr)
- sys.exit(4)
-
- # Sign the merge commit.
- print_merge_details(pull, title, branch, base_branch, head_branch, acks)
- while True:
- reply = ask_prompt("Type 's' to sign off on the above merge, or 'x' to reject and exit.").lower()
- if reply == 's':
- try:
- subprocess.check_call([GIT,'commit','-q','--gpg-sign','--amend','--no-edit'])
- break
- except subprocess.CalledProcessError:
- print("Error while signing, asking again.",file=stderr)
- elif reply == 'x':
- print("Not signing off on merge, exiting.",file=stderr)
- sys.exit(1)
-
- # Put the result in branch.
- subprocess.check_call([GIT,'checkout','-q',branch])
- subprocess.check_call([GIT,'reset','-q','--hard',local_merge_branch])
- finally:
- # Clean up temporary branches.
- subprocess.call([GIT,'checkout','-q',branch])
- subprocess.call([GIT,'branch','-q','-D',head_branch],stderr=devnull)
- subprocess.call([GIT,'branch','-q','-D',base_branch],stderr=devnull)
- subprocess.call([GIT,'branch','-q','-D',merge_branch],stderr=devnull)
- subprocess.call([GIT,'branch','-q','-D',local_merge_branch],stderr=devnull)
-
- # Push the result.
- while True:
- reply = ask_prompt("Type 'push' to push the result to %s, branch %s, or 'x' to exit without pushing." % (host_repo,branch)).lower()
- if reply == 'push':
- subprocess.check_call([GIT,'push',host_repo,'refs/heads/'+branch])
- break
- elif reply == 'x':
- sys.exit(1)
-
-if __name__ == '__main__':
- main()
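
The deleted script now lives in the bitcoin-maintainer-tools repository (see the README change at the top of this diff). Its GitHub access boils down to a plain urllib request with an optional token header, as in `get_response()` above; a condensed, hedged sketch (the helper name below is illustrative):

```
import json
from urllib.request import Request, urlopen

def retrieve_pr_title(repo, pull, ghtoken=None):
    # Same pattern as get_response()/retrieve_pr_info() in the removed script.
    req = Request('https://api.github.com/repos/{}/pulls/{}'.format(repo, pull))
    if ghtoken is not None:
        req.add_header('Authorization', 'token ' + ghtoken)
    with urlopen(req) as response:
        return json.load(response)['title']

# Example (requires network access):
# print(retrieve_pr_title('bitcoin/bitcoin', '3077'))
```
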
diff --git a/contrib/devtools/symbol-check.py b/contrib/devtools/symbol-check.py
index dd35d862c9..d8b684026c 100755
--- a/contrib/devtools/symbol-check.py
+++ b/contrib/devtools/symbol-check.py
@@ -141,7 +141,7 @@ def read_libraries(filename):
for line in stdout.splitlines():
tokens = line.split()
if len(tokens)>2 and tokens[1] == '(NEEDED)':
- match = re.match('^Shared library: \[(.*)\]$', ' '.join(tokens[2:]))
+ match = re.match(r'^Shared library: \[(.*)\]$', ' '.join(tokens[2:]))
if match:
libraries.append(match.group(1))
else:
@@ -171,5 +171,3 @@ if __name__ == '__main__':
retval = 1
sys.exit(retval)
-
-
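
As with the other scripts, only the string literal changes here. The pattern pulls the library name out of a `readelf -d` NEEDED entry; a small sketch with an illustrative readelf line (column spacing varies between readelf versions):

```
import re

line = ' 0x0000000000000001 (NEEDED)             Shared library: [libc.so.6]'
tokens = line.split()
if len(tokens) > 2 and tokens[1] == '(NEEDED)':
    match = re.match(r'^Shared library: \[(.*)\]$', ' '.join(tokens[2:]))
    print(match.group(1))  # libc.so.6
```
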
diff --git a/contrib/devtools/update-translations.py b/contrib/devtools/update-translations.py
deleted file mode 100755
index 1b9d3a4c27..0000000000
--- a/contrib/devtools/update-translations.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2014 Wladimir J. van der Laan
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-'''
-Run this script from the root of the repository to update all translations from
-transifex.
-It will do the following automatically:
-
-- fetch all translations using the tx tool
-- post-process them into valid and committable format
- - remove invalid control characters
- - remove location tags (makes diffs less noisy)
-
-TODO:
-- auto-add new translations to the build system according to the translation process
-'''
-import subprocess
-import re
-import sys
-import os
-import io
-import xml.etree.ElementTree as ET
-
-# Name of transifex tool
-TX = 'tx'
-# Name of source language file
-SOURCE_LANG = 'bitcoin_en.ts'
-# Directory with locale files
-LOCALE_DIR = 'src/qt/locale'
-# Minimum number of messages for translation to be considered at all
-MIN_NUM_MESSAGES = 10
-# Regexp to check for Bitcoin addresses
-ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')
-
-def check_at_repository_root():
- if not os.path.exists('.git'):
- print('No .git directory found')
- print('Execute this script at the root of the repository', file=sys.stderr)
- sys.exit(1)
-
-def fetch_all_translations():
- if subprocess.call([TX, 'pull', '-f', '-a']):
- print('Error while fetching translations', file=sys.stderr)
- sys.exit(1)
-
-def find_format_specifiers(s):
- '''Find all format specifiers in a string.'''
- pos = 0
- specifiers = []
- while True:
- percent = s.find('%', pos)
- if percent < 0:
- break
- specifiers.append(s[percent+1])
- pos = percent+2
- return specifiers
-
-def split_format_specifiers(specifiers):
- '''Split format specifiers between numeric (Qt) and others (strprintf)'''
- numeric = []
- other = []
- for s in specifiers:
- if s in {'1','2','3','4','5','6','7','8','9'}:
- numeric.append(s)
- else:
- other.append(s)
-
- # If both numeric format specifiers and "others" are used, assume we're dealing
- # with a Qt-formatted message. In the case of Qt formatting (see https://doc.qt.io/qt-5/qstring.html#arg)
- # only numeric formats are replaced at all. This means "(percentage: %1%)" is valid, without needing
- # any kind of escaping that would be necessary for strprintf. Without this, this function
- # would wrongly detect '%)' as a printf format specifier.
- if numeric:
- other = []
-
- # numeric (Qt) can be present in any order, others (strprintf) must be in specified order
- return set(numeric),other
-
-def sanitize_string(s):
- '''Sanitize string for printing'''
- return s.replace('\n',' ')
-
-def check_format_specifiers(source, translation, errors, numerus):
- source_f = split_format_specifiers(find_format_specifiers(source))
- # assert that no source messages contain both Qt and strprintf format specifiers
- # if this fails, go change the source as this is hacky and confusing!
- assert(not(source_f[0] and source_f[1]))
- try:
- translation_f = split_format_specifiers(find_format_specifiers(translation))
- except IndexError:
- errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
- return False
- else:
- if source_f != translation_f:
- if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
- # Allow numerus translations to omit %n specifier (usually when it only has one possible value)
- return True
- errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
- return False
- return True
-
-def all_ts_files(suffix=''):
- for filename in os.listdir(LOCALE_DIR):
- # process only language files, and do not process source language
- if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
- continue
- if suffix: # remove provided suffix
- filename = filename[0:-len(suffix)]
- filepath = os.path.join(LOCALE_DIR, filename)
- yield(filename, filepath)
-
-FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
-def remove_invalid_characters(s):
- '''Remove invalid characters from translation string'''
- return FIX_RE.sub(b'', s)
-
-# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
-# comparison, disable by default)
-_orig_escape_cdata = None
-def escape_cdata(text):
- text = _orig_escape_cdata(text)
- text = text.replace("'", '&apos;')
- text = text.replace('"', '&quot;')
- return text
-
-def contains_bitcoin_addr(text, errors):
- if text is not None and ADDRESS_REGEXP.search(text) is not None:
- errors.append('Translation "%s" contains a bitcoin address. This will be removed.' % (text))
- return True
- return False
-
-def postprocess_translations(reduce_diff_hacks=False):
- print('Checking and postprocessing...')
-
- if reduce_diff_hacks:
- global _orig_escape_cdata
- _orig_escape_cdata = ET._escape_cdata
- ET._escape_cdata = escape_cdata
-
- for (filename,filepath) in all_ts_files():
- os.rename(filepath, filepath+'.orig')
-
- have_errors = False
- for (filename,filepath) in all_ts_files('.orig'):
- # pre-fixups to cope with transifex output
- parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
- with open(filepath + '.orig', 'rb') as f:
- data = f.read()
- # remove control characters; this must be done over the entire file otherwise the XML parser will fail
- data = remove_invalid_characters(data)
- tree = ET.parse(io.BytesIO(data), parser=parser)
-
- # iterate over all messages in file
- root = tree.getroot()
- for context in root.findall('context'):
- for message in context.findall('message'):
- numerus = message.get('numerus') == 'yes'
- source = message.find('source').text
- translation_node = message.find('translation')
- # pick all numerusforms
- if numerus:
- translations = [i.text for i in translation_node.findall('numerusform')]
- else:
- translations = [translation_node.text]
-
- for translation in translations:
- if translation is None:
- continue
- errors = []
- valid = check_format_specifiers(source, translation, errors, numerus) and not contains_bitcoin_addr(translation, errors)
-
- for error in errors:
- print('%s: %s' % (filename, error))
-
- if not valid: # set type to unfinished and clear string if invalid
- translation_node.clear()
- translation_node.set('type', 'unfinished')
- have_errors = True
-
- # Remove location tags
- for location in message.findall('location'):
- message.remove(location)
-
- # Remove entire message if it is an unfinished translation
- if translation_node.get('type') == 'unfinished':
- context.remove(message)
-
- # check if document is (virtually) empty, and remove it if so
- num_messages = 0
- for context in root.findall('context'):
- for message in context.findall('message'):
- num_messages += 1
- if num_messages < MIN_NUM_MESSAGES:
- print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
- continue
-
- # write fixed-up tree
- # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
- if reduce_diff_hacks:
- out = io.BytesIO()
- tree.write(out, encoding='utf-8')
- out = out.getvalue()
- out = out.replace(b' />', b'/>')
- with open(filepath, 'wb') as f:
- f.write(out)
- else:
- tree.write(filepath, encoding='utf-8')
- return have_errors
-
-if __name__ == '__main__':
- check_at_repository_root()
- fetch_all_translations()
- postprocess_translations()
-
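
This maintenance script is removed from the tree along with its devtools README section above. One self-contained piece worth illustrating is the address filter: any translation that appears to embed a Bitcoin address is rejected and cleared. A quick check against the regexp defined above (the address is a well-known example, not a real payment destination):

```
import re

ADDRESS_REGEXP = re.compile('([13]|bc1)[a-zA-Z0-9]{30,}')

print(bool(ADDRESS_REGEXP.search('Send coins to 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2')))  # True
print(bool(ADDRESS_REGEXP.search('Amount exceeds your balance.')))                      # False
```
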
diff --git a/contrib/gitian-keys/keys.txt b/contrib/gitian-keys/keys.txt
index 33f0f7e5b0..9222a40b17 100644
--- a/contrib/gitian-keys/keys.txt
+++ b/contrib/gitian-keys/keys.txt
@@ -1,3 +1,4 @@
+9D3CC86A72F8494342EA5FD10A41BDC3F4FAFF1C Aaron Clauson (sipsorcery)
617C90010B3BD370B0AC7D424BB42E31C79111B8 Akira Takizawa
E944AE667CF960B1004BC32FCA662BE18B877A60 Andreas Schildbach
152812300785C96444D3334D17565732E08E5E41 Andrew Chow
diff --git a/contrib/guix/libexec/build.sh b/contrib/guix/libexec/build.sh
index 56b972a5cb..ee207a957c 100644
--- a/contrib/guix/libexec/build.sh
+++ b/contrib/guix/libexec/build.sh
@@ -30,23 +30,38 @@ fi
# Given a package name and an output name, return the path of that output in our
# current guix environment
store_path() {
- grep --extended-regexp "/[^-]{32}-${1}-cross-${HOST}-[^-]+${2:+-${2}}" "${GUIX_ENVIRONMENT}/manifest" \
+ grep --extended-regexp "/[^-]{32}-${1}-[^-]+${2:+-${2}}" "${GUIX_ENVIRONMENT}/manifest" \
| head --lines=1 \
| sed --expression='s|^[[:space:]]*"||' \
--expression='s|"[[:space:]]*$||'
}
# Determine output paths to use in CROSS_* environment variables
-CROSS_GLIBC="$(store_path glibc)"
-CROSS_GLIBC_STATIC="$(store_path glibc static)"
-CROSS_KERNEL="$(store_path linux-libre-headers)"
-CROSS_GCC="$(store_path gcc)"
+CROSS_GLIBC="$(store_path glibc-cross-${HOST})"
+CROSS_GLIBC_STATIC="$(store_path glibc-cross-${HOST} static)"
+CROSS_KERNEL="$(store_path linux-libre-headers-cross-${HOST})"
+CROSS_GCC="$(store_path gcc-cross-${HOST})"
+CROSS_GCC_LIBS=( "${CROSS_GCC}/lib/gcc/${HOST}"/* ) # This expands to an array of directories...
+CROSS_GCC_LIB="${CROSS_GCC_LIBS[0]}" # ...we just want the first one (there should only be one)
# Set environment variables to point Guix's cross-toolchain to the right
# includes/libs for $HOST
-export CROSS_C_INCLUDE_PATH="${CROSS_GCC}/include:${CROSS_GLIBC}/include:${CROSS_KERNEL}/include"
-export CROSS_CPLUS_INCLUDE_PATH="${CROSS_GCC}/include/c++:${CROSS_GLIBC}/include:${CROSS_KERNEL}/include"
-export CROSS_LIBRARY_PATH="${CROSS_GLIBC}/lib:${CROSS_GLIBC_STATIC}/lib:${CROSS_GCC}/lib:${CROSS_GCC}/${HOST}/lib:${CROSS_KERNEL}/lib"
+#
+# NOTE: CROSS_C_INCLUDE_PATH is missing ${CROSS_GCC_LIB}/include-fixed, because
+# the limits.h in it is missing a '#include_next <limits.h>'
+#
+export CROSS_C_INCLUDE_PATH="${CROSS_GCC_LIB}/include:${CROSS_GLIBC}/include:${CROSS_KERNEL}/include"
+export CROSS_CPLUS_INCLUDE_PATH="${CROSS_GCC}/include/c++:${CROSS_GCC}/include/c++/${HOST}:${CROSS_GCC}/include/c++/backward:${CROSS_C_INCLUDE_PATH}"
+export CROSS_LIBRARY_PATH="${CROSS_GCC}/lib:${CROSS_GCC}/${HOST}/lib:${CROSS_GCC_LIB}:${CROSS_GLIBC}/lib:${CROSS_GLIBC_STATIC}/lib"
+
+# Sanity check CROSS_*_PATH directories
+IFS=':' read -ra PATHS <<< "${CROSS_C_INCLUDE_PATH}:${CROSS_CPLUS_INCLUDE_PATH}:${CROSS_LIBRARY_PATH}"
+for p in "${PATHS[@]}"; do
+ if [ ! -d "$p" ]; then
+ echo "'$p' doesn't exist or isn't a directory... Aborting..."
+ exit 1
+ fi
+done
# Disable Guix ld auto-rpath behavior
export GUIX_LD_WRAPPER_DISABLE_RPATH=yes
@@ -121,17 +136,10 @@ DISTNAME="$(basename "$SOURCEDIST" '.tar.gz')"
# Binary Tarball Building #
###########################
-# Create a spec file to normalize ssp linking behaviour
-spec_file="$(mktemp)"
-cat << EOF > "$spec_file"
-*link_ssp:
-%{fstack-protector|fstack-protector-all|fstack-protector-strong|fstack-protector-explicit:}
-EOF
-
# Similar flags to Gitian
CONFIGFLAGS="--enable-glibc-back-compat --enable-reduce-exports --disable-bench --disable-gui-tests"
-HOST_CFLAGS="-O2 -g -specs=${spec_file} -ffile-prefix-map=${PWD}=."
-HOST_CXXFLAGS="-O2 -g -specs=${spec_file} -ffile-prefix-map=${PWD}=."
+HOST_CFLAGS="-O2 -g -ffile-prefix-map=${PWD}=."
+HOST_CXXFLAGS="-O2 -g -ffile-prefix-map=${PWD}=."
HOST_LDFLAGS="-Wl,--as-needed -Wl,--dynamic-linker=$glibc_dynamic_linker -static-libstdc++"
# Make $HOST-specific native binaries from depends available in $PATH
diff --git a/contrib/init/bitcoind.service b/contrib/init/bitcoind.service
index cfc5f77580..34c3e7b3ab 100644
--- a/contrib/init/bitcoind.service
+++ b/contrib/init/bitcoind.service
@@ -5,8 +5,9 @@
# See "man systemd.service" for details.
# Note that almost all daemon options could be specified in
-# /etc/bitcoin/bitcoin.conf, except for those explicitly specified as arguments
-# in ExecStart=
+# /etc/bitcoin/bitcoin.conf, but keep in mind those explicitly
+# specified as arguments in ExecStart= will override those in the
+# config file.
[Unit]
Description=Bitcoin daemon
@@ -18,6 +19,10 @@ ExecStart=/usr/bin/bitcoind -daemon \
-conf=/etc/bitcoin/bitcoin.conf \
-datadir=/var/lib/bitcoind
+# Make sure the config directory is readable by the service user
+PermissionsStartOnly=true
+ExecStartPre=/bin/chgrp bitcoin /etc/bitcoin
+
# Process management
####################
@@ -53,6 +58,9 @@ PrivateTmp=true
# Mount /usr, /boot/ and /etc read-only for the process.
ProtectSystem=full
+# Deny access to /home, /root and /run/user
+ProtectHome=true
+
# Disallow the process and all of its children to gain
# new privileges through execve().
NoNewPrivileges=true
diff --git a/contrib/linearize/linearize-data.py b/contrib/linearize/linearize-data.py
index 468aec04b5..95754ab937 100755
--- a/contrib/linearize/linearize-data.py
+++ b/contrib/linearize/linearize-data.py
@@ -263,12 +263,12 @@ if __name__ == '__main__':
f = open(sys.argv[1], encoding="utf8")
for line in f:
# skip comment lines
- m = re.search('^\s*#', line)
+ m = re.search(r'^\s*#', line)
if m:
continue
# parse key=value lines
- m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
+ m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
diff --git a/contrib/linearize/linearize-hashes.py b/contrib/linearize/linearize-hashes.py
index 8529470e09..02c96d2a75 100755
--- a/contrib/linearize/linearize-hashes.py
+++ b/contrib/linearize/linearize-hashes.py
@@ -106,12 +106,12 @@ if __name__ == '__main__':
f = open(sys.argv[1], encoding="utf8")
for line in f:
# skip comment lines
- m = re.search('^\s*#', line)
+ m = re.search(r'^\s*#', line)
if m:
continue
# parse key=value lines
- m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
+ m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
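
Both linearize scripts share the same tiny key=value config parser, so they receive the identical raw-string fix. A sketch of what that parsing loop accepts (the keys below are illustrative):

```
import re

settings = {}
for line in ['# example-linearize.cfg', 'rpcuser = someuser', 'max_height=313000']:
    if re.search(r'^\s*#', line):
        continue  # skip comment lines
    m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
    if m is None:
        continue
    settings[m.group(1)] = m.group(2)

print(settings)  # {'rpcuser': 'someuser', 'max_height': '313000'}
```
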
diff --git a/contrib/macdeploy/README.md b/contrib/macdeploy/README.md
index 6163734e62..e90120ea79 100644
--- a/contrib/macdeploy/README.md
+++ b/contrib/macdeploy/README.md
@@ -1,7 +1,7 @@
### MacDeploy ###
For Snow Leopard (which uses [Python 2.6](http://www.python.org/download/releases/2.6/)), you will need the param_parser package:
-
+
sudo easy_install argparse
This script should not be run manually, instead, after building as usual:
diff --git a/contrib/macdeploy/macdeployqtplus b/contrib/macdeploy/macdeployqtplus
index 9da03e5b02..d8088aa123 100755
--- a/contrib/macdeploy/macdeployqtplus
+++ b/contrib/macdeploy/macdeployqtplus
@@ -19,6 +19,7 @@
import subprocess, sys, re, os, shutil, stat, os.path, time
from string import Template
from argparse import ArgumentParser
+from typing import List, Optional
# This is ported from the original macdeployqt with modifications
@@ -48,18 +49,18 @@ class FrameworkInfo(object):
return False
def __str__(self):
- return """ Framework name: %s
- Framework directory: %s
- Framework path: %s
- Binary name: %s
- Binary directory: %s
- Binary path: %s
- Version: %s
- Install name: %s
- Deployed install name: %s
- Source file Path: %s
- Deployed Directory (relative to bundle): %s
-""" % (self.frameworkName,
+ return """ Framework name: {}
+ Framework directory: {}
+ Framework path: {}
+ Binary name: {}
+ Binary directory: {}
+ Binary path: {}
+ Version: {}
+ Install name: {}
+ Deployed install name: {}
+ Source file Path: {}
+ Deployed Directory (relative to bundle): {}
+""".format(self.frameworkName,
self.frameworkDirectory,
self.frameworkPath,
self.binaryName,
@@ -85,7 +86,7 @@ class FrameworkInfo(object):
bundleBinaryDirectory = "Contents/MacOS"
@classmethod
- def fromOtoolLibraryLine(cls, line):
+ def fromOtoolLibraryLine(cls, line: str) -> Optional['FrameworkInfo']:
# Note: line must be trimmed
if line == "":
return None
@@ -146,13 +147,12 @@ class FrameworkInfo(object):
info.sourceContentsDirectory = os.path.join(info.frameworkPath, "Contents")
info.sourceVersionContentsDirectory = os.path.join(info.frameworkPath, "Versions", info.version, "Contents")
info.destinationResourcesDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Resources")
- info.destinationContentsDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Contents")
info.destinationVersionContentsDirectory = os.path.join(cls.bundleFrameworkDirectory, info.frameworkName, "Versions", info.version, "Contents")
return info
class ApplicationBundleInfo(object):
- def __init__(self, path):
+ def __init__(self, path: str):
self.path = path
appName = "Bitcoin-Qt"
self.binaryPath = os.path.join(path, "Contents", "MacOS", appName)
@@ -167,7 +167,7 @@ class DeploymentInfo(object):
self.pluginPath = None
self.deployedFrameworks = []
- def detectQtPath(self, frameworkDirectory):
+ def detectQtPath(self, frameworkDirectory: str):
parentDir = os.path.dirname(frameworkDirectory)
if os.path.exists(os.path.join(parentDir, "translations")):
# Classic layout, e.g. "/usr/local/Trolltech/Qt-4.x.x"
@@ -180,9 +180,9 @@ class DeploymentInfo(object):
if os.path.exists(pluginPath):
self.pluginPath = pluginPath
- def usesFramework(self, name):
- nameDot = "%s." % name
- libNameDot = "lib%s." % name
+ def usesFramework(self, name: str) -> bool:
+ nameDot = "{}.".format(name)
+ libNameDot = "lib{}.".format(name)
for framework in self.deployedFrameworks:
if framework.endswith(".framework"):
if framework.startswith(nameDot):
@@ -192,7 +192,7 @@ class DeploymentInfo(object):
return True
return False
-def getFrameworks(binaryPath, verbose):
+def getFrameworks(binaryPath: str, verbose: int) -> List[FrameworkInfo]:
if verbose >= 3:
print("Inspecting with otool: " + binaryPath)
otoolbin=os.getenv("OTOOL", "otool")
@@ -202,7 +202,7 @@ def getFrameworks(binaryPath, verbose):
if verbose >= 1:
sys.stderr.write(o_stderr)
sys.stderr.flush()
- raise RuntimeError("otool failed with return code %d" % otool.returncode)
+ raise RuntimeError("otool failed with return code {}".format(otool.returncode))
otoolLines = o_stdout.split("\n")
otoolLines.pop(0) # First line is the inspected binary
@@ -221,11 +221,11 @@ def getFrameworks(binaryPath, verbose):
return libraries
-def runInstallNameTool(action, *args):
+def runInstallNameTool(action: str, *args):
installnametoolbin=os.getenv("INSTALLNAMETOOL", "install_name_tool")
subprocess.check_call([installnametoolbin, "-"+action] + list(args))
-def changeInstallName(oldName, newName, binaryPath, verbose):
+def changeInstallName(oldName: str, newName: str, binaryPath: str, verbose: int):
if verbose >= 3:
print("Using install_name_tool:")
print(" in", binaryPath)
@@ -233,21 +233,21 @@ def changeInstallName(oldName, newName, binaryPath, verbose):
print(" to", newName)
runInstallNameTool("change", oldName, newName, binaryPath)
-def changeIdentification(id, binaryPath, verbose):
+def changeIdentification(id: str, binaryPath: str, verbose: int):
if verbose >= 3:
print("Using install_name_tool:")
print(" change identification in", binaryPath)
print(" to", id)
runInstallNameTool("id", id, binaryPath)
-def runStrip(binaryPath, verbose):
+def runStrip(binaryPath: str, verbose: int):
stripbin=os.getenv("STRIP", "strip")
if verbose >= 3:
print("Using strip:")
print(" stripped", binaryPath)
subprocess.check_call([stripbin, "-x", binaryPath])
-def copyFramework(framework, path, verbose):
+def copyFramework(framework: FrameworkInfo, path: str, verbose: int) -> Optional[str]:
if framework.sourceFilePath.startswith("Qt"):
#standard place for Nokia Qt installer's frameworks
fromPath = "/Library/Frameworks/" + framework.sourceFilePath
@@ -309,7 +309,7 @@ def copyFramework(framework, path, verbose):
return toPath
-def deployFrameworks(frameworks, bundlePath, binaryPath, strip, verbose, deploymentInfo=None):
+def deployFrameworks(frameworks: List[FrameworkInfo], bundlePath: str, binaryPath: str, strip: bool, verbose: int, deploymentInfo: Optional[DeploymentInfo] = None) -> DeploymentInfo:
if deploymentInfo is None:
deploymentInfo = DeploymentInfo()
@@ -355,15 +355,15 @@ def deployFrameworks(frameworks, bundlePath, binaryPath, strip, verbose, deploym
return deploymentInfo
-def deployFrameworksForAppBundle(applicationBundle, strip, verbose):
+def deployFrameworksForAppBundle(applicationBundle: ApplicationBundleInfo, strip: bool, verbose: int) -> DeploymentInfo:
frameworks = getFrameworks(applicationBundle.binaryPath, verbose)
if len(frameworks) == 0 and verbose >= 1:
- print("Warning: Could not find any external frameworks to deploy in %s." % (applicationBundle.path))
+ print("Warning: Could not find any external frameworks to deploy in {}.".format(applicationBundle.path))
return DeploymentInfo()
else:
return deployFrameworks(frameworks, applicationBundle.path, applicationBundle.binaryPath, strip, verbose)
-def deployPlugins(appBundleInfo, deploymentInfo, strip, verbose):
+def deployPlugins(appBundleInfo: ApplicationBundleInfo, deploymentInfo: DeploymentInfo, strip: bool, verbose: int):
# Lookup available plugins, exclude unneeded
plugins = []
if deploymentInfo.pluginPath is None:
@@ -373,10 +373,12 @@ def deployPlugins(appBundleInfo, deploymentInfo, strip, verbose):
if pluginDirectory == "designer":
# Skip designer plugins
continue
- elif pluginDirectory == "phonon" or pluginDirectory == "phonon_backend":
- # Deploy the phonon plugins only if phonon is in use
- if not deploymentInfo.usesFramework("phonon"):
- continue
+ elif pluginDirectory == "printsupport":
+ # Skip printsupport plugins
+ continue
+ elif pluginDirectory == "imageformats":
+ # Skip imageformats plugins
+ continue
elif pluginDirectory == "sqldrivers":
# Deploy the sql plugins only if QtSql is in use
if not deploymentInfo.usesFramework("QtSql"):
@@ -409,6 +411,42 @@ def deployPlugins(appBundleInfo, deploymentInfo, strip, verbose):
# Deploy the mediaservice plugins only if QtMultimediaWidgets is in use
if not deploymentInfo.usesFramework("QtMultimediaWidgets"):
continue
+ elif pluginDirectory == "canbus":
+ # Deploy the canbus plugins only if QtSerialBus is in use
+ if not deploymentInfo.usesFramework("QtSerialBus"):
+ continue
+ elif pluginDirectory == "webview":
+ # Deploy the webview plugins only if QtWebView is in use
+ if not deploymentInfo.usesFramework("QtWebView"):
+ continue
+ elif pluginDirectory == "gamepads":
+ # Deploy the webview plugins only if QtGamepad is in use
+ if not deploymentInfo.usesFramework("QtGamepad"):
+ continue
+ elif pluginDirectory == "geoservices":
+ # Deploy the webview plugins only if QtLocation is in use
+ if not deploymentInfo.usesFramework("QtLocation"):
+ continue
+ elif pluginDirectory == "texttospeech":
+ # Deploy the texttospeech plugins only if QtTextToSpeech is in use
+ if not deploymentInfo.usesFramework("QtTextToSpeech"):
+ continue
+ elif pluginDirectory == "virtualkeyboard":
+ # Deploy the virtualkeyboard plugins only if QtVirtualKeyboard is in use
+ if not deploymentInfo.usesFramework("QtVirtualKeyboard"):
+ continue
+ elif pluginDirectory == "sceneparsers":
+ # Deploy the virtualkeyboard plugins only if Qt3DCore is in use
+ if not deploymentInfo.usesFramework("Qt3DCore"):
+ continue
+ elif pluginDirectory == "renderplugins":
+ # Deploy the renderplugins plugins only if Qt3DCore is in use
+ if not deploymentInfo.usesFramework("Qt3DCore"):
+ continue
+ elif pluginDirectory == "geometryloaders":
+ # Deploy the geometryloaders plugins only if Qt3DCore is in use
+ if not deploymentInfo.usesFramework("Qt3DCore"):
+ continue
for pluginName in filenames:
pluginPath = os.path.join(pluginDirectory, pluginName)
@@ -431,6 +469,10 @@ def deployPlugins(appBundleInfo, deploymentInfo, strip, verbose):
# Deploy the accessible qtquick plugin only if QtQuick is in use
if not deploymentInfo.usesFramework("QtQuick"):
continue
+ elif pluginPath == "platforminputcontexts/libqtvirtualkeyboardplugin.dylib":
+ # Deploy the virtualkeyboardplugin plugin only if QtVirtualKeyboard is in use
+ if not deploymentInfo.usesFramework("QtVirtualKeyboard"):
+ continue
plugins.append((pluginDirectory, pluginName))
@@ -499,7 +541,7 @@ app_bundle = config.app_bundle[0]
if not os.path.exists(app_bundle):
if verbose >= 1:
- sys.stderr.write("Error: Could not find app bundle \"%s\"\n" % (app_bundle))
+ sys.stderr.write("Error: Could not find app bundle \"{}\"\n".format(app_bundle))
sys.exit(1)
app_bundle_name = os.path.splitext(os.path.basename(app_bundle))[0]
@@ -511,7 +553,7 @@ if config.translations_dir and config.translations_dir[0]:
translations_dir = config.translations_dir[0]
else:
if verbose >= 1:
- sys.stderr.write("Error: Could not find translation dir \"%s\"\n" % (translations_dir))
+ sys.stderr.write("Error: Could not find translation dir \"{}\"\n".format(translations_dir))
sys.exit(1)
# ------------------------------------------------
@@ -520,7 +562,7 @@ for p in config.add_resources:
print("Checking for \"%s\"..." % p)
if not os.path.exists(p):
if verbose >= 1:
- sys.stderr.write("Error: Could not find additional resource file \"%s\"\n" % (p))
+ sys.stderr.write("Error: Could not find additional resource file \"{}\"\n".format(p))
sys.exit(1)
# ------------------------------------------------
@@ -537,17 +579,17 @@ if len(config.fancy) == 1:
p = config.fancy[0]
if verbose >= 3:
- print("Fancy: Loading \"%s\"..." % p)
+ print("Fancy: Loading \"{}\"...".format(p))
if not os.path.exists(p):
if verbose >= 1:
- sys.stderr.write("Error: Could not find fancy disk image plist at \"%s\"\n" % (p))
+ sys.stderr.write("Error: Could not find fancy disk image plist at \"{}\"\n".format(p))
sys.exit(1)
try:
fancy = plistlib.readPlist(p)
except:
if verbose >= 1:
- sys.stderr.write("Error: Could not parse fancy disk image plist at \"%s\"\n" % (p))
+ sys.stderr.write("Error: Could not parse fancy disk image plist at \"{}\"\n".format(p))
sys.exit(1)
try:
@@ -561,18 +603,18 @@ if len(config.fancy) == 1:
assert isinstance(value, list) and len(value) == 2 and isinstance(value[0], int) and isinstance(value[1], int)
except:
if verbose >= 1:
- sys.stderr.write("Error: Bad format of fancy disk image plist at \"%s\"\n" % (p))
+ sys.stderr.write("Error: Bad format of fancy disk image plist at \"{}\"\n".format(p))
sys.exit(1)
if "background_picture" in fancy:
bp = fancy["background_picture"]
if verbose >= 3:
- print("Fancy: Resolving background picture \"%s\"..." % bp)
+ print("Fancy: Resolving background picture \"{}\"...".format(bp))
if not os.path.exists(bp):
bp = os.path.join(os.path.dirname(p), bp)
if not os.path.exists(bp):
if verbose >= 1:
- sys.stderr.write("Error: Could not find background picture at \"%s\" or \"%s\"\n" % (fancy["background_picture"], bp))
+ sys.stderr.write("Error: Could not find background picture at \"{}\" or \"{}\"\n".format(fancy["background_picture"], bp))
sys.exit(1)
else:
fancy["background_picture"] = bp
@@ -623,7 +665,7 @@ try:
config.plugins = False
except RuntimeError as e:
if verbose >= 1:
- sys.stderr.write("Error: %s\n" % str(e))
+ sys.stderr.write("Error: {}\n".format(str(e)))
sys.exit(1)
# ------------------------------------------------
@@ -636,7 +678,7 @@ if config.plugins:
deployPlugins(applicationBundle, deploymentInfo, config.strip, verbose)
except RuntimeError as e:
if verbose >= 1:
- sys.stderr.write("Error: %s\n" % str(e))
+ sys.stderr.write("Error: {}\n".format(str(e)))
sys.exit(1)
# ------------------------------------------------
@@ -652,14 +694,14 @@ else:
else:
sys.stderr.write("Error: Could not find Qt translation path\n")
sys.exit(1)
- add_qt_tr = ["qt_%s.qm" % lng for lng in config.add_qt_tr[0].split(",")]
+ add_qt_tr = ["qt_{}.qm".format(lng) for lng in config.add_qt_tr[0].split(",")]
for lng_file in add_qt_tr:
p = os.path.join(qt_tr_dir, lng_file)
if verbose >= 3:
- print("Checking for \"%s\"..." % p)
+ print("Checking for \"{}\"...".format(p))
if not os.path.exists(p):
if verbose >= 1:
- sys.stderr.write("Error: Could not find Qt translation file \"%s\"\n" % (lng_file))
+ sys.stderr.write("Error: Could not find Qt translation file \"{}\"\n".format(lng_file))
sys.exit(1)
# ------------------------------------------------
@@ -700,14 +742,14 @@ if config.sign and 'CODESIGNARGS' not in os.environ:
print("You must set the CODESIGNARGS environment variable. Skipping signing.")
elif config.sign:
if verbose >= 1:
- print("Code-signing app bundle %s"%(target,))
- subprocess.check_call("codesign --force %s %s"%(os.environ['CODESIGNARGS'], target), shell=True)
+ print("Code-signing app bundle {}".format(target))
+ subprocess.check_call("codesign --force {} {}".format(os.environ['CODESIGNARGS'], target), shell=True)
# ------------------------------------------------
if config.dmg is not None:
- def runHDIUtil(verb, image_basename, **kwargs):
+ def runHDIUtil(verb: str, image_basename: str, **kwargs) -> int:
hdiutil_args = ["hdiutil", verb, image_basename + ".dmg"]
if "capture_stdout" in kwargs:
del kwargs["capture_stdout"]
@@ -721,7 +763,7 @@ if config.dmg is not None:
for key, value in kwargs.items():
hdiutil_args.append("-" + key)
- if not value is True:
+ if value is not True:
hdiutil_args.append(str(value))
return run(hdiutil_args, universal_newlines=True)
@@ -765,8 +807,8 @@ if config.dmg is not None:
output = runHDIUtil("attach", dmg_name + ".temp", readwrite=True, noverify=True, noautoopen=True, capture_stdout=True)
except subprocess.CalledProcessError as e:
sys.exit(e.returncode)
-
- m = re.search("/Volumes/(.+$)", output)
+
+ m = re.search(r"/Volumes/(.+$)", output)
disk_root = m.group(0)
disk_name = m.group(1)
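
Most of this file's churn is mechanical: %-interpolation becomes `str.format` and function signatures gain type hints. One detail worth noting is the quoted return annotation `Optional['FrameworkInfo']`: inside the class body the class name is not yet defined, so the hint must be a string forward reference. A stripped-down illustration (not the real class):

```
from typing import Optional

class FrameworkInfo:
    @classmethod
    def fromOtoolLibraryLine(cls, line: str) -> Optional['FrameworkInfo']:
        # 'FrameworkInfo' is quoted because the class object does not exist yet
        # while its own body is being evaluated.
        return cls() if line else None

print(FrameworkInfo.fromOtoolLibraryLine('QtCore.framework/Versions/5/QtCore'))
```
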
diff --git a/contrib/seeds/generate-seeds.py b/contrib/seeds/generate-seeds.py
index fe7cd1d597..7630a7a4fa 100755
--- a/contrib/seeds/generate-seeds.py
+++ b/contrib/seeds/generate-seeds.py
@@ -74,7 +74,7 @@ def name_to_ipv6(addr):
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
- match = re.match('\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
+ match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
@@ -136,4 +136,3 @@ def main():
if __name__ == '__main__':
main()
-
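
Again a raw-string conversion; the pattern is the bracketed-IPv6 branch of `parse_spec`. A minimal sketch of that branch in isolation (the IPv4 and onion branches of the real function are omitted, and the helper name is illustrative):

```
import re

def parse_ipv6_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if match:
        host = match.group(1)
        port = int(match.group(2)) if match.group(2) else defaultport
        return host, port

print(parse_ipv6_spec('[2001:db8::1]:8333', 8333))  # ('2001:db8::1', 8333)
print(parse_ipv6_spec('[::1]', 8333))               # ('::1', 8333)
```
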
diff --git a/contrib/testgen/README.md b/contrib/testgen/README.md
index 580ed541cf..573a71a675 100644
--- a/contrib/testgen/README.md
+++ b/contrib/testgen/README.md
@@ -2,7 +2,7 @@
Utilities to generate test vectors for the data-driven Bitcoin tests.
-Usage:
+Usage:
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py valid 50 > ../../src/test/data/key_io_keys_valid.json
PYTHONPATH=../../test/functional/test_framework ./gen_key_io_test_vectors.py invalid 50 > ../../src/test/data/key_io_keys_invalid.json
diff --git a/contrib/verify-commits/verify-commits.py b/contrib/verify-commits/verify-commits.py
index 255ce75092..9ec8663fba 100755
--- a/contrib/verify-commits/verify-commits.py
+++ b/contrib/verify-commits/verify-commits.py
@@ -16,7 +16,7 @@ GIT = os.getenv('GIT', 'git')
def tree_sha512sum(commit='HEAD'):
"""Calculate the Tree-sha512 for the commit.
- This is copied from github-merge.py."""
+ This is copied from github-merge.py. See https://github.com/bitcoin-core/bitcoin-maintainer-tools."""
# request metadata for entire tree, recursively
files = []