Compare commits
No commits in common. "722e00b1e9869e0ff337d40b28f2ed71d8afcd76" and "a1989efdab77c0e2e0628b6d52d7195fe50eb588" have entirely different histories.
722e00b1e9 ... a1989efdab
@@ -207,12 +207,12 @@ taler_auditor_sync_CPPFLAGS = \

 check_SCRIPTS = \
   test-auditor.sh \
-  test-kyc.sh \
   test-revocation.sh \
   test-sync.sh

 .NOTPARALLEL:
-# TESTS = $(check_SCRIPTS)
+# revocation test disabled for now: need working wallet first!
+TESTS = $(check_SCRIPTS)

 EXTRA_DIST = \
   taler-auditor.in \
@@ -223,6 +223,5 @@ EXTRA_DIST = \
   test-sync-out.conf \
   generate-auditor-basedb.sh \
   generate-auditor-basedb.conf \
-  generate-kyc-basedb.conf \
   generate-revoke-basedb.sh \
   $(check_SCRIPTS)
@@ -12,41 +12,16 @@
 #
 set -eu

-. setup.sh
-
-CONF="generate-auditor-basedb.conf"
-# Parse command-line options
-while getopts ':c:d:h' OPTION; do
-  case "$OPTION" in
-    c)
-      CONF="$OPTARG"
-      ;;
-    d)
-      BASEDB="$OPTARG"
-      ;;
-    h)
-      echo 'Supported options:'
-      # shellcheck disable=SC2016
-      echo ' -c $CONF -- set configuration'
-      # shellcheck disable=SC2016
-      echo ' -d $DB -- set database name'
-      ;;
-    ?)
-      exit_fail "Unrecognized command line option"
-      ;;
-  esac
-done
-
 # Where do we write the result?
-if [ ! -v BASEDB ]
-then
-  exit_fail "-d option required"
-fi
+BASEDB="$1"
+
+. setup.sh

 echo -n "Testing for curl ..."
 curl --help >/dev/null </dev/null || exit_skip " MISSING"
 echo " FOUND"

+CONF="generate-auditor-basedb.conf"
+
 # reset database
 echo -n "Reset 'auditor-basedb' database at $PGHOST ..."
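The hunk above changes how the base-database generator is invoked: the getopts-based '-c'/'-d' interface on the "-" side gives way to a single positional database path on the "+" side, with CONF fixed inside the script. A minimal sketch of the two call styles (the target path is illustrative, not taken from either commit):

    # "-" side: options parsed via getopts
    ./generate-auditor-basedb.sh -c generate-auditor-basedb.conf -d "$MY_TMP_DIR/basedb/auditor-basedb"

    # "+" side: database path as the first positional argument
    ./generate-auditor-basedb.sh "$MY_TMP_DIR/basedb/auditor-basedb"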
@@ -1,4 +0,0 @@
-# This file is in the public domain.
-@INLINE@ generate-auditor-basedb.conf
-
-# FIXME: add options for KYC here!
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/bin/sh
 # This file is in the public domain

 # Script to be inlined into the main test scripts. Defines function 'setup()'
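The hunk above only switches setup.sh from bash to POSIX sh, which restricts the file to POSIX constructs. A generic illustration of the difference, not taken from the diff:

    # portable under #!/bin/sh          # bash-only, to be avoided after this change
    foo() { :; }                        # function foo() { :; }
    [ -f "$f" ] && echo present         # [[ -f $f ]] && echo present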
@@ -70,78 +70,3 @@ function get_bankaccount_transactions() {
   export LIBEUFIN_SANDBOX_URL="http://localhost:18082"
   libeufin-cli sandbox demobank list-transactions --bank-account $1
 }
-
-
-# Stop libeufin sandbox and nexus (if running)
-function stop_libeufin()
-{
-  echo -n "Stopping libeufin... "
-  if [ -f "${MY_TMP_DIR:-/}/libeufin-sandbox.pid" ]
-  then
-    PID=$(cat "${MY_TMP_DIR}/libeufin-sandbox.pid" 2> /dev/null)
-    echo "Killing libeufin sandbox $PID"
-    rm "${MY_TMP_DIR}/libeufin-sandbox.pid"
-    kill "$PID" 2> /dev/null || true
-    wait "$PID" || true
-  fi
-  if [ -f "${MY_TMP_DIR:-/}/libeufin-nexus.pid" ]
-  then
-    PID=$(cat "${MY_TMP_DIR}/libeufin-nexus.pid" 2> /dev/null)
-    echo "Killing libeufin nexus $PID"
-    rm "${MY_TMP_DIR}/libeufin-nexus.pid"
-    kill "$PID" 2> /dev/null || true
-    wait "$PID" || true
-  fi
-  echo "DONE"
-}
-
-
-function launch_libeufin () {
-  # shellcheck disable=SC2016
-  export LIBEUFIN_SANDBOX_DB_CONNECTION='jdbc:postgresql://localhost/'"${DB}"'?socketFactory=org.newsclub.net.unix.AFUNIXSocketFactory$FactoryArg&socketFactoryArg='"$SOCKETDIR"'/.s.PGSQL.5432'
-  libeufin-sandbox serve \
-    --no-auth \
-    --port 18082 \
-    > "${MY_TMP_DIR}/libeufin-sandbox-stdout.log" \
-    2> "${MY_TMP_DIR}/libeufin-sandbox-stderr.log" &
-  echo $! > "${MY_TMP_DIR}/libeufin-sandbox.pid"
-  # shellcheck disable=SC2016
-  export LIBEUFIN_NEXUS_DB_CONNECTION='jdbc:postgresql://localhost/'"${DB}"'?socketFactory=org.newsclub.net.unix.AFUNIXSocketFactory$FactoryArg&socketFactoryArg='"$SOCKETDIR"'/.s.PGSQL.5432'
-  libeufin-nexus serve \
-    --port 8082 \
-    2> "${MY_TMP_DIR}/libeufin-nexus-stderr.log" \
-    > "${MY_TMP_DIR}/libeufin-nexus-stdout.log" &
-  echo $! > "${MY_TMP_DIR}/libeufin-nexus.pid"
-}
-
-
-
-# Downloads new transactions from the bank.
-function nexus_fetch_transactions () {
-  export LIBEUFIN_NEXUS_USERNAME="exchange"
-  export LIBEUFIN_NEXUS_PASSWORD="x"
-  export LIBEUFIN_NEXUS_URL="http://localhost:8082/"
-  libeufin-cli accounts \
-    fetch-transactions \
-    --range-type since-last \
-    --level report \
-    exchange-nexus > /dev/null
-  unset LIBEUFIN_NEXUS_USERNAME
-  unset LIBEUFIN_NEXUS_PASSWORD
-  unset LIBEUFIN_NEXUS_URL
-}
-
-
-# Instruct Nexus to all the prepared payments (= those
-# POSTed to /transfer by the exchange).
-function nexus_submit_to_sandbox () {
-  export LIBEUFIN_NEXUS_USERNAME="exchange"
-  export LIBEUFIN_NEXUS_PASSWORD="x"
-  export LIBEUFIN_NEXUS_URL="http://localhost:8082/"
-  libeufin-cli accounts \
-    submit-payments\
-    exchange-nexus
-  unset LIBEUFIN_NEXUS_USERNAME
-  unset LIBEUFIN_NEXUS_PASSWORD
-  unset LIBEUFIN_NEXUS_URL
-}
-
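The helpers removed above (and re-added to the test script in the following hunks) all follow one PID-file pattern: background a server, record $! in a .pid file, and later kill and wait on that PID before removing the file. A generic, hypothetical sketch of that pattern, assuming MY_TMP_DIR is set as in these scripts:

    start_bg() {   # $1 = name, remaining arguments = command to background
      name="$1"; shift
      "$@" > "${MY_TMP_DIR}/${name}-stdout.log" 2> "${MY_TMP_DIR}/${name}-stderr.log" &
      echo $! > "${MY_TMP_DIR}/${name}.pid"
    }

    stop_bg() {    # $1 = name
      if [ -f "${MY_TMP_DIR}/$1.pid" ]
      then
        pid=$(cat "${MY_TMP_DIR}/$1.pid")
        rm "${MY_TMP_DIR}/$1.pid"
        kill "$pid" 2> /dev/null || true
        wait "$pid" || true
      fi
    }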
@@ -55,6 +55,28 @@ LIBEUFIN_SETTLE_TIME=1

 . setup.sh

+# Stop libeufin sandbox and nexus (if running)
+function stop_libeufin()
+{
+  echo -n "Stopping libeufin... "
+  if test -f ${MY_TMP_DIR:-/}/libeufin-sandbox.pid
+  then
+    PID=$(cat ${MY_TMP_DIR}/libeufin-sandbox.pid 2> /dev/null)
+    echo "Killing libeufin sandbox $PID"
+    rm "${MY_TMP_DIR}/libeufin-sandbox.pid"
+    kill "$PID" 2> /dev/null || true
+    wait "$PID" || true
+  fi
+  if test -f ${MY_TMP_DIR:-/}/libeufin-nexus.pid
+  then
+    PID=$(cat ${MY_TMP_DIR}/libeufin-nexus.pid 2> /dev/null)
+    echo "Killing libeufin nexus $PID"
+    rm "${MY_TMP_DIR}/libeufin-nexus.pid"
+    kill "$PID" 2> /dev/null || true
+    wait "$PID" || true
+  fi
+  echo "DONE"
+}

 # Cleanup exchange and libeufin between runs.
 function cleanup()
@@ -96,6 +118,52 @@ function exit_cleanup()
 # Install cleanup handler (except for kill -9)
 trap exit_cleanup EXIT

+function launch_libeufin () {
+  # shellcheck disable=SC2016
+  export LIBEUFIN_SANDBOX_DB_CONNECTION='jdbc:postgresql://localhost/'"${DB}"'?socketFactory=org.newsclub.net.unix.AFUNIXSocketFactory$FactoryArg&socketFactoryArg='"$SOCKETDIR"'/.s.PGSQL.5432'
+  export MY_TMP_DIR
+  libeufin-sandbox serve --no-auth --port 18082 \
+    > "${MY_TMP_DIR}/libeufin-sandbox-stdout.log" \
+    2> "${MY_TMP_DIR}/libeufin-sandbox-stderr.log" &
+  echo $! > "${MY_TMP_DIR}/libeufin-sandbox.pid"
+  # shellcheck disable=SC2016
+  export LIBEUFIN_NEXUS_DB_CONNECTION='jdbc:postgresql://localhost/'"${DB}"'?socketFactory=org.newsclub.net.unix.AFUNIXSocketFactory$FactoryArg&socketFactoryArg='"$SOCKETDIR"'/.s.PGSQL.5432'
+  libeufin-nexus serve --port 8082 \
+    2> "${MY_TMP_DIR}/libeufin-nexus-stderr.log" \
+    > "${MY_TMP_DIR}/libeufin-nexus-stdout.log" &
+  echo $! > "${MY_TMP_DIR}/libeufin-nexus.pid"
+}
+
+# Downloads new transactions from the bank.
+function nexus_fetch_transactions () {
+  export LIBEUFIN_NEXUS_USERNAME="exchange"
+  export LIBEUFIN_NEXUS_PASSWORD="x"
+  export LIBEUFIN_NEXUS_URL="http://localhost:8082/"
+  libeufin-cli accounts \
+    fetch-transactions \
+    --range-type since-last \
+    --level report \
+    exchange-nexus > /dev/null
+  unset LIBEUFIN_NEXUS_USERNAME
+  unset LIBEUFIN_NEXUS_PASSWORD
+  unset LIBEUFIN_NEXUS_URL
+}
+
+
+# Instruct Nexus to all the prepared payments (= those
+# POSTed to /transfer by the exchange).
+function nexus_submit_to_sandbox () {
+  export LIBEUFIN_NEXUS_USERNAME="exchange"
+  export LIBEUFIN_NEXUS_PASSWORD="x"
+  export LIBEUFIN_NEXUS_URL="http://localhost:8082/"
+  libeufin-cli accounts \
+    submit-payments\
+    exchange-nexus
+  unset LIBEUFIN_NEXUS_USERNAME
+  unset LIBEUFIN_NEXUS_PASSWORD
+  unset LIBEUFIN_NEXUS_URL
+}
+

 # Operations to run before the actual audit
 function pre_audit () {
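The LIBEUFIN_*_DB_CONNECTION exports added above point libeufin's JDBC driver at the test's private Postgres instance through its Unix socket (via AFUNIXSocketFactory). With the values these scripts use elsewhere (DB=auditor-basedb, SOCKETDIR under the mktemp directory), the expanded string looks roughly like this illustrative example:

    jdbc:postgresql://localhost/auditor-basedb?socketFactory=org.newsclub.net.unix.AFUNIXSocketFactory$FactoryArg&socketFactoryArg=/tmp/taler-auditor-basedbXXXXXX/postgres/sockets/.s.PGSQL.5432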
@@ -2170,6 +2238,7 @@ function check_with_database()
 {
   BASEDB="$1"
   CONF="$1.conf"
+  ORIGIN=$(pwd)
   echo "Running test suite with database $BASEDB using configuration $CONF"
   MASTER_PRIV_FILE="${BASEDB}.mpriv"
   taler-config \
@@ -2275,7 +2344,7 @@ export PGHOST
 MYDIR="${MY_TMP_DIR}/basedb"
 mkdir -p "${MYDIR}"
 echo "Generating fresh database at $MYDIR"
-if faketime -f '-1 d' ./generate-auditor-basedb.sh -d "$MYDIR/$DB"
+if faketime -f '-1 d' ./generate-auditor-basedb.sh "$MYDIR/$DB"
 then
   echo -n "Reset 'auditor-basedb' database at $PGHOST ..."
   dropdb "auditor-basedb" >/dev/null 2>/dev/null || true
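Taken together with the check_with_database() hunk above, the driver logic at the bottom of the test script runs roughly as follows: generate the base database one day in the past with faketime, reset the auditor-basedb Postgres database, and run the selected tests against it. A condensed sketch, not the literal script:

    MYDIR="${MY_TMP_DIR}/basedb"
    mkdir -p "${MYDIR}"
    if faketime -f '-1 d' ./generate-auditor-basedb.sh "$MYDIR/$DB"
    then
      dropdb "auditor-basedb" 2> /dev/null || true
      createdb "auditor-basedb" || exit_skip "Could not create database '$BASEDB' at $PGHOST"
      check_with_database "$MYDIR/$DB"
      if [ "$fail" != "0" ]
      then
        exit "$fail"
      fi
    else
      echo "Generation failed"
      exit 1
    fi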
@ -1,784 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# This file is part of TALER
|
|
||||||
# Copyright (C) 2014-2023 Taler Systems SA
|
|
||||||
#
|
|
||||||
# TALER is free software; you can redistribute it and/or modify it under the
|
|
||||||
# terms of the GNU General Public License as published by the Free Software
|
|
||||||
# Foundation; either version 3, or (at your option) any later version.
|
|
||||||
#
|
|
||||||
# TALER is distributed in the hope that it will be useful, but WITHOUT ANY
|
|
||||||
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License along with
|
|
||||||
# TALER; see the file COPYING. If not, If not, see <http://www.gnu.org/license>
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# shellcheck disable=SC2317
|
|
||||||
# shellcheck disable=SC1091
|
|
||||||
#
|
|
||||||
#
|
|
||||||
# Setup database which was generated from a perfectly normal
|
|
||||||
# exchange-wallet interaction with KYC enabled and transactions
|
|
||||||
# blocked due to KYC and run the auditor against it.
|
|
||||||
#
|
|
||||||
# Check that the auditor report is as expected.
|
|
||||||
#
|
|
||||||
# Requires 'jq' tool and Postgres superuser rights!
|
|
||||||
#
|
|
||||||
set -eu
|
|
||||||
#set -x
|
|
||||||
|
|
||||||
# Set of numbers for all the testcases.
|
|
||||||
# When adding new tests, increase the last number:
|
|
||||||
ALL_TESTS=$(seq 0 1)
|
|
||||||
|
|
||||||
# $TESTS determines which tests we should run.
|
|
||||||
# This construction is used to make it easy to
|
|
||||||
# only run a subset of the tests. To only run a subset,
|
|
||||||
# pass the numbers of the tests to run as the FIRST
|
|
||||||
# argument to test-kyc.sh, i.e.:
|
|
||||||
#
|
|
||||||
# $ test-kyc.sh "1 3"
|
|
||||||
#
|
|
||||||
# to run tests 1 and 3 only. By default, all tests are run.
|
|
||||||
#
|
|
||||||
TESTS=${1:-$ALL_TESTS}
|
|
||||||
|
|
||||||
# Global variable to run the auditor processes under valgrind
|
|
||||||
# VALGRIND=valgrind
|
|
||||||
VALGRIND=""
|
|
||||||
|
|
||||||
# Number of seconds to let libeuifn background
|
|
||||||
# tasks apply a cycle of payment submission and
|
|
||||||
# history request.
|
|
||||||
LIBEUFIN_SETTLE_TIME=1
|
|
||||||
|
|
||||||
. setup.sh
|
|
||||||
|
|
||||||
|
|
||||||
# Cleanup exchange and libeufin between runs.
|
|
||||||
function cleanup()
|
|
||||||
{
|
|
||||||
if test ! -z "${EPID:-}"
|
|
||||||
then
|
|
||||||
echo -n "Stopping exchange $EPID..."
|
|
||||||
kill -TERM "$EPID"
|
|
||||||
wait "$EPID" || true
|
|
||||||
echo "DONE"
|
|
||||||
unset EPID
|
|
||||||
fi
|
|
||||||
stop_libeufin
|
|
||||||
}
|
|
||||||
|
|
||||||
# Cleanup to run whenever we exit
|
|
||||||
function exit_cleanup()
|
|
||||||
{
|
|
||||||
echo "Running exit-cleanup"
|
|
||||||
if test ! -z "${POSTGRES_PATH:-}"
|
|
||||||
then
|
|
||||||
echo "Stopping Postgres at ${POSTGRES_PATH}"
|
|
||||||
"${POSTGRES_PATH}/pg_ctl" \
|
|
||||||
-D "$TMPDIR" \
|
|
||||||
-l /dev/null \
|
|
||||||
stop \
|
|
||||||
&> /dev/null \
|
|
||||||
|| true
|
|
||||||
fi
|
|
||||||
cleanup
|
|
||||||
for n in $(jobs -p)
|
|
||||||
do
|
|
||||||
kill "$n" 2> /dev/null || true
|
|
||||||
done
|
|
||||||
wait || true
|
|
||||||
echo "DONE"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Install cleanup handler (except for kill -9)
|
|
||||||
trap exit_cleanup EXIT
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Operations to run before the actual audit
|
|
||||||
function pre_audit () {
|
|
||||||
# Launch bank
|
|
||||||
echo -n "Launching bank"
|
|
||||||
launch_libeufin
|
|
||||||
for n in $(seq 1 80)
|
|
||||||
do
|
|
||||||
echo -n "."
|
|
||||||
sleep 0.1
|
|
||||||
OK=1
|
|
||||||
wget http://localhost:18082/ \
|
|
||||||
-o /dev/null \
|
|
||||||
-O /dev/null \
|
|
||||||
>/dev/null \
|
|
||||||
&& break
|
|
||||||
OK=0
|
|
||||||
done
|
|
||||||
if [ 1 != "$OK" ]
|
|
||||||
then
|
|
||||||
exit_skip "Failed to launch Sandbox"
|
|
||||||
fi
|
|
||||||
sleep "$LIBEUFIN_SETTLE_TIME"
|
|
||||||
for n in $(seq 1 80)
|
|
||||||
do
|
|
||||||
echo -n "."
|
|
||||||
sleep 0.1
|
|
||||||
OK=1
|
|
||||||
wget http://localhost:8082/ \
|
|
||||||
-o /dev/null \
|
|
||||||
-O /dev/null \
|
|
||||||
>/dev/null \
|
|
||||||
&& break
|
|
||||||
OK=0
|
|
||||||
done
|
|
||||||
if [ 1 != "$OK" ]
|
|
||||||
then
|
|
||||||
exit_skip "Failed to launch Nexus"
|
|
||||||
fi
|
|
||||||
echo " DONE"
|
|
||||||
if test "${1:-no}" = "aggregator"
|
|
||||||
then
|
|
||||||
echo -n "Running exchange aggregator ..."
|
|
||||||
taler-exchange-aggregator \
|
|
||||||
-y \
|
|
||||||
-L "INFO" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/aggregator.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
|
||||||
echo -n "Running exchange closer ..."
|
|
||||||
taler-exchange-closer \
|
|
||||||
-L "INFO" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/closer.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
|
||||||
echo -n "Running exchange transfer ..."
|
|
||||||
taler-exchange-transfer \
|
|
||||||
-L "INFO" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/transfer.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
|
||||||
echo -n "Running Nexus payment submitter ..."
|
|
||||||
nexus_submit_to_sandbox
|
|
||||||
echo " DONE"
|
|
||||||
# Make outgoing transactions appear in the TWG:
|
|
||||||
echo -n "Download bank transactions ..."
|
|
||||||
nexus_fetch_transactions
|
|
||||||
echo " DONE"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
# actual audit run
|
|
||||||
function audit_only () {
|
|
||||||
# Run the auditor!
|
|
||||||
echo -n "Running audit(s) ..."
|
|
||||||
|
|
||||||
# Restart so that first run is always fresh, and second one is incremental
|
|
||||||
taler-auditor-dbinit \
|
|
||||||
-r \
|
|
||||||
-c "$CONF"
|
|
||||||
$VALGRIND taler-helper-auditor-aggregation \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-aggregation.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-aggregation.log" \
|
|
||||||
|| exit_fail "aggregation audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-aggregation \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-aggregation-inc.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-aggregation-inc.log" \
|
|
||||||
|| exit_fail "incremental aggregation audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-coins \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-coins.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-coins.log" \
|
|
||||||
|| exit_fail "coin audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-coins \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-coins-inc.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-coins-inc.log" \
|
|
||||||
|| exit_fail "incremental coin audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-deposits \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-deposits.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-deposits.log" \
|
|
||||||
|| exit_fail "deposits audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-deposits \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-deposits-inc.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-deposits-inc.log" \
|
|
||||||
|| exit_fail "incremental deposits audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-reserves \
|
|
||||||
-i \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-reserves.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-reserves.log" \
|
|
||||||
|| exit_fail "reserves audit failed"
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-reserves \
|
|
||||||
-i \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-reserves-inc.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-audit-reserves-inc.log" \
|
|
||||||
|| exit_fail "incremental reserves audit failed"
|
|
||||||
echo -n "."
|
|
||||||
rm -f "${MY_TMP_DIR}/test-wire-audit.log"
|
|
||||||
thaw() {
|
|
||||||
$VALGRIND taler-helper-auditor-wire \
|
|
||||||
-i \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-wire.json \
|
|
||||||
2>> "${MY_TMP_DIR}/test-wire-audit.log"
|
|
||||||
}
|
|
||||||
thaw || ( echo -e " FIRST CALL TO taler-helper-auditor-wire FAILED,\nRETRY AFTER TWO SECONDS..." | tee -a "${MY_TMP_DIR}/test-wire-audit.log"
|
|
||||||
sleep 2
|
|
||||||
thaw || exit_fail "wire audit failed" )
|
|
||||||
echo -n "."
|
|
||||||
$VALGRIND taler-helper-auditor-wire \
|
|
||||||
-i \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-wire-inc.json \
|
|
||||||
2> "${MY_TMP_DIR}/test-wire-audit-inc.log" \
|
|
||||||
|| exit_fail "wire audit inc failed"
|
|
||||||
echo -n "."
|
|
||||||
|
|
||||||
echo " DONE"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Cleanup to run after the auditor
|
|
||||||
function post_audit () {
|
|
||||||
taler-exchange-dbinit \
|
|
||||||
-c "$CONF" \
|
|
||||||
-g \
|
|
||||||
|| exit_fail "exchange DB GC failed"
|
|
||||||
|
|
||||||
cleanup
|
|
||||||
echo -n "TeXing ."
|
|
||||||
taler-helper-auditor-render.py \
|
|
||||||
test-audit-aggregation.json \
|
|
||||||
test-audit-coins.json \
|
|
||||||
test-audit-deposits.json \
|
|
||||||
test-audit-reserves.json \
|
|
||||||
test-audit-wire.json \
|
|
||||||
< ../../contrib/auditor-report.tex.j2 \
|
|
||||||
> test-report.tex \
|
|
||||||
|| exit_fail "Renderer failed"
|
|
||||||
|
|
||||||
echo -n "."
|
|
||||||
timeout 10 pdflatex test-report.tex \
|
|
||||||
>/dev/null \
|
|
||||||
|| exit_fail "pdflatex failed"
|
|
||||||
echo -n "."
|
|
||||||
timeout 10 pdflatex test-report.tex \
|
|
||||||
>/dev/null
|
|
||||||
echo " DONE"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Run audit process on current database, including report
|
|
||||||
# generation. Pass "aggregator" as $1 to run
|
|
||||||
# $ taler-exchange-aggregator
|
|
||||||
# before auditor (to trigger pending wire transfers).
|
|
||||||
# Pass "drain" as $2 to run a drain operation as well.
|
|
||||||
function run_audit () {
|
|
||||||
pre_audit "${1:-no}"
|
|
||||||
if test "${2:-no}" = "drain"
|
|
||||||
then
|
|
||||||
echo -n "Starting exchange..."
|
|
||||||
taler-exchange-httpd \
|
|
||||||
-c "${CONF}" \
|
|
||||||
-L INFO \
|
|
||||||
2> "${MY_TMP_DIR}/exchange-httpd-drain.err" &
|
|
||||||
EPID=$!
|
|
||||||
|
|
||||||
# Wait for all services to be available
|
|
||||||
for n in $(seq 1 50)
|
|
||||||
do
|
|
||||||
echo -n "."
|
|
||||||
sleep 0.1
|
|
||||||
OK=0
|
|
||||||
# exchange
|
|
||||||
wget "http://localhost:8081/seed" \
|
|
||||||
-o /dev/null \
|
|
||||||
-O /dev/null \
|
|
||||||
>/dev/null \
|
|
||||||
|| continue
|
|
||||||
OK=1
|
|
||||||
break
|
|
||||||
done
|
|
||||||
echo "... DONE."
|
|
||||||
export CONF
|
|
||||||
|
|
||||||
echo -n "Running taler-exchange-offline drain "
|
|
||||||
|
|
||||||
taler-exchange-offline \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "${CONF}" \
|
|
||||||
drain TESTKUDOS:0.1 \
|
|
||||||
exchange-account-1 payto://iban/SANDBOXX/DE360679?receiver-name=Exchange+Drain \
|
|
||||||
upload \
|
|
||||||
2> "${MY_TMP_DIR}/taler-exchange-offline-drain.log" \
|
|
||||||
|| exit_fail "offline draining failed"
|
|
||||||
kill -TERM "$EPID"
|
|
||||||
wait "$EPID" || true
|
|
||||||
unset EPID
|
|
||||||
echo -n "Running taler-exchange-drain ..."
|
|
||||||
printf "\n" | taler-exchange-drain \
|
|
||||||
-L DEBUG \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/taler-exchange-drain.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
|
||||||
|
|
||||||
echo -n "Running taler-exchange-transfer ..."
|
|
||||||
taler-exchange-transfer \
|
|
||||||
-L INFO \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/drain-transfer.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
|
||||||
|
|
||||||
export LIBEUFIN_NEXUS_USERNAME="exchange"
|
|
||||||
export LIBEUFIN_NEXUS_PASSWORD="x"
|
|
||||||
export LIBEUFIN_NEXUS_URL="http://localhost:8082/"
|
|
||||||
PAIN_UUID=$(libeufin-cli accounts list-payments exchange-nexus | jq .initiatedPayments[] | jq 'select(.submitted==false)' | jq -r .paymentInitiationId)
|
|
||||||
if test -z "${PAIN_UUID}"
|
|
||||||
then
|
|
||||||
echo -n "Payment likely already submitted, running submit-payments without UUID anyway ..."
|
|
||||||
libeufin-cli accounts \
|
|
||||||
submit-payments \
|
|
||||||
exchange-nexus
|
|
||||||
else
|
|
||||||
echo -n "Running payment submission for transaction ${PAIN_UUID} ..."
|
|
||||||
libeufin-cli accounts \
|
|
||||||
submit-payments \
|
|
||||||
--payment-uuid "${PAIN_UUID}" \
|
|
||||||
exchange-nexus
|
|
||||||
fi
|
|
||||||
echo " DONE"
|
|
||||||
echo -n "Import outgoing transactions..."
|
|
||||||
libeufin-cli accounts \
|
|
||||||
fetch-transactions \
|
|
||||||
--range-type since-last \
|
|
||||||
--level report \
|
|
||||||
exchange-nexus
|
|
||||||
echo " DONE"
|
|
||||||
fi
|
|
||||||
audit_only
|
|
||||||
post_audit
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Do a full reload of the (original) database
|
|
||||||
function full_reload()
|
|
||||||
{
|
|
||||||
echo -n "Doing full reload of the database (loading ${BASEDB}.sql into $DB at $PGHOST)... "
|
|
||||||
dropdb "$DB" 2> /dev/null || true
|
|
||||||
createdb -T template0 "$DB" \
|
|
||||||
|| exit_skip "could not create database $DB (at $PGHOST)"
|
|
||||||
# Import pre-generated database, -q(ietly) using single (-1) transaction
|
|
||||||
psql -Aqt "$DB" \
|
|
||||||
-q \
|
|
||||||
-1 \
|
|
||||||
-f "${BASEDB}.sql" \
|
|
||||||
> /dev/null \
|
|
||||||
|| exit_skip "Failed to load database $DB from ${BASEDB}.sql"
|
|
||||||
echo "DONE"
|
|
||||||
# Technically, this call shouldn't be needed as libeufin should already be stopped here...
|
|
||||||
stop_libeufin
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
function test_0() {
|
|
||||||
|
|
||||||
echo "===========0: normal run with aggregator==========="
|
|
||||||
run_audit aggregator
|
|
||||||
echo "Checking output"
|
|
||||||
# if an emergency was detected, that is a bug and we should fail
|
|
||||||
echo -n "Test for emergencies... "
|
|
||||||
jq -e .emergencies[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected emergency detected in ordinary run" || echo PASS
|
|
||||||
echo -n "Test for deposit confirmation emergencies... "
|
|
||||||
jq -e .deposit_confirmation_inconsistencies[0] < test-audit-deposits.json > /dev/null && exit_fail "Unexpected deposit confirmation inconsistency detected" || echo PASS
|
|
||||||
echo -n "Test for emergencies by count... "
|
|
||||||
jq -e .emergencies_by_count[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected emergency by count detected in ordinary run" || echo PASS
|
|
||||||
|
|
||||||
echo -n "Test for wire inconsistencies... "
|
|
||||||
jq -e .wire_out_amount_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected wire out inconsistency detected in ordinary run"
|
|
||||||
jq -e .reserve_in_amount_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected reserve in inconsistency detected in ordinary run"
|
|
||||||
jq -e .misattribution_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected misattribution inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected row inconsistency detected in ordinary run"
|
|
||||||
jq -e .denomination_key_validity_withdraw_inconsistencies[0] < test-audit-reserves.json > /dev/null && exit_fail "Unexpected denomination key withdraw inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_minor_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected minor row inconsistency detected in ordinary run"
|
|
||||||
jq -e .lag_details[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected lag detected in ordinary run"
|
|
||||||
jq -e .wire_format_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected wire format inconsistencies detected in ordinary run"
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: check operation balances are correct (once we have all transaction types and wallet is deterministic)
|
|
||||||
# TODO: check revenue summaries are correct (once we have all transaction types and wallet is deterministic)
|
|
||||||
|
|
||||||
echo PASS
|
|
||||||
|
|
||||||
LOSS=$(jq -r .total_bad_sig_loss < test-audit-aggregation.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong total bad sig loss from aggregation, got unexpected loss of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .irregular_loss < test-audit-coins.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong total bad sig loss from coins, got unexpected loss of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_bad_sig_loss < test-audit-reserves.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong total bad sig loss from reserves, got unexpected loss of $LOSS"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo -n "Test for wire amounts... "
|
|
||||||
WIRED=$(jq -r .total_wire_in_delta_plus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_in_delta_minus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_out_delta_plus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_out_delta_minus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_misattribution_in < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Checking for unexpected arithmetic differences "
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-aggregation.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from aggregations, got unexpected plus of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-aggregation.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from aggregation, got unexpected minus of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-coins.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from coins, got unexpected plus of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-coins.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from coins, got unexpected minus of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-reserves.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from reserves, got unexpected plus of $LOSS"
|
|
||||||
fi
|
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-reserves.json)
|
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Wrong arithmetic delta from reserves, got unexpected minus of $LOSS"
|
|
||||||
fi
|
|
||||||
|
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-aggregation.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from aggregations detected in ordinary run"
|
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from coins detected in ordinary run"
|
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-reserves.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from reserves detected in ordinary run"
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Checking for unexpected wire out differences "
|
|
||||||
jq -e .wire_out_inconsistencies[0] < test-audit-aggregation.json > /dev/null && exit_fail "Unexpected wire out inconsistencies detected in ordinary run"
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
# cannot easily undo aggregator, hence full reload
|
|
||||||
full_reload
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Run without aggregator, hence auditor should detect wire
|
|
||||||
# transfer lag!
|
|
||||||
function test_1() {
|
|
||||||
|
|
||||||
echo "===========1: normal run==========="
|
|
||||||
run_audit
|
|
||||||
|
|
||||||
echo "Checking output"
|
|
||||||
# if an emergency was detected, that is a bug and we should fail
|
|
||||||
echo -n "Test for emergencies... "
|
|
||||||
jq -e .emergencies[0] \
|
|
||||||
< test-audit-coins.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected emergency detected in ordinary run";
|
|
||||||
echo "PASS"
|
|
||||||
echo -n "Test for emergencies by count... "
|
|
||||||
jq -e .emergencies_by_count[0] \
|
|
||||||
< test-audit-coins.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected emergency by count detected in ordinary run"
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Test for wire inconsistencies... "
|
|
||||||
jq -e .wire_out_amount_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected wire out inconsistency detected in ordinary run"
|
|
||||||
jq -e .reserve_in_amount_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected reserve in inconsistency detected in ordinary run"
|
|
||||||
jq -e .misattribution_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected misattribution inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected row inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_minor_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected minor row inconsistency detected in ordinary run"
|
|
||||||
jq -e .wire_format_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected wire format inconsistencies detected in ordinary run"
|
|
||||||
|
|
||||||
# TODO: check operation balances are correct (once we have all transaction types and wallet is deterministic)
|
|
||||||
# TODO: check revenue summaries are correct (once we have all transaction types and wallet is deterministic)
|
|
||||||
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Check for lag detection... "
|
|
||||||
|
|
||||||
# Check wire transfer lag reported (no aggregator!)
|
|
||||||
# NOTE: This test is EXPECTED to fail for ~1h after
|
|
||||||
# re-generating the test database as we do not
|
|
||||||
# report lag of less than 1h (see GRACE_PERIOD in
|
|
||||||
# taler-helper-auditor-wire.c)
|
|
||||||
jq -e .lag_details[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
|| exit_fail "Lag not detected in run without aggregator"
|
|
||||||
|
|
||||||
LAG=$(jq -r .total_amount_lag < test-audit-wire.json)
|
|
||||||
if [ "$LAG" = "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total lag to be non-zero"
|
|
||||||
fi
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
|
|
||||||
echo -n "Test for wire amounts... "
|
|
||||||
WIRED=$(jq -r .total_wire_in_delta_plus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_in_delta_minus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_out_delta_plus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_wire_out_delta_minus < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
WIRED=$(jq -r .total_misattribution_in < test-audit-wire.json)
|
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
|
||||||
then
|
|
||||||
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
|
||||||
fi
|
|
||||||
# Database was unmodified, no need to undo
|
|
||||||
echo "OK"
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# *************** Main test loop starts here **************
|
|
||||||
|
|
||||||
|
|
||||||
# Run all the tests against the database given in $1.
|
|
||||||
# Sets $fail to 0 on success, non-zero on failure.
|
|
||||||
function check_with_database()
|
|
||||||
{
|
|
||||||
BASEDB="$1"
|
|
||||||
CONF="$1.conf"
|
|
||||||
echo "Running test suite with database $BASEDB using configuration $CONF"
|
|
||||||
MASTER_PRIV_FILE="${BASEDB}.mpriv"
|
|
||||||
taler-config \
|
|
||||||
-f \
|
|
||||||
-c "${CONF}" \
|
|
||||||
-s exchange-offline \
|
|
||||||
-o MASTER_PRIV_FILE \
|
|
||||||
-V "${MASTER_PRIV_FILE}"
|
|
||||||
MASTER_PUB=$(gnunet-ecc -p "$MASTER_PRIV_FILE")
|
|
||||||
|
|
||||||
echo "MASTER PUB is ${MASTER_PUB} using file ${MASTER_PRIV_FILE}"
|
|
||||||
|
|
||||||
# Load database
|
|
||||||
full_reload
|
|
||||||
|
|
||||||
# Run test suite
|
|
||||||
fail=0
|
|
||||||
for i in $TESTS
|
|
||||||
do
|
|
||||||
"test_$i"
|
|
||||||
if test 0 != $fail
|
|
||||||
then
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
echo "Cleanup (disabled, leaving database $DB behind)"
|
|
||||||
# dropdb $DB
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# *************** Main logic starts here **************
|
|
||||||
|
|
||||||
# ####### Setup globals ######
|
|
||||||
# Postgres database to use (must match configuration file)
|
|
||||||
export DB="auditor-basedb"
|
|
||||||
|
|
||||||
# test required commands exist
|
|
||||||
echo "Testing for jq"
|
|
||||||
jq -h > /dev/null || exit_skip "jq required"
|
|
||||||
echo "Testing for faketime"
|
|
||||||
faketime -h > /dev/null || exit_skip "faketime required"
|
|
||||||
# NOTE: really check for all three libeufin commands?
|
|
||||||
echo "Testing for libeufin"
|
|
||||||
libeufin-cli --help >/dev/null 2> /dev/null </dev/null || exit_skip "libeufin required"
|
|
||||||
echo "Testing for pdflatex"
|
|
||||||
which pdflatex > /dev/null </dev/null || exit_skip "pdflatex required"
|
|
||||||
echo "Testing for taler-wallet-cli"
|
|
||||||
taler-wallet-cli -h >/dev/null </dev/null 2>/dev/null || exit_skip "taler-wallet-cli required"
|
|
||||||
|
|
||||||
|
|
||||||
echo -n "Testing for Postgres"
|
|
||||||
# Available directly in path?
|
|
||||||
INITDB_BIN=$(command -v initdb) || true
|
|
||||||
if [[ -n "$INITDB_BIN" ]]; then
|
|
||||||
echo " FOUND (in path) at $INITDB_BIN"
|
|
||||||
else
|
|
||||||
HAVE_INITDB=$(find /usr -name "initdb" | head -1 2> /dev/null | grep postgres) \
|
|
||||||
|| exit_skip " MISSING"
|
|
||||||
echo " FOUND at $(dirname "$HAVE_INITDB")"
|
|
||||||
INITDB_BIN=$(echo "$HAVE_INITDB" | grep bin/initdb | grep postgres | sort -n | tail -n1)
|
|
||||||
fi
|
|
||||||
POSTGRES_PATH=$(dirname "$INITDB_BIN")
|
|
||||||
|
|
||||||
MY_TMP_DIR=$(mktemp -d /tmp/taler-auditor-basedbXXXXXX)
|
|
||||||
echo "Using $MY_TMP_DIR for logging and temporary data"
|
|
||||||
TMPDIR="$MY_TMP_DIR/postgres"
|
|
||||||
mkdir -p "$TMPDIR"
|
|
||||||
echo -n "Setting up Postgres DB at $TMPDIR ..."
|
|
||||||
$INITDB_BIN \
|
|
||||||
--no-sync \
|
|
||||||
--auth=trust \
|
|
||||||
-D "${TMPDIR}" \
|
|
||||||
> "${MY_TMP_DIR}/postgres-dbinit.log" \
|
|
||||||
2> "${MY_TMP_DIR}/postgres-dbinit.err"
|
|
||||||
echo "DONE"
|
|
||||||
SOCKETDIR="${TMPDIR}/sockets"
|
|
||||||
mkdir "${SOCKETDIR}"
|
|
||||||
echo -n "Launching Postgres service"
|
|
||||||
cat - >> "$TMPDIR/postgresql.conf" <<EOF
|
|
||||||
unix_socket_directories='${TMPDIR}/sockets'
|
|
||||||
fsync=off
|
|
||||||
max_wal_senders=0
|
|
||||||
synchronous_commit=off
|
|
||||||
wal_level=minimal
|
|
||||||
listen_addresses=''
|
|
||||||
EOF
|
|
||||||
grep -v host \
|
|
||||||
< "$TMPDIR/pg_hba.conf" \
|
|
||||||
> "$TMPDIR/pg_hba.conf.new"
|
|
||||||
mv "$TMPDIR/pg_hba.conf.new" "$TMPDIR/pg_hba.conf"
|
|
||||||
"${POSTGRES_PATH}/pg_ctl" \
|
|
||||||
-D "$TMPDIR" \
|
|
||||||
-l /dev/null \
|
|
||||||
start \
|
|
||||||
> "${MY_TMP_DIR}/postgres-start.log" \
|
|
||||||
2> "${MY_TMP_DIR}/postgres-start.err"
|
|
||||||
echo " DONE"
|
|
||||||
PGHOST="$TMPDIR/sockets"
|
|
||||||
export PGHOST
|
|
||||||
|
|
||||||
MYDIR="${MY_TMP_DIR}/basedb"
|
|
||||||
mkdir -p "${MYDIR}"
|
|
||||||
echo "Generating fresh database at $MYDIR"
|
|
||||||
if faketime -f '-1 d' ./generate-auditor-basedb.sh \
|
|
||||||
-c generate-kyc-basedb.conf \
|
|
||||||
-d "$MYDIR/$DB"
|
|
||||||
then
|
|
||||||
echo -n "Reset 'auditor-basedb' database at $PGHOST ..."
|
|
||||||
dropdb "auditor-basedb" >/dev/null 2>/dev/null || true
|
|
||||||
createdb "auditor-basedb" || exit_skip "Could not create database '$BASEDB' at $PGHOST"
|
|
||||||
echo " DONE"
|
|
||||||
check_with_database "$MYDIR/$DB"
|
|
||||||
if [ "$fail" != "0" ]
|
|
||||||
then
|
|
||||||
exit "$fail"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
echo "Generation failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
exit 0
|
|
@ -19,15 +19,13 @@
|
|||||||
#
|
#
|
||||||
# Check that the auditor report is as expected.
|
# Check that the auditor report is as expected.
|
||||||
#
|
#
|
||||||
# shellcheck disable=SC2317
|
|
||||||
#
|
|
||||||
# Requires 'jq' tool and Postgres superuser rights!
|
# Requires 'jq' tool and Postgres superuser rights!
|
||||||
set -eu
|
set -eu
|
||||||
# set -x
|
# set -x
|
||||||
|
|
||||||
# Set of numbers for all the testcases.
|
# Set of numbers for all the testcases.
|
||||||
# When adding new tests, increase the last number:
|
# When adding new tests, increase the last number:
|
||||||
ALL_TESTS=$(seq 0 4)
|
ALL_TESTS=`seq 0 4`
|
||||||
|
|
||||||
# $TESTS determines which tests we should run.
|
# $TESTS determines which tests we should run.
|
||||||
# This construction is used to make it easy to
|
# This construction is used to make it easy to
|
||||||
@ -44,18 +42,50 @@ TESTS=${1:-$ALL_TESTS}
|
|||||||
# Global variable to run the auditor processes under valgrind
|
# Global variable to run the auditor processes under valgrind
|
||||||
# VALGRIND=valgrind
|
# VALGRIND=valgrind
|
||||||
VALGRIND=""
|
VALGRIND=""
|
||||||
LOGLEVEL="INFO"
|
|
||||||
|
|
||||||
. setup.sh
|
# Exit, with status code "skip" (no 'real' failure)
|
||||||
|
function exit_skip() {
|
||||||
|
echo "SKIPPING test: $1"
|
||||||
|
exit 77
|
||||||
|
}
|
||||||
|
|
||||||
|
# Exit, with error message (hard failure)
|
||||||
|
function exit_fail() {
|
||||||
|
echo "FAILING test: $1"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
function stop_libeufin()
|
||||||
|
{
|
||||||
|
echo "killing libeufin..."
|
||||||
|
if test -f ${MYDIR:-/}/libeufin-sandbox.pid
|
||||||
|
then
|
||||||
|
echo "Killing libeufin sandbox"
|
||||||
|
PID=`cat ${MYDIR}/libeufin-sandbox.pid 2> /dev/null`
|
||||||
|
rm ${MYDIR}/libeufin-sandbox.pid
|
||||||
|
kill $PID 2> /dev/null || true
|
||||||
|
wait $PID || true
|
||||||
|
fi
|
||||||
|
if test -f ${MYDIR:-/}/libeufin-nexus.pid
|
||||||
|
then
|
||||||
|
echo "Killing libeufin nexus"
|
||||||
|
PID=`cat ${MYDIR}/libeufin-nexus.pid 2> /dev/null`
|
||||||
|
rm ${MYDIR}/libeufin-nexus.pid
|
||||||
|
kill $PID 2> /dev/null || true
|
||||||
|
wait $PID || true
|
||||||
|
fi
|
||||||
|
echo "killing libeufin DONE"
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
# Cleanup to run whenever we exit
|
# Cleanup to run whenever we exit
|
||||||
function cleanup()
|
function cleanup()
|
||||||
{
|
{
|
||||||
if [ ! -z "${EPID:-}" ]
|
if test ! -z "${EPID:-}"
|
||||||
then
|
then
|
||||||
echo -n "Stopping exchange $EPID..."
|
echo -n "Stopping exchange $EPID..."
|
||||||
kill -TERM "$EPID"
|
kill -TERM $EPID
|
||||||
wait "$EPID"
|
wait $EPID
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
unset EPID
|
unset EPID
|
||||||
fi
|
fi
|
||||||
@ -66,20 +96,15 @@ function cleanup()
|
|||||||
function exit_cleanup()
|
function exit_cleanup()
|
||||||
{
|
{
|
||||||
echo "Running exit-cleanup"
|
echo "Running exit-cleanup"
|
||||||
if [ ! -z "${POSTGRES_PATH:-}" ]
|
if test ! -z "${POSTGRES_PATH:-}"
|
||||||
then
|
then
|
||||||
echo "Stopping Postgres at ${POSTGRES_PATH}"
|
echo "Stopping Postgres at ${POSTGRES_PATH}"
|
||||||
"${POSTGRES_PATH}/pg_ctl" \
|
${POSTGRES_PATH}/pg_ctl -D $TMPDIR -l /dev/null stop &> /dev/null || true
|
||||||
-D "$TMPDIR" \
|
|
||||||
-l /dev/null \
|
|
||||||
stop \
|
|
||||||
&> /dev/null \
|
|
||||||
|| true
|
|
||||||
fi
|
fi
|
||||||
cleanup
|
cleanup
|
||||||
for n in $(jobs -p)
|
for n in `jobs -p`
|
||||||
do
|
do
|
||||||
kill "$n" 2> /dev/null || true
|
kill $n 2> /dev/null || true
|
||||||
done
|
done
|
||||||
wait
|
wait
|
||||||
echo "DONE"
|
echo "DONE"
|
||||||
@ -88,80 +113,94 @@ function exit_cleanup()
|
|||||||
# Install cleanup handler (except for kill -9)
|
# Install cleanup handler (except for kill -9)
|
||||||
trap exit_cleanup EXIT
|
trap exit_cleanup EXIT
|
||||||
|
|
||||||
|
# Downloads new transactions from the bank.
|
||||||
|
function nexus_fetch_transactions () {
|
||||||
|
export LIBEUFIN_NEXUS_USERNAME=exchange
|
||||||
|
export LIBEUFIN_NEXUS_PASSWORD=x
|
||||||
|
export LIBEUFIN_NEXUS_URL=http://localhost:8082/
|
||||||
|
libeufin-cli accounts fetch-transactions \
|
||||||
|
--range-type since-last --level report exchange-nexus > /dev/null
|
||||||
|
unset LIBEUFIN_NEXUS_USERNAME
|
||||||
|
unset LIBEUFIN_NEXUS_PASSWORD
|
||||||
|
unset LIBEUFIN_NEXUS_URL
|
||||||
|
}
|
||||||
|
|
||||||
|
# Instruct Nexus to all the prepared payments (= those
|
||||||
|
# POSTed to /transfer by the exchange).
|
||||||
|
function nexus_submit_to_sandbox () {
|
||||||
|
export LIBEUFIN_NEXUS_USERNAME=exchange
|
||||||
|
export LIBEUFIN_NEXUS_PASSWORD=x
|
||||||
|
export LIBEUFIN_NEXUS_URL=http://localhost:8082/
|
||||||
|
libeufin-cli accounts submit-payments exchange-nexus
|
||||||
|
unset LIBEUFIN_NEXUS_USERNAME
|
||||||
|
unset LIBEUFIN_NEXUS_PASSWORD
|
||||||
|
unset LIBEUFIN_NEXUS_URL
|
||||||
|
}
|
||||||
|
|
||||||
function get_payto_uri() {
|
function get_payto_uri() {
|
||||||
export LIBEUFIN_SANDBOX_USERNAME=$1
|
export LIBEUFIN_SANDBOX_USERNAME=$1
|
||||||
export LIBEUFIN_SANDBOX_PASSWORD=$2
|
export LIBEUFIN_SANDBOX_PASSWORD=$2
|
||||||
export LIBEUFIN_SANDBOX_URL=http://localhost:18082
|
export LIBEUFIN_SANDBOX_URL=http://localhost:18082
|
||||||
libeufin-cli sandbox demobank info \
|
libeufin-cli sandbox demobank info --bank-account $1 | jq --raw-output '.paytoUri'
|
||||||
--bank-account "$1" \
|
|
||||||
| jq --raw-output '.paytoUri'
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function launch_libeufin () {
|
||||||
|
export LIBEUFIN_NEXUS_DB_CONNECTION="jdbc:sqlite:${DB}-nexus.sqlite3"
|
||||||
|
cd $MYDIR
|
||||||
|
libeufin-nexus serve --port 8082 \
|
||||||
|
2> ${MYDIR}/libeufin-nexus-stderr.log \
|
||||||
|
> ${MYDIR}/libeufin-nexus-stdout.log &
|
||||||
|
echo $! > ${MYDIR}/libeufin-nexus.pid
|
||||||
|
export LIBEUFIN_SANDBOX_DB_CONNECTION="jdbc:sqlite:${DB}-sandbox.sqlite3"
|
||||||
|
libeufin-sandbox serve --no-auth --port 18082 \
|
||||||
|
> ${MYDIR}/libeufin-sandbox-stdout.log \
|
||||||
|
2> ${MYDIR}/libeufin-sandbox-stderr.log &
|
||||||
|
echo $! > ${MYDIR}/libeufin-sandbox.pid
|
||||||
|
cd $ORIGIN
|
||||||
|
}
|
||||||
|
|
||||||
# Operations to run before the actual audit
|
# Operations to run before the actual audit
|
||||||
function pre_audit () {
|
function pre_audit () {
|
||||||
# Launch bank
|
# Launch bank
|
||||||
echo -n "Launching bank "
|
echo -n "Launching bank "
|
||||||
|
EXCHANGE_URL=`taler-config -c $CONF -s EXCHANGE -o BASE_URL`
|
||||||
launch_libeufin
|
launch_libeufin
|
||||||
for n in $(seq 1 80)
|
for n in `seq 1 80`
|
||||||
do
|
do
|
||||||
echo -n "."
|
echo -n "."
|
||||||
sleep 0.1
|
sleep 0.1
|
||||||
OK=1
|
OK=1
|
||||||
wget http://localhost:18082/ \
|
wget http://localhost:18082/ -o /dev/null -O /dev/null >/dev/null && break
|
||||||
-o /dev/null \
|
|
||||||
-O /dev/null \
|
|
||||||
>/dev/null && break
|
|
||||||
OK=0
|
OK=0
|
||||||
done
|
done
|
||||||
if [ 1 != "$OK" ]
|
if [ 1 != $OK ]
|
||||||
then
|
then
|
||||||
exit_skip "Failed to launch Sandbox"
|
exit_skip "Failed to launch Sandbox"
|
||||||
fi
|
fi
|
||||||
for n in $(seq 1 80)
|
for n in `seq 1 80`
|
||||||
do
|
do
|
||||||
echo -n "."
|
echo -n "."
|
||||||
sleep 0.1
|
sleep 0.1
|
||||||
OK=1
|
OK=1
|
||||||
wget http://localhost:8082/ \
|
wget http://localhost:8082/ -o /dev/null -O /dev/null >/dev/null && break
|
||||||
-o /dev/null \
|
|
||||||
-O /dev/null \
|
|
||||||
>/dev/null && break
|
|
||||||
OK=0
|
OK=0
|
||||||
done
|
done
|
||||||
if [ 1 != "$OK" ]
|
if [ 1 != $OK ]
|
||||||
then
|
then
|
||||||
exit_skip "Failed to launch Nexus"
|
exit_skip "Failed to launch Nexus"
|
||||||
fi
|
fi
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
if [ "${1:-no}" = "aggregator" ]
|
if test ${1:-no} = "aggregator"
|
||||||
then
|
then
|
||||||
export CONF
|
export CONF
|
||||||
echo -n "Running exchange aggregator ... (config: $CONF)"
|
echo -n "Running exchange aggregator ... (config: $CONF)"
|
||||||
taler-exchange-aggregator \
|
taler-exchange-aggregator -L INFO -t -c $CONF -y 2> ${MYDIR}/aggregator.log || exit_fail "FAIL"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
-y \
|
|
||||||
2> "${MY_TMP_DIR}/aggregator.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
echo -n "Running exchange closer ..."
|
echo -n "Running exchange closer ..."
|
||||||
taler-exchange-closer \
|
taler-exchange-closer -L INFO -t -c $CONF 2> ${MYDIR}/closer.log || exit_fail "FAIL"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/closer.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
echo -n "Running exchange transfer ..."
|
echo -n "Running exchange transfer ..."
|
||||||
taler-exchange-transfer \
|
taler-exchange-transfer -L INFO -t -c $CONF 2> ${MYDIR}/transfer.log || exit_fail "FAIL"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-t \
|
|
||||||
-c "$CONF" \
|
|
||||||
2> "${MY_TMP_DIR}/transfer.log" \
|
|
||||||
|| exit_fail "FAIL"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
echo -n "Running Nexus payment submitter ..."
|
echo -n "Running Nexus payment submitter ..."
|
||||||
nexus_submit_to_sandbox
|
nexus_submit_to_sandbox
|
||||||
@ -179,93 +218,28 @@ function audit_only () {
|
|||||||
echo -n "Running audit(s) ... (conf is $CONF)"
|
echo -n "Running audit(s) ... (conf is $CONF)"
|
||||||
|
|
||||||
# Restart so that first run is always fresh, and second one is incremental
|
# Restart so that first run is always fresh, and second one is incremental
|
||||||
taler-auditor-dbinit \
|
taler-auditor-dbinit -r -c $CONF
|
||||||
-r \
|
$VALGRIND taler-helper-auditor-aggregation -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-aggregation.json 2> test-audit-aggregation.log || exit_fail "aggregation audit failed"
|
||||||
-c "$CONF"
|
|
||||||
$VALGRIND taler-helper-auditor-aggregation \
|
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-aggregation.json \
|
|
||||||
2> test-audit-aggregation.log \
|
|
||||||
|| exit_fail "aggregation audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-aggregation \
|
$VALGRIND taler-helper-auditor-aggregation -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-aggregation-inc.json 2> test-audit-aggregation-inc.log || exit_fail "incremental aggregation audit failed"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-aggregation-inc.json \
|
|
||||||
2> test-audit-aggregation-inc.log \
|
|
||||||
|| exit_fail "incremental aggregation audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-coins \
|
$VALGRIND taler-helper-auditor-coins -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-coins.json 2> test-audit-coins.log || exit_fail "coin audit failed"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-coins.json \
|
|
||||||
2> test-audit-coins.log \
|
|
||||||
|| exit_fail "coin audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-coins \
|
$VALGRIND taler-helper-auditor-coins -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-coins-inc.json 2> test-audit-coins-inc.log || exit_fail "incremental coin audit failed"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-coins-inc.json \
|
|
||||||
2> test-audit-coins-inc.log \
|
|
||||||
|| exit_fail "incremental coin audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-deposits \
|
$VALGRIND taler-helper-auditor-deposits -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-deposits.json 2> test-audit-deposits.log || exit_fail "deposits audit failed"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-deposits.json \
|
|
||||||
2> test-audit-deposits.log \
|
|
||||||
|| exit_fail "deposits audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-deposits \
|
$VALGRIND taler-helper-auditor-deposits -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-deposits-inc.json 2> test-audit-deposits-inc.log || exit_fail "incremental deposits audit failed"
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-deposits-inc.json \
|
|
||||||
2> test-audit-deposits-inc.log \
|
|
||||||
|| exit_fail "incremental deposits audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-reserves \
|
$VALGRIND taler-helper-auditor-reserves -i -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-reserves.json 2> test-audit-reserves.log || exit_fail "reserves audit failed"
|
||||||
-i \
|
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-reserves.json \
|
|
||||||
2> test-audit-reserves.log \
|
|
||||||
|| exit_fail "reserves audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-reserves \
|
$VALGRIND taler-helper-auditor-reserves -i -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-reserves-inc.json 2> test-audit-reserves-inc.log || exit_fail "incremental reserves audit failed"
|
||||||
-i \
|
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-reserves-inc.json \
|
|
||||||
2> test-audit-reserves-inc.log \
|
|
||||||
|| exit_fail "incremental reserves audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-wire \
|
$VALGRIND taler-helper-auditor-wire -i -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-wire.json 2> test-wire-audit.log || exit_fail "wire audit failed"
|
||||||
-i \
|
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-wire.json \
|
|
||||||
2> test-wire-audit.log \
|
|
||||||
|| exit_fail "wire audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
$VALGRIND taler-helper-auditor-wire \
|
$VALGRIND taler-helper-auditor-wire -i -L DEBUG -c $CONF -m $MASTER_PUB > test-audit-wire-inc.json 2> test-wire-audit-inc.log || exit_fail "wire audit failed"
|
||||||
-i \
|
|
||||||
-L "$LOGLEVEL" \
|
|
||||||
-c "$CONF" \
|
|
||||||
-m "$MASTER_PUB" \
|
|
||||||
> test-audit-wire-inc.json \
|
|
||||||
2> test-wire-audit-inc.log \
|
|
||||||
|| exit_fail "wire audit failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -274,22 +248,12 @@ function audit_only () {
|
|||||||
function post_audit () {
|
function post_audit () {
|
||||||
cleanup
|
cleanup
|
||||||
echo -n "TeXing ."
|
echo -n "TeXing ."
|
||||||
taler-helper-auditor-render.py \
|
taler-helper-auditor-render.py test-audit-aggregation.json test-audit-coins.json test-audit-deposits.json test-audit-reserves.json test-audit-wire.json < ../../contrib/auditor-report.tex.j2 > test-report.tex || exit_fail "Renderer failed"
|
||||||
test-audit-aggregation.json \
|
|
||||||
test-audit-coins.json \
|
|
||||||
test-audit-deposits.json \
|
|
||||||
test-audit-reserves.json \
|
|
||||||
test-audit-wire.json \
|
|
||||||
< ../../contrib/auditor-report.tex.j2 \
|
|
||||||
> test-report.tex \
|
|
||||||
|| exit_fail "Renderer failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
timeout 10 pdflatex test-report.tex \
|
timeout 10 pdflatex test-report.tex >/dev/null || exit_fail "pdflatex failed"
|
||||||
>/dev/null \
|
|
||||||
|| exit_fail "pdflatex failed"
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
timeout 10 pdflatex test-report.tex \
|
timeout 10 pdflatex test-report.tex >/dev/null
|
||||||
>/dev/null
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -299,9 +263,10 @@ function post_audit () {
|
|||||||
# $ taler-exchange-aggregator
|
# $ taler-exchange-aggregator
|
||||||
# before auditor (to trigger pending wire transfers).
|
# before auditor (to trigger pending wire transfers).
|
||||||
function run_audit () {
|
function run_audit () {
|
||||||
pre_audit "${1:-no}"
|
pre_audit ${1:-no}
|
||||||
audit_only
|
audit_only
|
||||||
post_audit
|
post_audit
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -309,21 +274,35 @@ function run_audit () {
|
|||||||
function full_reload()
|
function full_reload()
|
||||||
{
|
{
|
||||||
echo -n "Doing full reload of the database... "
|
echo -n "Doing full reload of the database... "
|
||||||
dropdb "$DB" 2> /dev/null || true
|
dropdb $DB 2> /dev/null || true
|
||||||
createdb -T template0 "$DB" \
|
createdb -T template0 $DB || exit_skip "could not create database $DB (at $PGHOST)"
|
||||||
|| exit_skip "could not create database $DB (at $PGHOST)"
|
|
||||||
# Import pre-generated database, -q(ietly) using single (-1) transaction
|
# Import pre-generated database, -q(ietly) using single (-1) transaction
|
||||||
psql -Aqt "$DB" \
|
psql -Aqt $DB -q -1 -f ${BASEDB}.sql > /dev/null || exit_skip "Failed to load database $DB from ${BASEDB}.sql"
|
||||||
-q \
|
|
||||||
-1 \
|
|
||||||
-f "${BASEDB}.sql" \
|
|
||||||
> /dev/null \
|
|
||||||
|| exit_skip "Failed to load database $DB from ${BASEDB}.sql"
|
|
||||||
echo "DONE"
|
echo "DONE"
|
||||||
|
cd $MYDIR
|
||||||
|
rm -f ${DB}-nexus.sqlite3 ${DB}-sandbox.sqlite3 || true # libeufin
|
||||||
|
echo "Loading libeufin Nexus basedb: ${BASEDB}-libeufin-nexus.sql"
|
||||||
|
sqlite3 ${DB}-nexus.sqlite3 < ${BASEDB}-libeufin-nexus.sql || exit_skip "Failed to load Nexus database"
|
||||||
|
echo "DONE"
|
||||||
|
echo "Loading libeufin Sandbox basedb: ${BASEDB}-libeufin-nexus.sql"
|
||||||
|
sqlite3 ${DB}-sandbox.sqlite3 < ${BASEDB}-libeufin-sandbox.sql || exit_skip "Failed to load Sandbox database"
|
||||||
|
echo "DONE"
|
||||||
|
# Exchange payto URI contains the (dynamically generated)
|
||||||
|
# IBAN, that can only be written in CONF after libeufin is
|
||||||
|
# setup.
|
||||||
|
taler-config -c $CONF -s exchange-account-1 -o PAYTO_URI &> /dev/null || (
|
||||||
|
echo -n "Specifying exchange payto URI in the configuration ($CONF) (grab IBAN from ${DB}-sandbox.sqlite3)...";
|
||||||
|
EXCHANGE_IBAN=`echo "SELECT iban FROM BankAccounts WHERE label='exchange'" | sqlite3 ${DB}-sandbox.sqlite3`;
|
||||||
|
taler-config -c $CONF -s exchange-account-1 -o PAYTO_URI \
|
||||||
|
-V "payto://iban/SANDBOXX/$EXCHANGE_IBAN?receiver-name=Exchange+Company"
|
||||||
|
echo " DONE"
|
||||||
|
)
|
||||||
|
cd $ORIGIN
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
function test_0() {
|
function test_0() {
|
||||||
|
|
||||||
echo "===========0: normal run with aggregator==========="
|
echo "===========0: normal run with aggregator==========="
|
||||||
run_audit aggregator
|
run_audit aggregator
|
||||||
|
|
||||||
@ -352,105 +331,94 @@ function test_0() {
|
|||||||
|
|
||||||
echo PASS
|
echo PASS
|
||||||
|
|
||||||
LOSS=$(jq -r .total_bad_sig_loss < test-audit-aggregation.json)
|
LOSS=`jq -r .total_bad_sig_loss < test-audit-aggregation.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong total bad sig loss from aggregation, got unexpected loss of $LOSS"
|
exit_fail "Wrong total bad sig loss from aggregation, got unexpected loss of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .irregular_loss < test-audit-coins.json)
|
LOSS=`jq -r .irregular_loss < test-audit-coins.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong total bad sig loss from coins, got unexpected loss of $LOSS"
|
exit_fail "Wrong total bad sig loss from coins, got unexpected loss of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_bad_sig_loss < test-audit-reserves.json)
|
LOSS=`jq -r .total_bad_sig_loss < test-audit-reserves.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong total bad sig loss from reserves, got unexpected loss of $LOSS"
|
exit_fail "Wrong total bad sig loss from reserves, got unexpected loss of $LOSS"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -n "Test for wire amounts... "
|
echo -n "Test for wire amounts... "
|
||||||
WIRED=$(jq -r .total_wire_in_delta_plus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_in_delta_plus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_in_delta_minus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_in_delta_minus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_out_delta_plus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_out_delta_plus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_out_delta_minus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_out_delta_minus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_misattribution_in < test-audit-wire.json)
|
WIRED=`jq -r .total_misattribution_in < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
echo "PASS"
|
echo PASS
|
||||||
|
|
||||||
echo -n "Checking for unexpected arithmetic differences "
|
echo -n "Checking for unexpected arithmetic differences "
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-aggregation.json)
|
LOSS=`jq -r .total_arithmetic_delta_plus < test-audit-aggregation.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from aggregations, got unexpected plus of $LOSS"
|
exit_fail "Wrong arithmetic delta from aggregations, got unexpected plus of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-aggregation.json)
|
LOSS=`jq -r .total_arithmetic_delta_minus < test-audit-aggregation.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from aggregation, got unexpected minus of $LOSS"
|
exit_fail "Wrong arithmetic delta from aggregation, got unexpected minus of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-coins.json)
|
LOSS=`jq -r .total_arithmetic_delta_plus < test-audit-coins.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from coins, got unexpected plus of $LOSS"
|
exit_fail "Wrong arithmetic delta from coins, got unexpected plus of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-coins.json)
|
LOSS=`jq -r .total_arithmetic_delta_minus < test-audit-coins.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from coins, got unexpected minus of $LOSS"
|
exit_fail "Wrong arithmetic delta from coins, got unexpected minus of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_plus < test-audit-reserves.json)
|
LOSS=`jq -r .total_arithmetic_delta_plus < test-audit-reserves.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from reserves, got unexpected plus of $LOSS"
|
exit_fail "Wrong arithmetic delta from reserves, got unexpected plus of $LOSS"
|
||||||
fi
|
fi
|
||||||
LOSS=$(jq -r .total_arithmetic_delta_minus < test-audit-reserves.json)
|
LOSS=`jq -r .total_arithmetic_delta_minus < test-audit-reserves.json`
|
||||||
if [ "$LOSS" != "TESTKUDOS:0" ]
|
if test $LOSS != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong arithmetic delta from reserves, got unexpected minus of $LOSS"
|
exit_fail "Wrong arithmetic delta from reserves, got unexpected minus of $LOSS"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] \
|
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-aggregation.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from aggregations detected in ordinary run"
|
||||||
< test-audit-aggregation.json \
|
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from coins detected in ordinary run"
|
||||||
> /dev/null \
|
jq -e .amount_arithmetic_inconsistencies[0] < test-audit-reserves.json > /dev/null && exit_fail "Unexpected arithmetic inconsistencies from reserves detected in ordinary run"
|
||||||
&& exit_fail "Unexpected arithmetic inconsistencies from aggregations detected in ordinary run"
|
echo PASS
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] \
|
|
||||||
< test-audit-coins.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected arithmetic inconsistencies from coins detected in ordinary run"
|
|
||||||
jq -e .amount_arithmetic_inconsistencies[0] \
|
|
||||||
< test-audit-reserves.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected arithmetic inconsistencies from reserves detected in ordinary run"
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Checking for unexpected wire out differences "
|
echo -n "Checking for unexpected wire out differences "
|
||||||
jq -e .wire_out_inconsistencies[0] \
|
jq -e .wire_out_inconsistencies[0] < test-audit-aggregation.json > /dev/null && exit_fail "Unexpected wire out inconsistencies detected in ordinary run"
|
||||||
< test-audit-aggregation.json \
|
echo PASS
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected wire out inconsistencies detected in ordinary run"
|
|
||||||
echo "PASS"
|
|
||||||
|
|
||||||
# cannot easily undo aggregator, hence full reload
|
# cannot easily undo aggregator, hence full reload
|
||||||
full_reload
|
full_reload
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -464,72 +432,46 @@ function test_1() {
|
|||||||
echo "Checking output"
|
echo "Checking output"
|
||||||
# if an emergency was detected, that is a bug and we should fail
|
# if an emergency was detected, that is a bug and we should fail
|
||||||
echo -n "Test for emergencies... "
|
echo -n "Test for emergencies... "
|
||||||
jq -e .emergencies[0] \
|
jq -e .emergencies[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected emergency detected in ordinary run" || echo PASS
|
||||||
< test-audit-coins.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected emergency detected in ordinary run" \
|
|
||||||
|| echo "PASS"
|
|
||||||
echo -n "Test for emergencies by count... "
|
echo -n "Test for emergencies by count... "
|
||||||
jq -e .emergencies_by_count[0] \
|
jq -e .emergencies_by_count[0] < test-audit-coins.json > /dev/null && exit_fail "Unexpected emergency by count detected in ordinary run" || echo PASS
|
||||||
< test-audit-coins.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected emergency by count detected in ordinary run" \
|
|
||||||
|| echo "PASS"
|
|
||||||
|
|
||||||
echo -n "Test for wire inconsistencies... "
|
echo -n "Test for wire inconsistencies... "
|
||||||
jq -e .wire_out_amount_inconsistencies[0] \
|
jq -e .wire_out_amount_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected wire out inconsistency detected in ordinary run"
|
||||||
< test-audit-wire.json \
|
jq -e .reserve_in_amount_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected reserve in inconsistency detected in ordinary run"
|
||||||
> /dev/null \
|
jq -e .misattribution_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected misattribution inconsistency detected in ordinary run"
|
||||||
&& exit_fail "Unexpected wire out inconsistency detected in ordinary run"
|
jq -e .row_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected row inconsistency detected in ordinary run"
|
||||||
jq -e .reserve_in_amount_inconsistencies[0] \
|
jq -e .row_minor_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected minor row inconsistency detected in ordinary run"
|
||||||
< test-audit-wire.json \
|
jq -e .wire_format_inconsistencies[0] < test-audit-wire.json > /dev/null && exit_fail "Unexpected wire format inconsistencies detected in ordinary run"
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected reserve in inconsistency detected in ordinary run"
|
|
||||||
jq -e .misattribution_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected misattribution inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected row inconsistency detected in ordinary run"
|
|
||||||
jq -e .row_minor_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected minor row inconsistency detected in ordinary run"
|
|
||||||
jq -e .wire_format_inconsistencies[0] \
|
|
||||||
< test-audit-wire.json \
|
|
||||||
> /dev/null \
|
|
||||||
&& exit_fail "Unexpected wire format inconsistencies detected in ordinary run"
|
|
||||||
|
|
||||||
# TODO: check operation balances are correct (once we have all transaction types and wallet is deterministic)
|
# TODO: check operation balances are correct (once we have all transaction types and wallet is deterministic)
|
||||||
# TODO: check revenue summaries are correct (once we have all transaction types and wallet is deterministic)
|
# TODO: check revenue summaries are correct (once we have all transaction types and wallet is deterministic)
|
||||||
|
|
||||||
echo "PASS"
|
echo PASS
|
||||||
|
|
||||||
echo -n "Test for wire amounts... "
|
echo -n "Test for wire amounts... "
|
||||||
WIRED=$(jq -r .total_wire_in_delta_plus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_in_delta_plus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_in_delta_minus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_in_delta_minus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_out_delta_plus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_out_delta_plus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
exit_fail "Expected total wire delta plus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_wire_out_delta_minus < test-audit-wire.json)
|
WIRED=`jq -r .total_wire_out_delta_minus < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
exit_fail "Expected total wire delta minus wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
WIRED=$(jq -r .total_misattribution_in < test-audit-wire.json)
|
WIRED=`jq -r .total_misattribution_in < test-audit-wire.json`
|
||||||
if [ "$WIRED" != "TESTKUDOS:0" ]
|
if test $WIRED != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
exit_fail "Expected total misattribution in wrong, got $WIRED"
|
||||||
fi
|
fi
|
||||||
@ -544,37 +486,37 @@ function test_1() {
|
|||||||
function test_2() {
|
function test_2() {
|
||||||
|
|
||||||
echo "===========2: recoup amount inconsistency==========="
|
echo "===========2: recoup amount inconsistency==========="
|
||||||
echo "UPDATE exchange.recoup SET amount_val=5 WHERE recoup_uuid=1" | psql -Aqt "$DB"
|
echo "UPDATE exchange.recoup SET amount_val=5 WHERE recoup_uuid=1" | psql -Aqt $DB
|
||||||
|
|
||||||
run_audit
|
run_audit
|
||||||
|
|
||||||
# Reserve balance is now wrong
|
# Reserve balance is now wrong
|
||||||
echo -n "Testing inconsistency detection... "
|
echo -n "Testing inconsistency detection... "
|
||||||
AMOUNT=$(jq -r .reserve_balance_summary_wrong_inconsistencies[0].auditor < test-audit-reserves.json)
|
AMOUNT=`jq -r .reserve_balance_summary_wrong_inconsistencies[0].auditor < test-audit-reserves.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:3" ]
|
if test $AMOUNT != "TESTKUDOS:3"
|
||||||
then
|
then
|
||||||
exit_fail "Reserve auditor amount $AMOUNT is wrong"
|
exit_fail "Reserve auditor amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
AMOUNT=$(jq -r .reserve_balance_summary_wrong_inconsistencies[0].exchange < test-audit-reserves.json)
|
AMOUNT=`jq -r .reserve_balance_summary_wrong_inconsistencies[0].exchange < test-audit-reserves.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:0" ]
|
if test $AMOUNT != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Reserve exchange amount $AMOUNT is wrong"
|
exit_fail "Reserve exchange amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
# Coin spent exceeded coin's value
|
# Coin spent exceeded coin's value
|
||||||
AMOUNT=$(jq -r .amount_arithmetic_inconsistencies[0].auditor < test-audit-coins.json)
|
AMOUNT=`jq -r .amount_arithmetic_inconsistencies[0].auditor < test-audit-coins.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:2" ]
|
if test $AMOUNT != "TESTKUDOS:2"
|
||||||
then
|
then
|
||||||
exit_fail "Coin auditor amount $AMOUNT is wrong"
|
exit_fail "Coin auditor amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
AMOUNT=$(jq -r .amount_arithmetic_inconsistencies[0].exchange < test-audit-coins.json)
|
AMOUNT=`jq -r .amount_arithmetic_inconsistencies[0].exchange < test-audit-coins.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:5" ]
|
if test $AMOUNT != "TESTKUDOS:5"
|
||||||
then
|
then
|
||||||
exit_fail "Coin exchange amount $AMOUNT is wrong"
|
exit_fail "Coin exchange amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
echo "OK"
|
echo OK
|
||||||
|
|
||||||
# Undo database modification
|
# Undo database modification
|
||||||
echo "UPDATE exchange.recoup SET amount_val=2 WHERE recoup_uuid=1" | psql -Aqt "$DB"
|
echo "UPDATE exchange.recoup SET amount_val=2 WHERE recoup_uuid=1" | psql -Aqt $DB
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -583,26 +525,26 @@ function test_2() {
|
|||||||
function test_3() {
|
function test_3() {
|
||||||
|
|
||||||
echo "===========3: recoup-refresh amount inconsistency==========="
|
echo "===========3: recoup-refresh amount inconsistency==========="
|
||||||
echo "UPDATE exchange.recoup_refresh SET amount_val=5 WHERE recoup_refresh_uuid=1" | psql -Aqt "$DB"
|
echo "UPDATE exchange.recoup_refresh SET amount_val=5 WHERE recoup_refresh_uuid=1" | psql -Aqt $DB
|
||||||
|
|
||||||
run_audit
|
run_audit
|
||||||
|
|
||||||
echo -n "Testing inconsistency detection... "
|
echo -n "Testing inconsistency detection... "
|
||||||
# Coin spent exceeded coin's value
|
# Coin spent exceeded coin's value
|
||||||
AMOUNT=$(jq -r .total_arithmetic_delta_minus < test-audit-coins.json)
|
AMOUNT=`jq -r .total_arithmetic_delta_minus < test-audit-coins.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:5" ]
|
if test $AMOUNT != "TESTKUDOS:5"
|
||||||
then
|
then
|
||||||
exit_fail "Arithmetic delta minus amount $AMOUNT is wrong"
|
exit_fail "Arithmetic delta minus amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
AMOUNT=$(jq -r .total_arithmetic_delta_plus < test-audit-coins.json)
|
AMOUNT=`jq -r .total_arithmetic_delta_plus < test-audit-coins.json`
|
||||||
if [ "$AMOUNT" != "TESTKUDOS:0" ]
|
if test $AMOUNT != "TESTKUDOS:0"
|
||||||
then
|
then
|
||||||
exit_fail "Arithmetic delta plus amount $AMOUNT is wrong"
|
exit_fail "Arithmetic delta plus amount $AMOUNT is wrong"
|
||||||
fi
|
fi
|
||||||
echo "OK"
|
echo OK
|
||||||
|
|
||||||
# Undo database modification
|
# Undo database modification
|
||||||
echo "UPDATE exchange.recoup_refresh SET amount_val=0 WHERE recoup_refresh_uuid=1" | psql -Aqt "$DB"
|
echo "UPDATE exchange.recoup_refresh SET amount_val=0 WHERE recoup_refresh_uuid=1" | psql -Aqt $DB
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -611,35 +553,34 @@ function test_3() {
|
|||||||
function test_4() {
|
function test_4() {
|
||||||
|
|
||||||
echo "===========4: invalid recoup==========="
|
echo "===========4: invalid recoup==========="
|
||||||
echo "DELETE FROM exchange.denomination_revocations;" | psql -Aqt "$DB"
|
echo "DELETE FROM exchange.denomination_revocations;" | psql -Aqt $DB
|
||||||
|
|
||||||
run_audit
|
run_audit
|
||||||
|
|
||||||
echo -n "Testing inconsistency detection... "
|
echo -n "Testing inconsistency detection... "
|
||||||
# Coin spent exceeded coin's value
|
# Coin spent exceeded coin's value
|
||||||
jq -e .bad_sig_losses[0] \
|
jq -e .bad_sig_losses[0] < test-audit-coins.json > /dev/null || exit_fail "Bad recoup not detected"
|
||||||
< test-audit-coins.json \
|
AMOUNT=`jq -r .irregular_loss < test-audit-coins.json`
|
||||||
> /dev/null \
|
if test $AMOUNT == "TESTKUDOS:0"
|
||||||
|| exit_fail "Bad recoup not detected"
|
|
||||||
AMOUNT=$(jq -r .irregular_loss < test-audit-coins.json)
|
|
||||||
if [ "$AMOUNT" == "TESTKUDOS:0" ]
|
|
||||||
then
|
then
|
||||||
exit_fail "Total bad sig losses are wrong"
|
exit_fail "Total bad sig losses are wrong"
|
||||||
fi
|
fi
|
||||||
TAB=$(jq -r .row_inconsistencies[0].table < test-audit-reserves.json)
|
TAB=`jq -r .row_inconsistencies[0].table < test-audit-reserves.json`
|
||||||
if [ "$TAB" != "recoup" ]
|
if test $TAB != "recoup"
|
||||||
then
|
then
|
||||||
exit_fail "Wrong table for row inconsistency, got $TAB"
|
exit_fail "Wrong table for row inconsistency, got $TAB"
|
||||||
fi
|
fi
|
||||||
echo "OK"
|
echo OK
|
||||||
|
|
||||||
# Undo database modification (can't easily undo DELETE, so full reload)
|
# Undo database modification (can't easily undo DELETE, so full reload)
|
||||||
full_reload
|
full_reload
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# *************** Main test loop starts here **************
|
# *************** Main test loop starts here **************
|
||||||
|
|
||||||
|
|
||||||
@ -647,14 +588,14 @@ function test_4() {
|
|||||||
# Sets $fail to 0 on success, non-zero on failure.
|
# Sets $fail to 0 on success, non-zero on failure.
|
||||||
function check_with_database()
|
function check_with_database()
|
||||||
{
|
{
|
||||||
BASEDB="$1"
|
BASEDB=$1
|
||||||
# Configuration file to use
|
# Configuration file to use
|
||||||
CONF="$1.conf"
|
CONF=$1.conf
|
||||||
echo "Running test suite with database $BASEDB using configuration $CONF"
|
echo "Running test suite with database $BASEDB using configuration $CONF"
|
||||||
|
|
||||||
MASTER_PRIV_FILE="${BASEDB}.mpriv"
|
MASTER_PRIV_FILE=${BASEDB}.mpriv
|
||||||
taler-config -f -c "${CONF}" -s exchange-offline -o MASTER_PRIV_FILE -V "${MASTER_PRIV_FILE}"
|
taler-config -f -c ${CONF} -s exchange-offline -o MASTER_PRIV_FILE -V ${MASTER_PRIV_FILE}
|
||||||
MASTER_PUB=$(gnunet-ecc -p "$MASTER_PRIV_FILE")
|
MASTER_PUB=`gnunet-ecc -p $MASTER_PRIV_FILE`
|
||||||
|
|
||||||
echo "MASTER PUB is ${MASTER_PUB} using file ${MASTER_PRIV_FILE}"
|
echo "MASTER PUB is ${MASTER_PUB} using file ${MASTER_PRIV_FILE}"
|
||||||
|
|
||||||
@ -664,14 +605,14 @@ function check_with_database()
|
|||||||
fail=0
|
fail=0
|
||||||
for i in $TESTS
|
for i in $TESTS
|
||||||
do
|
do
|
||||||
"test_$i"
|
test_$i
|
||||||
if [ 0 != "$fail" ]
|
if test 0 != $fail
|
||||||
then
|
then
|
||||||
break
|
break
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
# echo "Cleanup (disabled, leaving database $DB behind)"
|
# echo "Cleanup (disabled, leaving database $DB behind)"
|
||||||
dropdb "$DB"
|
dropdb $DB
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -687,49 +628,36 @@ DB=revoke-basedb
|
|||||||
echo "Testing for jq"
|
echo "Testing for jq"
|
||||||
jq -h > /dev/null || exit_skip "jq required"
|
jq -h > /dev/null || exit_skip "jq required"
|
||||||
echo "Testing for faketime"
|
echo "Testing for faketime"
|
||||||
faketime -h > /dev/null \
|
faketime -h > /dev/null || exit_skip "faketime required"
|
||||||
|| exit_skip "faketime required"
|
|
||||||
echo "Testing for libeufin(-cli)"
|
echo "Testing for libeufin(-cli)"
|
||||||
libeufin-cli --help \
|
libeufin-cli --help >/dev/null 2> /dev/null </dev/null || exit_skip "libeufin required"
|
||||||
>/dev/null \
|
|
||||||
2> /dev/null \
|
|
||||||
</dev/null \
|
|
||||||
|| exit_skip "libeufin required"
|
|
||||||
echo "Testing for pdflatex"
|
echo "Testing for pdflatex"
|
||||||
which pdflatex > /dev/null </dev/null || exit_skip "pdflatex required"
|
which pdflatex > /dev/null </dev/null || exit_skip "pdflatex required"
|
||||||
echo "Testing for taler-wallet-cli"
|
echo "Testing for taler-wallet-cli"
|
||||||
taler-wallet-cli -h \
|
taler-wallet-cli -h >/dev/null </dev/null 2>/dev/null || exit_skip "taler-wallet-cli required"
|
||||||
>/dev/null \
|
|
||||||
</dev/null \
|
|
||||||
2>/dev/null \
|
|
||||||
|| exit_skip "taler-wallet-cli required"
|
|
||||||
|
|
||||||
echo -n "Testing for Postgres "
|
echo -n "Testing for Postgres"
|
||||||
# Available directly in path?
|
# Available directly in path?
|
||||||
INITDB_BIN=$(command -v initdb) || true
|
INITDB_BIN=$(command -v initdb) || true
|
||||||
if [[ -n "$INITDB_BIN" ]]; then
|
if [[ ! -z "$INITDB_BIN" ]]; then
|
||||||
echo "FOUND (in path) at $INITDB_BIN"
|
echo " FOUND (in path) at" $INITDB_BIN
|
||||||
else
|
else
|
||||||
HAVE_INITDB=$(find /usr -name "initdb" | head -1 2> /dev/null | grep postgres) || exit_skip " MISSING"
|
HAVE_INITDB=`find /usr -name "initdb" | head -1 2> /dev/null | grep postgres` || exit_skip " MISSING"
|
||||||
echo "FOUND at " "$(dirname "$HAVE_INITDB")"
|
echo " FOUND at" `dirname $HAVE_INITDB`
|
||||||
INITDB_BIN=$(echo "$HAVE_INITDB" | grep bin/initdb | grep postgres | sort -n | tail -n1)
|
INITDB_BIN=`echo $HAVE_INITDB | grep bin/initdb | grep postgres | sort -n | tail -n1`
|
||||||
fi
|
fi
|
||||||
echo -n "Setting up Postgres DB"
|
echo -n "Setting up Postgres DB"
|
||||||
POSTGRES_PATH=$(dirname "$INITDB_BIN")
|
POSTGRES_PATH=`dirname $INITDB_BIN`
|
||||||
MY_TMP_DIR=$(mktemp -d /tmp/taler-auditor-basedbXXXXXX)
|
ORIGIN=`pwd`
|
||||||
TMPDIR="${MY_TMP_DIR}/postgres/"
|
MYDIR=`mktemp -d /tmp/taler-auditor-basedbXXXXXX`
|
||||||
mkdir -p "$TMPDIR"
|
TMPDIR="${MYDIR}/postgres/"
|
||||||
|
mkdir -p $TMPDIR
|
||||||
echo -n "Setting up Postgres DB at $TMPDIR ..."
|
echo -n "Setting up Postgres DB at $TMPDIR ..."
|
||||||
"$INITDB_BIN" \
|
$INITDB_BIN --no-sync --auth=trust -D ${TMPDIR} > ${MYDIR}/postgres-dbinit.log 2> ${MYDIR}/postgres-dbinit.err
|
||||||
--no-sync \
|
|
||||||
--auth=trust \
|
|
||||||
-D "${TMPDIR}" \
|
|
||||||
> "${MY_TMP_DIR}/postgres-dbinit.log" \
|
|
||||||
2> "${MY_TMP_DIR}/postgres-dbinit.err"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
mkdir "${TMPDIR}/sockets"
|
mkdir ${TMPDIR}/sockets
|
||||||
echo -n "Launching Postgres service at $POSTGRES_PATH"
|
echo -n "Launching Postgres service at $POSTGRES_PATH"
|
||||||
cat - >> "$TMPDIR/postgresql.conf" <<EOF
|
cat - >> $TMPDIR/postgresql.conf <<EOF
|
||||||
unix_socket_directories='${TMPDIR}/sockets'
|
unix_socket_directories='${TMPDIR}/sockets'
|
||||||
fsync=off
|
fsync=off
|
||||||
max_wal_senders=0
|
max_wal_senders=0
|
||||||
@ -737,30 +665,23 @@ synchronous_commit=off
|
|||||||
wal_level=minimal
|
wal_level=minimal
|
||||||
listen_addresses=''
|
listen_addresses=''
|
||||||
EOF
|
EOF
|
||||||
grep -v host \
|
cat $TMPDIR/pg_hba.conf | grep -v host > $TMPDIR/pg_hba.conf.new
|
||||||
< "$TMPDIR/pg_hba.conf" \
|
mv $TMPDIR/pg_hba.conf.new $TMPDIR/pg_hba.conf
|
||||||
> "$TMPDIR/pg_hba.conf.new"
|
${POSTGRES_PATH}/pg_ctl -D $TMPDIR -l /dev/null start > ${MYDIR}/postgres-start.log 2> ${MYDIR}/postgres-start.err
|
||||||
mv "$TMPDIR/pg_hba.conf.new" "$TMPDIR/pg_hba.conf"
|
|
||||||
"${POSTGRES_PATH}/pg_ctl" \
|
|
||||||
-D "$TMPDIR" \
|
|
||||||
-l /dev/null \
|
|
||||||
start \
|
|
||||||
> "${MY_TMP_DIR}/postgres-start.log" \
|
|
||||||
2> "${MY_TMP_DIR}/postgres-start.err"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
PGHOST="$TMPDIR/sockets"
|
PGHOST="$TMPDIR/sockets"
|
||||||
export PGHOST
|
export PGHOST
|
||||||
|
|
||||||
echo "Generating fresh database at $MY_TMP_DIR"
|
echo "Generating fresh database at $MYDIR"
|
||||||
if faketime -f '-1 d' ./generate-revoke-basedb.sh "$MY_TMP_DIR/$DB"
|
if faketime -f '-1 d' ./generate-revoke-basedb.sh $MYDIR/$DB
|
||||||
then
|
then
|
||||||
check_with_database "$MY_TMP_DIR/$DB"
|
check_with_database $MYDIR/$DB
|
||||||
if [ "x$fail" != "x0" ]
|
if test x$fail != x0
|
||||||
then
|
then
|
||||||
exit "$fail"
|
exit $fail
|
||||||
else
|
else
|
||||||
echo "Cleaning up $MY_TMP_DIR..."
|
echo "Cleaning up $MYDIR..."
|
||||||
rm -rf "$MY_TMP_DIR" || echo "Removing $MY_TMP_DIR failed"
|
rm -rf $MYDIR || echo "Removing $MYDIR failed"
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "Generation failed"
|
echo "Generation failed"
|
||||||
|
@ -1,7 +1,8 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
#
|
#
|
||||||
# This file is part of TALER
|
# This file is part of TALER
|
||||||
# Copyright (C) 2014-2023 Taler Systems SA
|
# Copyright (C) 2014-2021 Taler Systems SA
|
||||||
#
|
#
|
||||||
# TALER is free software; you can redistribute it and/or modify it under the
|
# TALER is free software; you can redistribute it and/or modify it under the
|
||||||
# terms of the GNU General Public License as published by the Free Software
|
# terms of the GNU General Public License as published by the Free Software
|
||||||
@ -14,7 +15,6 @@
|
|||||||
# You should have received a copy of the GNU General Public License along with
|
# You should have received a copy of the GNU General Public License along with
|
||||||
# TALER; see the file COPYING. If not, If not, see <http://www.gnu.org/license>
|
# TALER; see the file COPYING. If not, If not, see <http://www.gnu.org/license>
|
||||||
#
|
#
|
||||||
# shellcheck disable=SC2317
|
|
||||||
|
|
||||||
set -eu
|
set -eu
|
||||||
|
|
||||||
@ -32,13 +32,13 @@ function exit_fail() {
|
|||||||
|
|
||||||
# Cleanup to run whenever we exit
|
# Cleanup to run whenever we exit
|
||||||
function cleanup() {
|
function cleanup() {
|
||||||
if [ -n "${POSTGRES_PATH:-}" ]
|
if test ! -z "${POSTGRES_PATH:-}"
|
||||||
then
|
then
|
||||||
"${POSTGRES_PATH}/pg_ctl" -D "$TMPDIR" stop &> /dev/null || true
|
${POSTGRES_PATH}/pg_ctl -D $TMPDIR stop &> /dev/null || true
|
||||||
fi
|
fi
|
||||||
for n in $(jobs -p)
|
for n in `jobs -p`
|
||||||
do
|
do
|
||||||
kill "$n" 2> /dev/null || true
|
kill $n 2> /dev/null || true
|
||||||
done
|
done
|
||||||
wait
|
wait
|
||||||
}
|
}
|
||||||
@ -59,25 +59,19 @@ function check_with_database()
|
|||||||
|
|
||||||
taler-exchange-dbinit -c test-sync-out.conf
|
taler-exchange-dbinit -c test-sync-out.conf
|
||||||
echo -n "."
|
echo -n "."
|
||||||
psql -Aqt talercheck-in \
|
psql -Aqt talercheck-in -q -1 -f $1.sql >/dev/null || exit_skip "Failed to load database"
|
||||||
-q -1 \
|
|
||||||
-f "$1.sql" \
|
|
||||||
>/dev/null \
|
|
||||||
|| exit_skip "Failed to load database"
|
|
||||||
|
|
||||||
echo -n "."
|
echo -n "."
|
||||||
taler-auditor-sync \
|
taler-auditor-sync -s test-sync-in.conf -d test-sync-out.conf -t
|
||||||
-s test-sync-in.conf \
|
|
||||||
-d test-sync-out.conf -t
|
|
||||||
|
|
||||||
# cs_nonce_locks excluded: no point
|
# cs_nonce_locks excluded: no point
|
||||||
for table in denominations denomination_revocations wire_targets reserves reserves_in reserves_close reserves_out auditors auditor_denom_sigs exchange_sign_keys signkey_revocations extensions policy_details policy_fulfillments known_coins refresh_commitments refresh_revealed_coins refresh_transfer_keys deposits refunds wire_out aggregation_tracking wire_fee recoup recoup_refresh
|
for table in denominations denomination_revocations wire_targets reserves reserves_in reserves_close reserves_out auditors auditor_denom_sigs exchange_sign_keys signkey_revocations extensions policy_details policy_fulfillments known_coins refresh_commitments refresh_revealed_coins refresh_transfer_keys deposits refunds wire_out aggregation_tracking wire_fee recoup recoup_refresh
|
||||||
do
|
do
|
||||||
echo -n "."
|
echo -n "."
|
||||||
CIN=$(echo "SELECT COUNT(*) FROM exchange.$table" | psql talercheck-in -Aqt)
|
CIN=`echo "SELECT COUNT(*) FROM exchange.$table" | psql talercheck-in -Aqt`
|
||||||
COUT=$(echo "SELECT COUNT(*) FROM exchange.$table" | psql talercheck-out -Aqt)
|
COUT=`echo "SELECT COUNT(*) FROM exchange.$table" | psql talercheck-out -Aqt`
|
||||||
|
|
||||||
if [ "${CIN}" != "${COUT}" ]
|
if test ${CIN} != ${COUT}
|
||||||
then
|
then
|
||||||
dropdb talercheck-in
|
dropdb talercheck-in
|
||||||
dropdb talercheck-out
|
dropdb talercheck-out
|
||||||
@ -94,6 +88,14 @@ function check_with_database()
|
|||||||
fail=0
|
fail=0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Postgres database to use
|
||||||
|
DB=auditor-basedb
|
||||||
|
|
||||||
|
# Configuration file to use
|
||||||
|
CONF=${DB}.conf
|
||||||
|
|
||||||
# test required commands exist
|
# test required commands exist
|
||||||
echo "Testing for jq"
|
echo "Testing for jq"
|
||||||
jq -h > /dev/null || exit_skip "jq required"
|
jq -h > /dev/null || exit_skip "jq required"
|
||||||
@ -109,25 +111,23 @@ taler-wallet-cli -h >/dev/null </dev/null 2>/dev/null || exit_skip "taler-wallet
|
|||||||
echo -n "Testing for Postgres"
|
echo -n "Testing for Postgres"
|
||||||
# Available directly in path?
|
# Available directly in path?
|
||||||
INITDB_BIN=$(command -v initdb) || true
|
INITDB_BIN=$(command -v initdb) || true
|
||||||
if [[ -n "$INITDB_BIN" ]]; then
|
if [[ ! -z "$INITDB_BIN" ]]; then
|
||||||
echo " FOUND (in path) at $INITDB_BIN"
|
echo " FOUND (in path) at" $INITDB_BIN
|
||||||
else
|
else
|
||||||
HAVE_INITDB=$(find /usr -name "initdb" | head -1 2> /dev/null | grep postgres) || exit_skip " MISSING"
|
HAVE_INITDB=`find /usr -name "initdb" | head -1 2> /dev/null | grep postgres` || exit_skip " MISSING"
|
||||||
echo " FOUND at " "$(dirname "$HAVE_INITDB")"
|
echo " FOUND at" `dirname $HAVE_INITDB`
|
||||||
INITDB_BIN=$(echo "$HAVE_INITDB" | grep bin/initdb | grep postgres | sort -n | tail -n1)
|
INITDB_BIN=`echo $HAVE_INITDB | grep bin/initdb | grep postgres | sort -n | tail -n1`
|
||||||
fi
|
fi
|
||||||
echo -n "Setting up Postgres DB"
|
echo -n "Setting up Postgres DB"
|
||||||
POSTGRES_PATH=$(dirname "$INITDB_BIN")
|
POSTGRES_PATH=`dirname $INITDB_BIN`
|
||||||
MYDIR=$(mktemp -d /tmp/taler-auditor-basedbXXXXXX)
|
MYDIR=`mktemp -d /tmp/taler-auditor-basedbXXXXXX`
|
||||||
TMPDIR="$MYDIR/postgres/"
|
TMPDIR="$MYDIR/postgres/"
|
||||||
mkdir -p "$TMPDIR"
|
mkdir -p $TMPDIR
|
||||||
"$INITDB_BIN" --no-sync --auth=trust -D "${TMPDIR}" \
|
$INITDB_BIN --no-sync --auth=trust -D ${TMPDIR} > ${MYDIR}/postgres-dbinit.log 2> ${MYDIR}/postgres-dbinit.err
|
||||||
> "${MYDIR}/postgres-dbinit.log" \
|
|
||||||
2> "${MYDIR}/postgres-dbinit.err"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
mkdir "${TMPDIR}/sockets"
|
mkdir ${TMPDIR}/sockets
|
||||||
echo -n "Launching Postgres service"
|
echo -n "Launching Postgres service"
|
||||||
cat - >> "$TMPDIR/postgresql.conf" <<EOF
|
cat - >> $TMPDIR/postgresql.conf <<EOF
|
||||||
unix_socket_directories='${TMPDIR}/sockets'
|
unix_socket_directories='${TMPDIR}/sockets'
|
||||||
fsync=off
|
fsync=off
|
||||||
max_wal_senders=0
|
max_wal_senders=0
|
||||||
@ -135,30 +135,23 @@ synchronous_commit=off
|
|||||||
wal_level=minimal
|
wal_level=minimal
|
||||||
listen_addresses=''
|
listen_addresses=''
|
||||||
EOF
|
EOF
|
||||||
grep -v host \
|
cat $TMPDIR/pg_hba.conf | grep -v host > $TMPDIR/pg_hba.conf.new
|
||||||
< "$TMPDIR/pg_hba.conf" \
|
mv $TMPDIR/pg_hba.conf.new $TMPDIR/pg_hba.conf
|
||||||
> "$TMPDIR/pg_hba.conf.new"
|
${POSTGRES_PATH}/pg_ctl -D $TMPDIR -l /dev/null start > ${MYDIR}/postgres-start.log 2> ${MYDIR}/postgres-start.err
|
||||||
mv "$TMPDIR/pg_hba.conf.new" "$TMPDIR/pg_hba.conf"
|
|
||||||
"${POSTGRES_PATH}/pg_ctl" \
|
|
||||||
-D "$TMPDIR" \
|
|
||||||
-l /dev/null \
|
|
||||||
start \
|
|
||||||
> "${MYDIR}/postgres-start.log" \
|
|
||||||
2> "${MYDIR}/postgres-start.err"
|
|
||||||
echo " DONE"
|
echo " DONE"
|
||||||
PGHOST="$TMPDIR/sockets"
|
PGHOST="$TMPDIR/sockets"
|
||||||
export PGHOST
|
export PGHOST
|
||||||
|
|
||||||
echo "Generating fresh database at $MYDIR"
|
echo "Generating fresh database at $MYDIR"
|
||||||
if faketime -f '-1 d' ./generate-auditor-basedb.sh -d "$MYDIR/auditor-basedb"
|
if faketime -f '-1 d' ./generate-auditor-basedb.sh $MYDIR/auditor-basedb
|
||||||
then
|
then
|
||||||
check_with_database "$MYDIR/auditor-basedb"
|
check_with_database $MYDIR/auditor-basedb
|
||||||
if [ x$fail != x0 ]
|
if test x$fail != x0
|
||||||
then
|
then
|
||||||
exit "$fail"
|
exit $fail
|
||||||
else
|
else
|
||||||
echo "Cleaning up $MYDIR..."
|
echo "Cleaning up $MYDIR..."
|
||||||
rm -rf "$MYDIR" || echo "Removing $MYDIR failed"
|
rm -rf $MYDIR || echo "Removing $MYDIR failed"
|
||||||
fi
|
fi
|
||||||
else
|
else
|
||||||
echo "Generation failed"
|
echo "Generation failed"
|
||||||
|
@ -499,8 +499,6 @@ kyc_satisfied (struct AggregationUnit *au_active)
|
|||||||
char *requirement;
|
char *requirement;
|
||||||
enum GNUNET_DB_QueryStatus qs;
|
enum GNUNET_DB_QueryStatus qs;
|
||||||
|
|
||||||
if (kyc_off)
|
|
||||||
return true;
|
|
||||||
qs = TALER_KYCLOGIC_kyc_test_required (
|
qs = TALER_KYCLOGIC_kyc_test_required (
|
||||||
TALER_KYCLOGIC_KYC_TRIGGER_DEPOSIT,
|
TALER_KYCLOGIC_KYC_TRIGGER_DEPOSIT,
|
||||||
&au_active->h_payto,
|
&au_active->h_payto,
|
||||||
|
@ -876,7 +876,7 @@ sign_and_do_age_withdraw (
|
|||||||
/* Prepare the hashes of the coins for insertion */
|
/* Prepare the hashes of the coins for insertion */
|
||||||
for (uint32_t i = 0; i<awc->num_coins; i++)
|
for (uint32_t i = 0; i<awc->num_coins; i++)
|
||||||
{
|
{
|
||||||
TALER_coin_ev_hash (&awc->coin_evs[TALER_CNC_KAPPA * i + noreveal_index],
|
TALER_coin_ev_hash (&awc->coin_evs[i],
|
||||||
&awc->denom_hs[i],
|
&awc->denom_hs[i],
|
||||||
&h_coin_evs[i]);
|
&h_coin_evs[i]);
|
||||||
}
|
}
|
||||||
|
@ -71,7 +71,7 @@ struct AgeRevealContext
|
|||||||
* The data from the original age-withdraw. Will be retrieved from
|
* The data from the original age-withdraw. Will be retrieved from
|
||||||
* the DB via @a ach and @a reserve_pub.
|
* the DB via @a ach and @a reserve_pub.
|
||||||
*/
|
*/
|
||||||
struct TALER_EXCHANGEDB_AgeWithdraw commitment;
|
struct TALER_EXCHANGEDB_AgeWithdraw *commitment;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
@ -106,8 +106,11 @@ parse_age_withdraw_reveal_json (
|
|||||||
error = "disclosed_coin_secrets must be an array";
|
error = "disclosed_coin_secrets must be an array";
|
||||||
else if (num_entries == 0)
|
else if (num_entries == 0)
|
||||||
error = "disclosed_coin_secrets must not be empty";
|
error = "disclosed_coin_secrets must not be empty";
|
||||||
else if (num_entries > TALER_MAX_FRESH_COINS)
|
else if (num_entries > TALER_MAX_FRESH_COINS * (TALER_CNC_KAPPA - 1))
|
||||||
error = "maximum number of coins that can be withdrawn has been exceeded";
|
error = "maximum number of coins that can be withdrawn has been exceeded";
|
||||||
|
else if (0 != num_entries % (TALER_CNC_KAPPA - 1))
|
||||||
|
error = "the size of disclosed_coin_secrets must be a multiple of "
|
||||||
|
TALER_CNC_KAPPA_MINUS_ONE_STR;
|
||||||
|
|
||||||
if (NULL != error)
|
if (NULL != error)
|
||||||
{
|
{
|
||||||
@ -117,26 +120,29 @@ parse_age_withdraw_reveal_json (
|
|||||||
return GNUNET_SYSERR;
|
return GNUNET_SYSERR;
|
||||||
}
|
}
|
||||||
|
|
||||||
actx->num_secrets = num_entries * (TALER_CNC_KAPPA - 1);
|
actx->num_secrets = num_entries;
|
||||||
actx->num_coins = num_entries;
|
actx->num_coins = num_entries / (TALER_CNC_KAPPA - 1);
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Continue parsing the parts */
|
/* Continue parsing the parts */
|
||||||
{
|
{
|
||||||
unsigned int idx = 0;
|
unsigned int idx = 0;
|
||||||
unsigned int k = 0;
|
|
||||||
json_t *array = NULL;
|
|
||||||
json_t *value = NULL;
|
json_t *value = NULL;
|
||||||
|
|
||||||
/* Parse diclosed keys */
|
/* Parse diclosed keys */
|
||||||
actx->disclosed_coin_secrets =
|
actx->disclosed_coin_secrets =
|
||||||
GNUNET_new_array (actx->num_secrets,
|
GNUNET_new_array (num_entries,
|
||||||
struct TALER_PlanchetMasterSecretP);
|
struct TALER_PlanchetMasterSecretP);
|
||||||
|
|
||||||
json_array_foreach (j_disclosed_coin_secrets, idx, array) {
|
json_array_foreach (j_disclosed_coin_secrets, idx, value) {
|
||||||
if (! json_is_array (array) ||
|
struct GNUNET_JSON_Specification spec[] = {
|
||||||
(TALER_CNC_KAPPA - 1 != json_array_size (array)))
|
GNUNET_JSON_spec_fixed_auto (NULL, &actx->disclosed_coin_secrets[idx]),
|
||||||
|
GNUNET_JSON_spec_end ()
|
||||||
|
};
|
||||||
|
|
||||||
|
if (GNUNET_OK !=
|
||||||
|
GNUNET_JSON_parse (value, spec, NULL, NULL))
|
||||||
{
|
{
|
||||||
char msg[256] = {0};
|
char msg[256] = {0};
|
||||||
GNUNET_snprintf (msg,
|
GNUNET_snprintf (msg,
|
||||||
@ -147,32 +153,6 @@ parse_age_withdraw_reveal_json (
|
|||||||
TALER_EC_GENERIC_PARAMETER_MALFORMED,
|
TALER_EC_GENERIC_PARAMETER_MALFORMED,
|
||||||
msg);
|
msg);
|
||||||
goto EXIT;
|
goto EXIT;
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
json_array_foreach (array, k, value)
|
|
||||||
{
|
|
||||||
struct TALER_PlanchetMasterSecretP *secret =
|
|
||||||
&actx->disclosed_coin_secrets[2 * idx + k];
|
|
||||||
struct GNUNET_JSON_Specification spec[] = {
|
|
||||||
GNUNET_JSON_spec_fixed_auto (NULL, secret),
|
|
||||||
GNUNET_JSON_spec_end ()
|
|
||||||
};
|
|
||||||
|
|
||||||
if (GNUNET_OK !=
|
|
||||||
GNUNET_JSON_parse (value, spec, NULL, NULL))
|
|
||||||
{
|
|
||||||
char msg[256] = {0};
|
|
||||||
GNUNET_snprintf (msg,
|
|
||||||
sizeof(msg),
|
|
||||||
"couldn't parse entry no. %d in array disclosed_coin_secrets[%d]",
|
|
||||||
k + 1,
|
|
||||||
idx + 1);
|
|
||||||
*mhd_ret = TALER_MHD_reply_with_ec (connection,
|
|
||||||
TALER_EC_GENERIC_PARAMETER_MALFORMED,
|
|
||||||
msg);
|
|
||||||
goto EXIT;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@ -202,7 +182,7 @@ find_original_commitment (
|
|||||||
struct MHD_Connection *connection,
|
struct MHD_Connection *connection,
|
||||||
const struct TALER_AgeWithdrawCommitmentHashP *h_commitment,
|
const struct TALER_AgeWithdrawCommitmentHashP *h_commitment,
|
||||||
const struct TALER_ReservePublicKeyP *reserve_pub,
|
const struct TALER_ReservePublicKeyP *reserve_pub,
|
||||||
struct TALER_EXCHANGEDB_AgeWithdraw *commitment,
|
struct TALER_EXCHANGEDB_AgeWithdraw **commitment,
|
||||||
MHD_RESULT *result)
|
MHD_RESULT *result)
|
||||||
{
|
{
|
||||||
enum GNUNET_DB_QueryStatus qs;
|
enum GNUNET_DB_QueryStatus qs;
|
||||||
@ -212,7 +192,7 @@ find_original_commitment (
|
|||||||
qs = TEH_plugin->get_age_withdraw (TEH_plugin->cls,
|
qs = TEH_plugin->get_age_withdraw (TEH_plugin->cls,
|
||||||
reserve_pub,
|
reserve_pub,
|
||||||
h_commitment,
|
h_commitment,
|
||||||
commitment);
|
*commitment);
|
||||||
switch (qs)
|
switch (qs)
|
||||||
{
|
{
|
||||||
case GNUNET_DB_STATUS_SUCCESS_ONE_RESULT:
|
case GNUNET_DB_STATUS_SUCCESS_ONE_RESULT:
|
||||||
@ -282,13 +262,7 @@ calculate_blinded_hash (
|
|||||||
connection,
|
connection,
|
||||||
result);
|
result);
|
||||||
if (NULL == denom_key)
|
if (NULL == denom_key)
|
||||||
{
|
|
||||||
GNUNET_break_op (0);
|
|
||||||
*result = TALER_MHD_reply_with_ec (connection,
|
|
||||||
TALER_EC_EXCHANGE_GENERIC_KEYS_MISSING,
|
|
||||||
NULL);
|
|
||||||
return GNUNET_SYSERR;
|
return GNUNET_SYSERR;
|
||||||
}
|
|
||||||
|
|
||||||
/* calculate age commitment hash */
|
/* calculate age commitment hash */
|
||||||
{
|
{
|
||||||
@ -312,10 +286,10 @@ calculate_blinded_hash (
|
|||||||
|
|
||||||
/* Next: calculate planchet */
|
/* Next: calculate planchet */
|
||||||
{
|
{
|
||||||
struct TALER_CoinPubHashP c_hash = {0};
|
struct TALER_CoinPubHashP c_hash;
|
||||||
struct TALER_PlanchetDetail detail = {0};
|
struct TALER_PlanchetDetail detail;
|
||||||
struct TALER_CoinSpendPrivateKeyP coin_priv = {0};
|
struct TALER_CoinSpendPrivateKeyP coin_priv;
|
||||||
union TALER_DenominationBlindingKeyP bks = {0};
|
union TALER_DenominationBlindingKeyP bks;
|
||||||
struct TALER_ExchangeWithdrawValues alg_values = {
|
struct TALER_ExchangeWithdrawValues alg_values = {
|
||||||
.cipher = denom_key->denom_pub.cipher,
|
.cipher = denom_key->denom_pub.cipher,
|
||||||
};
|
};
|
||||||
@ -324,23 +298,24 @@ calculate_blinded_hash (
|
|||||||
{
|
{
|
||||||
struct TALER_CsNonce nonce;
|
struct TALER_CsNonce nonce;
|
||||||
|
|
||||||
TALER_cs_withdraw_nonce_derive (secret,
|
TALER_cs_withdraw_nonce_derive (
|
||||||
&nonce);
|
secret,
|
||||||
|
&nonce);
|
||||||
|
|
||||||
{
|
{
|
||||||
|
enum TALER_ErrorCode ec;
|
||||||
struct TEH_CsDeriveData cdd = {
|
struct TEH_CsDeriveData cdd = {
|
||||||
.h_denom_pub = &denom_key->h_denom_pub,
|
.h_denom_pub = &denom_key->h_denom_pub,
|
||||||
.nonce = &nonce,
|
.nonce = &nonce,
|
||||||
};
|
};
|
||||||
|
|
||||||
GNUNET_assert (TALER_EC_NONE ==
|
ec = TEH_keys_denomination_cs_r_pub (&cdd,
|
||||||
TEH_keys_denomination_cs_r_pub (
|
false,
|
||||||
&cdd,
|
&alg_values.details.
|
||||||
false,
|
cs_values);
|
||||||
&alg_values.details.cs_values));
|
/* FIXME Handle error? */
|
||||||
|
GNUNET_assert (TALER_EC_NONE == ec);
|
||||||
}
|
}
|
||||||
|
|
||||||
detail.blinded_planchet.details.cs_blinded_planchet.nonce = nonce;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
TALER_planchet_blinding_secret_create (secret,
|
TALER_planchet_blinding_secret_create (secret,
|
||||||
@ -373,11 +348,10 @@ calculate_blinded_hash (
|
|||||||
ret = TALER_coin_ev_hash (&detail.blinded_planchet,
|
ret = TALER_coin_ev_hash (&detail.blinded_planchet,
|
||||||
&denom_key->h_denom_pub,
|
&denom_key->h_denom_pub,
|
||||||
bch);
|
bch);
|
||||||
|
|
||||||
GNUNET_assert (GNUNET_OK == ret);
|
GNUNET_assert (GNUNET_OK == ret);
|
||||||
}
|
}
|
||||||
|
|
||||||
return ret;
|
return GNUNET_SYSERR;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -443,9 +417,9 @@ verify_commitment_and_max_age (
|
|||||||
{
|
{
|
||||||
size_t i = 0; /* either 0 or 1, to index into coin_evs */
|
size_t i = 0; /* either 0 or 1, to index into coin_evs */
|
||||||
|
|
||||||
for (size_t k = 0; k<TALER_CNC_KAPPA; k++)
|
for (size_t gamma = 0; gamma<TALER_CNC_KAPPA; gamma++)
|
||||||
{
|
{
|
||||||
if (k == (size_t) commitment->noreveal_index)
|
if (gamma == (size_t) commitment->noreveal_index)
|
||||||
{
|
{
|
||||||
GNUNET_CRYPTO_hash_context_read (hash_context,
|
GNUNET_CRYPTO_hash_context_read (hash_context,
|
||||||
&commitment->h_coin_evs[coin_idx],
|
&commitment->h_coin_evs[coin_idx],
|
||||||
@ -458,7 +432,7 @@ verify_commitment_and_max_age (
|
|||||||
const struct TALER_PlanchetMasterSecretP *secret;
|
const struct TALER_PlanchetMasterSecretP *secret;
|
||||||
struct TALER_BlindedCoinHashP bch;
|
struct TALER_BlindedCoinHashP bch;
|
||||||
|
|
||||||
GNUNET_assert (2>i);
|
GNUNET_assert (i<2);
|
||||||
GNUNET_assert ((TALER_CNC_KAPPA - 1) * num_coins > j);
|
GNUNET_assert ((TALER_CNC_KAPPA - 1) * num_coins > j);
|
||||||
|
|
||||||
secret = &disclosed_coin_secrets[j];
|
secret = &disclosed_coin_secrets[j];
|
||||||
@ -504,7 +478,8 @@ verify_commitment_and_max_age (
|
|||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
return GNUNET_OK;
|
|
||||||
|
return ret;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -526,7 +501,7 @@ reply_age_withdraw_reveal_success (
|
|||||||
for (unsigned int i = 0; i < commitment->num_coins; i++)
|
for (unsigned int i = 0; i < commitment->num_coins; i++)
|
||||||
{
|
{
|
||||||
json_t *obj = GNUNET_JSON_PACK (
|
json_t *obj = GNUNET_JSON_PACK (
|
||||||
TALER_JSON_pack_blinded_denom_sig (NULL,
|
TALER_JSON_pack_blinded_denom_sig ("ev_sig",
|
||||||
&commitment->denom_sigs[i]));
|
&commitment->denom_sigs[i]));
|
||||||
GNUNET_assert (0 ==
|
GNUNET_assert (0 ==
|
||||||
json_array_append_new (list,
|
json_array_append_new (list,
|
||||||
@ -597,7 +572,7 @@ TEH_handler_age_withdraw_reveal (
|
|||||||
if (GNUNET_OK !=
|
if (GNUNET_OK !=
|
||||||
verify_commitment_and_max_age (
|
verify_commitment_and_max_age (
|
||||||
rc->connection,
|
rc->connection,
|
||||||
&actx.commitment,
|
actx.commitment,
|
||||||
actx.disclosed_coin_secrets,
|
actx.disclosed_coin_secrets,
|
||||||
actx.num_coins,
|
actx.num_coins,
|
||||||
&result))
|
&result))
|
||||||
@ -605,7 +580,7 @@ TEH_handler_age_withdraw_reveal (
|
|||||||
|
|
||||||
/* Finally, return the signatures */
|
/* Finally, return the signatures */
|
||||||
result = reply_age_withdraw_reveal_success (rc->connection,
|
result = reply_age_withdraw_reveal_success (rc->connection,
|
||||||
&actx.commitment);
|
actx.commitment);
|
||||||
|
|
||||||
} while(0);
|
} while(0);
|
||||||
|
|
||||||
|
@ -29,8 +29,6 @@ BEGIN
|
|||||||
'(age_withdraw_id BIGINT GENERATED BY DEFAULT AS IDENTITY'
|
'(age_withdraw_id BIGINT GENERATED BY DEFAULT AS IDENTITY'
|
||||||
',h_commitment BYTEA NOT NULL CONSTRAINT h_commitment_length CHECK(LENGTH(h_commitment)=64)'
|
',h_commitment BYTEA NOT NULL CONSTRAINT h_commitment_length CHECK(LENGTH(h_commitment)=64)'
|
||||||
',max_age SMALLINT NOT NULL CONSTRAINT max_age_positive CHECK(max_age>=0)'
|
',max_age SMALLINT NOT NULL CONSTRAINT max_age_positive CHECK(max_age>=0)'
|
||||||
',amount_with_fee_val INT8 NOT NULL'
|
|
||||||
',amount_with_fee_frac INT4 NOT NULL'
|
|
||||||
',reserve_pub BYTEA NOT NULL CONSTRAINT reserve_pub_length CHECK(LENGTH(reserve_pub)=32)'
|
',reserve_pub BYTEA NOT NULL CONSTRAINT reserve_pub_length CHECK(LENGTH(reserve_pub)=32)'
|
||||||
',reserve_sig BYTEA NOT NULL CONSTRAINT reserve_sig_length CHECK(LENGTH(reserve_sig)=64)'
|
',reserve_sig BYTEA NOT NULL CONSTRAINT reserve_sig_length CHECK(LENGTH(reserve_sig)=64)'
|
||||||
',noreveal_index SMALLINT NOT NULL CONSTRAINT noreveal_index_positive CHECK(noreveal_index>=0)'
|
',noreveal_index SMALLINT NOT NULL CONSTRAINT noreveal_index_positive CHECK(noreveal_index>=0)'
|
||||||
|
@ -143,8 +143,6 @@ WHERE
|
|||||||
INSERT INTO exchange.age_withdraw
|
INSERT INTO exchange.age_withdraw
|
||||||
(h_commitment
|
(h_commitment
|
||||||
,max_age
|
,max_age
|
||||||
,amount_with_fee_val
|
|
||||||
,amount_with_fee_frac
|
|
||||||
,reserve_pub
|
,reserve_pub
|
||||||
,reserve_sig
|
,reserve_sig
|
||||||
,noreveal_index
|
,noreveal_index
|
||||||
@ -154,8 +152,6 @@ INSERT INTO exchange.age_withdraw
|
|||||||
VALUES
|
VALUES
|
||||||
(h_commitment
|
(h_commitment
|
||||||
,maximum_age_committed
|
,maximum_age_committed
|
||||||
,amount_val
|
|
||||||
,amount_frac
|
|
||||||
,rpub
|
,rpub
|
||||||
,rsig
|
,rsig
|
||||||
,noreveal_index
|
,noreveal_index
|
||||||
|
@ -52,15 +52,15 @@ TEH_PG_do_age_withdraw (
|
|||||||
GNUNET_PQ_query_param_auto_from_type (&commitment->h_commitment),
|
GNUNET_PQ_query_param_auto_from_type (&commitment->h_commitment),
|
||||||
GNUNET_PQ_query_param_uint16 (&commitment->max_age),
|
GNUNET_PQ_query_param_uint16 (&commitment->max_age),
|
||||||
GNUNET_PQ_query_param_uint16 (&commitment->noreveal_index),
|
GNUNET_PQ_query_param_uint16 (&commitment->noreveal_index),
|
||||||
TALER_PQ_query_param_array_blinded_coin_hash (commitment->num_coins,
|
GNUNET_PQ_query_param_array_auto_from_type (commitment->num_coins,
|
||||||
commitment->h_coin_evs,
|
commitment->h_coin_evs,
|
||||||
pg->conn),
|
pg->conn),
|
||||||
GNUNET_PQ_query_param_array_uint64 (commitment->num_coins,
|
GNUNET_PQ_query_param_array_uint64 (commitment->num_coins,
|
||||||
commitment->denom_serials,
|
commitment->denom_serials,
|
||||||
pg->conn),
|
pg->conn),
|
||||||
TALER_PQ_query_param_array_blinded_denom_sig (commitment->num_coins,
|
GNUNET_PQ_query_param_array_auto_from_type (commitment->num_coins,
|
||||||
commitment->denom_sigs,
|
commitment->denom_sigs,
|
||||||
pg->conn),
|
pg->conn),
|
||||||
GNUNET_PQ_query_param_end
|
GNUNET_PQ_query_param_end
|
||||||
};
|
};
|
||||||
struct GNUNET_PQ_ResultSpec rs[] = {
|
struct GNUNET_PQ_ResultSpec rs[] = {
|
||||||
|
@@ -52,26 +52,27 @@ TEH_PG_get_age_withdraw (
&aw->amount_with_fee),
GNUNET_PQ_result_spec_uint16 ("noreveal_index",
&aw->noreveal_index),
-TALER_PQ_result_spec_array_blinded_coin_hash (
+GNUNET_PQ_result_spec_array_fixed_size (
pg->conn,
-"h_blind_evs",
+"h_coin_evs",
+sizeof(struct TALER_BlindedPlanchet),
&aw->num_coins,
-&aw->h_coin_evs),
+(void **) &aw->h_coin_evs),
-TALER_PQ_result_spec_array_blinded_denom_sig (
+GNUNET_PQ_result_spec_array_fixed_size (
pg->conn,
"denom_sigs",
-NULL, /* we assume that this is the same size as h_coin_evs */
+sizeof(struct TALER_DenominationSignature),
-&aw->denom_sigs),
+NULL,
-TALER_PQ_result_spec_array_denom_hash (
+(void **) &aw->denom_sigs),
+GNUNET_PQ_result_spec_array_fixed_size (
pg->conn,
"denom_pub_hashes",
-NULL, /* we assume that this is the same size as h_coin_evs */
+sizeof(struct TALER_DenominationHashP),
-&aw->denom_pub_hashes),
+NULL,
+(void **) &aw->denom_pub_hashes),
GNUNET_PQ_result_spec_end
};

-GNUNET_assert (NULL != aw);

/* Used in #postgres_get_age_withdraw() to
locate the response for a /reserve/$RESERVE_PUB/age-withdraw request
using the hash of the blinded message. Also needed to ensure

@@ -86,12 +87,12 @@ TEH_PG_get_age_withdraw (
",amount_with_fee_val"
",amount_with_fee_frac"
",noreveal_index"
-",h_blind_evs"
+",h_coin_evs"
",denom_sigs"
",ARRAY("
" SELECT denominations.denom_pub_hash FROM ("
-" SELECT UNNEST(denom_serials) AS id,"
+" SELECT UNNEST(denomination_serials) AS id,"
-" generate_subscripts(denom_serials, 1) AS nr" /* for order */
+" generate_subscripts(denominations_serials, 1) AS nr" /* for order */
" ) AS denoms"
" LEFT JOIN denominations ON denominations.denominations_serial=denoms.id"
") AS denom_pub_hashes"
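The ARRAY(...) subquery in the hunk above uses a PostgreSQL idiom worth spelling out: UNNEST() by itself gives no ordering guarantee, so the query also emits generate_subscripts() as an explicit index column for each array element. The following is a minimal, hedged sketch of that idiom as a C string constant; the table and column names are taken from the hunk, while the trailing ORDER BY is an assumption about how the full (not shown) statement keeps the joined hashes aligned with the input array.

/* Illustrative only: UNNEST + generate_subscripts ordering idiom.
 * The ORDER BY clause is assumed; the visible hunk only shows the
 * "nr" column being produced "for order". */
static const char *denom_pub_hashes_subquery =
  "ARRAY("
  " SELECT denominations.denom_pub_hash FROM ("
  "  SELECT UNNEST(denom_serials) AS id,"                /* element value */
  "   generate_subscripts(denom_serials, 1) AS nr"       /* element index */
  " ) AS denoms"
  " LEFT JOIN denominations"
  "  ON denominations.denominations_serial=denoms.id"
  " ORDER BY denoms.nr"                                  /* assumed: keep array order */
  ") AS denom_pub_hashes";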
@@ -3066,7 +3066,6 @@ typedef void
* @param coin_inputs The input for the coins to withdraw, same as in the previous call to /age-withdraw
* @param noreveal_index The index into each of the kappa coin candidates, that should not be revealed to the exchange
* @param h_commitment The commmitment from the previous call to /age-withdraw
-* @param reserve_pub The public key of the reserve the original call to /age-withdraw was made to
* @param res_cb A callback for the result, maybe NULL
* @param res_cb_cls A closure for @e res_cb, maybe NULL
* @return a handle for this request; NULL if the argument was invalid.

@@ -3081,7 +3080,6 @@ TALER_EXCHANGE_age_withdraw_reveal (
num_coins],
uint8_t noreveal_index,
const struct TALER_AgeWithdrawCommitmentHashP *h_commitment,
-const struct TALER_ReservePublicKeyP *reserve_pub,
TALER_EXCHANGE_AgeWithdrawRevealCallback res_cb,
void *res_cb_cls);

@@ -126,33 +126,6 @@ struct GNUNET_PQ_QueryParam
TALER_PQ_query_param_json (const json_t *x);


-/**
-* Generate query parameter for an array of blinded denomination signatures
-*
-* @param num number of elements in @e denom_sigs
-* @param denom_sigs array of blinded denomination signatures
-* @param db context for the db-connection
-*/
-struct GNUNET_PQ_QueryParam
-TALER_PQ_query_param_array_blinded_denom_sig (
-size_t num,
-const struct TALER_BlindedDenominationSignature *denom_sigs,
-const struct GNUNET_PQ_Context *db
-);
-
-/**
-* Generate query parameter for an array of blinded hashes of coin envelopes
-*
-* @param num number of elements in @e denom_sigs
-* @param coin_evs array of blinded hashes of coin envelopes
-* @param db context for the db-connection
-*/
-struct GNUNET_PQ_QueryParam
-TALER_PQ_query_param_array_blinded_coin_hash (
-size_t num,
-const struct TALER_BlindedCoinHashP *coin_evs,
-const struct GNUNET_PQ_Context *db);

/**
* Currency amount expected.
*

@@ -256,54 +229,6 @@ TALER_PQ_result_spec_json (const char *name,
json_t **jp);


-/**
-* Array of blinded denomination signature expected
-*
-* @param db context of the database connection
-* @param name name of the field in the table
-* @param[out] num number of elements in @e denom_sigs
-* @param[out] denom_sigs where to store the result
-* @return array entry for the result specification to use
-*/
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_blinded_denom_sig (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_BlindedDenominationSignature **denom_sigs);
-
-/**
-* Array of blinded hashes of coin envelopes
-*
-* @param db context of the database connection
-* @param name name of the field in the table
-* @param[out] num number of elements in @e denom_sigs
-* @param[out] h_coin_evs where to store the result
-* @return array entry for the result specification to use
-*/
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_blinded_coin_hash (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_BlindedCoinHashP **h_coin_evs);
-
-/**
-* Array of hashes of denominations
-*
-* @param db context of the database connection
-* @param name name of the field in the table
-* @param[out] num number of elements in @e denom_sigs
-* @param[out] denom_hs where to store the result
-* @return array entry for the result specification to use
-*/
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_denom_hash (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_DenominationHashP **denom_hs);

#endif /* TALER_PQ_LIB_H_ */

/* end of include/taler_pq_lib.h */
@@ -346,7 +346,6 @@ reserve_age_withdraw_ok (
return GNUNET_SYSERR;

}

awbh->callback (awbh->callback_cls,
&response);
/* make sure the callback isn't called again */

@@ -781,7 +780,7 @@ copy_results (
const struct TALER_EXCHANGE_AgeWithdrawBlindedResponse *awbr)
{
struct TALER_EXCHANGE_AgeWithdrawHandle *awh = cls;
-uint8_t k = awbr->details.ok.noreveal_index;
+uint8_t idx = awbr->details.ok.noreveal_index;
struct TALER_EXCHANGE_AgeWithdrawCoinPrivateDetails details[awh->num_coins];
struct TALER_BlindedCoinHashP blinded_coin_hs[awh->num_coins];
struct TALER_EXCHANGE_AgeWithdrawResponse resp = {

@@ -798,9 +797,9 @@ copy_results (

for (size_t n = 0; n< awh->num_coins; n++)
{
-details[n] = awh->coin_data[n].coin_candidates[k].details;
+details[n] = awh->coin_data[n].coin_candidates[idx].details;
-details[n].planchet = awh->coin_data[n].planchet_details[k];
+details[n].planchet = awh->coin_data[n].planchet_details[idx];
-blinded_coin_hs[n] = awh->coin_data[n].coin_candidates[k].blinded_coin_h;
+blinded_coin_hs[n] = awh->coin_data[n].coin_candidates[idx].blinded_coin_h;
}

awh->callback (awh->callback_cls,

@@ -825,9 +824,9 @@ call_age_withdraw_blinded (
for (size_t n = 0; n < awh->num_coins; n++)
{
blinded_input[n].denom_pub = &awh->coin_data[n].denom_pub;
-for (uint8_t k = 0; k < TALER_CNC_KAPPA; k++)
+for (uint8_t i = 0; i < TALER_CNC_KAPPA; i++)
-blinded_input[n].planchet_details[k] =
+blinded_input[n].planchet_details[i] =
-awh->coin_data[n].planchet_details[k];
+awh->coin_data[n].planchet_details[i];
}

awh->procotol_handle =

@@ -919,8 +918,6 @@ csr_withdraw_done (
bool success = false;
/* Complete the initialization of the coin with CS denomination */
can->details.alg_values = csrr->details.ok.alg_values;
-GNUNET_assert (can->details.alg_values.cipher
-== TALER_DENOMINATION_CS);
TALER_planchet_setup_coin_priv (&can->secret,
&can->details.alg_values,
&can->details.coin_priv);

@@ -953,6 +950,7 @@ csr_withdraw_done (
TALER_EXCHANGE_age_withdraw_cancel (awh);
break;
}

success = true;
} while(0);


@@ -1025,6 +1023,7 @@ prepare_coins (
struct TALER_PlanchetDetail *planchet = &cd->planchet_details[k];

can->secret = input->secrets[k];

/* Derive the age restriction from the given secret and
* the maximum age */
FAIL_IF (GNUNET_OK !=

@@ -1064,8 +1063,6 @@ prepare_coins (
}
case TALER_DENOMINATION_CS:
{
-can->details.alg_values.cipher = TALER_DENOMINATION_CS;
-
struct CSRClosure *cls = &cd->csr_cls[k];
/**
* Save the handler and the denomination for the callback
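The copy_results hunk above keeps, for every coin, only the candidate planchet at the noreveal index; the other kappa - 1 candidates are what the wallet later discloses in the reveal step. Below is a minimal standalone sketch of that selection logic using plain arrays and hypothetical names (Candidate, NUM_COINS, KAPPA) rather than the real TALER structures; KAPPA = 3 is an assumption (TALER_CNC_KAPPA is 3 upstream).

#include <stddef.h>
#include <stdint.h>

#define KAPPA 3        /* assumed cut-and-choose factor */
#define NUM_COINS 8    /* hypothetical number of coins in the commitment */

/* Hypothetical stand-in for one blinded coin candidate. */
struct Candidate
{
  uint64_t secret;     /* planchet master secret (placeholder) */
};

/* Keep the candidate at noreveal_index for each coin; collect the
 * secrets of the remaining KAPPA-1 candidates for disclosure. */
static void
select_candidates (const struct Candidate cand[NUM_COINS][KAPPA],
                   uint8_t noreveal_index,
                   struct Candidate kept[NUM_COINS],
                   uint64_t disclosed[NUM_COINS][KAPPA - 1])
{
  for (size_t n = 0; n < NUM_COINS; n++)
  {
    size_t d = 0;

    kept[n] = cand[n][noreveal_index];
    for (uint8_t k = 0; k < KAPPA; k++)
      if (k != noreveal_index)
        disclosed[n][d++] = cand[n][k].secret;
  }
}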
@@ -47,9 +47,6 @@ struct TALER_EXCHANGE_AgeWithdrawRevealHandle
/* The age-withdraw commitment */
struct TALER_AgeWithdrawCommitmentHashP h_commitment;

-/* The reserve's public key */
-const struct TALER_ReservePublicKeyP *reserve_pub;
-
/* Number of coins */
size_t num_coins;


@@ -118,18 +115,17 @@ age_withdraw_reveal_ok (

{
struct TALER_BlindedDenominationSignature denom_sigs[awrh->num_coins];
-json_t *j_sig;
-size_t n;

/* Reconstruct the coins and unblind the signatures */
-json_array_foreach (j_sigs, n, j_sig)
+for (size_t n = 0; n < awrh->num_coins; n++)
{
+json_t *j_sig = json_array_get (j_sigs, n);
struct GNUNET_JSON_Specification spec[] = {
-TALER_JSON_spec_blinded_denom_sig (NULL,
+GNUNET_JSON_spec_fixed_auto ("", &denom_sigs[n]),
-&denom_sigs[n]),
GNUNET_JSON_spec_end ()
};

+GNUNET_assert (NULL != j_sig);
if (GNUNET_OK != GNUNET_JSON_parse (j_sig,
spec,
NULL, NULL))

@@ -137,7 +133,6 @@ age_withdraw_reveal_ok (
GNUNET_break_op (0);
return GNUNET_SYSERR;
}
-
}

response.details.ok.num_sigs = awrh->num_coins;

@@ -236,7 +231,7 @@ handle_age_withdraw_reveal_finished (
break;
case MHD_HTTP_NOT_FOUND:
/* Nothing really to verify, the exchange basically just says
-that it doesn't know this age-withdraw commitment. */
+that it doesn't know this age-withraw commitment. */
awr.hr.ec = TALER_JSON_get_error_code (j_response);
awr.hr.hint = TALER_JSON_get_error_hint (j_response);
break;

@@ -304,7 +299,7 @@ prepare_url (
*end = '\0';
GNUNET_snprintf (arg_str,
sizeof (arg_str),
-"age-withdraw/%s/reveal",
+"age-withraw/%s/reveal",
pub_str);

awrh->request_url = TALER_url_join (exchange_url,

@@ -348,9 +343,6 @@ perform_protocol (
} \
} while(0)

-j_array_of_secrets = json_array ();
-FAIL_IF (NULL == j_array_of_secrets);
-
for (size_t n = 0; n < awrh->num_coins; n++)
{
const struct TALER_PlanchetMasterSecretP *secrets =

@@ -377,8 +369,6 @@ perform_protocol (
j_secrets));
}
j_request_body = GNUNET_JSON_PACK (
-GNUNET_JSON_pack_data_auto ("reserve_pub",
-awrh->reserve_pub),
GNUNET_JSON_pack_array_steal ("disclosed_coin_secrets",
j_array_of_secrets));
FAIL_IF (NULL == j_request_body);

@@ -428,7 +418,6 @@ TALER_EXCHANGE_age_withdraw_reveal (
num_coins],
uint8_t noreveal_index,
const struct TALER_AgeWithdrawCommitmentHashP *h_commitment,
-const struct TALER_ReservePublicKeyP *reserve_pub,
TALER_EXCHANGE_AgeWithdrawRevealCallback reveal_cb,
void *reveal_cb_cls)
{

@@ -440,7 +429,6 @@ TALER_EXCHANGE_age_withdraw_reveal (
awrh->coins_input = coins_input;
awrh->callback = reveal_cb;
awrh->callback_cls = reveal_cb_cls;
-awrh->reserve_pub = reserve_pub;

if (GNUNET_OK !=
prepare_url (exchange_url,
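The reveal request assembled in perform_protocol is, at its core, a JSON object with one array of arrays: for every coin, the disclosed planchet secrets of the candidates that were not kept. Below is a hedged, jansson-only sketch of that shape with hypothetical placeholder strings instead of the real encodings of the secrets; it illustrates the structure only, not the exact wire encoding the exchange expects.

#include <jansson.h>
#include <stdio.h>
#include <stdlib.h>

/* Hedged sketch: build a /age-withdraw/$H_COMMITMENT/reveal request body.
 * "SECRET-A"/"SECRET-B" are placeholders for the disclosed secrets. */
int
main (void)
{
  json_t *all = json_array ();              /* one entry per coin */

  for (size_t n = 0; n < 2; n++)            /* two hypothetical coins */
  {
    json_t *per_coin = json_array ();       /* kappa-1 disclosed secrets */

    json_array_append_new (per_coin, json_string ("SECRET-A"));
    json_array_append_new (per_coin, json_string ("SECRET-B"));
    json_array_append_new (all, per_coin);
  }

  json_t *body = json_pack ("{s:o}",
                            "disclosed_coin_secrets", all);
  char *txt = json_dumps (body, JSON_INDENT (2));

  printf ("%s\n", txt);
  free (txt);
  json_decref (body);
  return 0;
}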
@@ -24,7 +24,6 @@
#include <gnunet/gnunet_util_lib.h>
#include <gnunet/gnunet_pq_lib.h>
#include "taler_pq_lib.h"
-#include "pq_common.h"


/**

@@ -672,388 +671,4 @@ TALER_PQ_query_param_json (const json_t *x)
}


-/** ------------------- Array support -----------------------------------**/
-
-/**
-* Closure for the array type handlers.
-*
-* May contain sizes information for the data, given (and handled) by the
-* caller.
-*/
-struct qconv_array_cls
-{
-/**
-* If not null, contains the array of sizes (the size of the array is the
-* .size field in the ambient GNUNET_PQ_QueryParam struct). We do not free
-* this memory.
-*
-* If not null, this value has precedence over @a sizes, which MUST be NULL */
-const size_t *sizes;
-
-/**
-* If @a size and @a c_sizes are NULL, this field defines the same size
-* for each element in the array.
-*/
-size_t same_size;
-
-/**
-* If true, the array parameter to the data pointer to the qconv_array is a
-* continuous byte array of data, either with @a same_size each or sizes
-* provided bytes by @a sizes;
-*/
-bool continuous;
-
-/**
-* Type of the array elements
-*/
-enum TALER_PQ_ArrayType typ;
-
-/**
-* Oid of the array elements
-*/
-Oid oid;
-};
-
-/**
-* Callback to cleanup a qconv_array_cls to be used during
-* GNUNET_PQ_cleanup_query_params_closures
-*/
-static void
-qconv_array_cls_cleanup (void *cls)
-{
-GNUNET_free (cls);
-}
-
-
-/**
-* Function called to convert input argument into SQL parameters for arrays
-*
-* Note: the format for the encoding of arrays for libpq is not very well
-* documented. We peeked into various sources (postgresql and libpqtypes) for
-* guidance.
-*
-* @param cls Closure of type struct qconv_array_cls*
-* @param data Pointer to first element in the array
-* @param data_len Number of _elements_ in array @a data (if applicable)
-* @param[out] param_values SQL data to set
-* @param[out] param_lengths SQL length data to set
-* @param[out] param_formats SQL format data to set
-* @param param_length number of entries available in the @a param_values, @a param_lengths and @a param_formats arrays
-* @param[out] scratch buffer for dynamic allocations (to be done via #GNUNET_malloc()
-* @param scratch_length number of entries left in @a scratch
-* @return -1 on error, number of offsets used in @a scratch otherwise
-*/
-static int
-qconv_array (
-void *cls,
-const void *data,
-size_t data_len,
-void *param_values[],
-int param_lengths[],
-int param_formats[],
-unsigned int param_length,
-void *scratch[],
-unsigned int scratch_length)
-{
-struct qconv_array_cls *meta = cls;
-size_t num = data_len;
-size_t total_size;
-const size_t *sizes;
-bool same_sized;
-void *elements = NULL;
-bool noerror = true;
-/* needed to capture the encoded rsa signatures */
-void **buffers = NULL;
-size_t *buffer_lengths = NULL;
-
-(void) (param_length);
-(void) (scratch_length);
-
-GNUNET_assert (NULL != meta);
-GNUNET_assert (num < INT_MAX);
-
-sizes = meta->sizes;
-same_sized = (0 != meta->same_size);
-
-#define RETURN_UNLESS(cond) \
-do { \
-if (! (cond)) \
-{ \
-GNUNET_break ((cond)); \
-noerror = false; \
-goto DONE; \
-} \
-} while(0)
-
-/* Calculate sizes and check bounds */
-{
-/* num * length-field */
-size_t x = sizeof(uint32_t);
-size_t y = x * num;
-RETURN_UNLESS ((0 == num) || (y / num == x));
-
-/* size of header */
-total_size = x = sizeof(struct TALER_PQ_ArrayHeader);
-total_size += y;
-RETURN_UNLESS (total_size >= x);
-
-/* sizes of elements */
-if (same_sized)
-{
-x = num * meta->same_size;
-RETURN_UNLESS ((0 == num) || (x / num == meta->same_size));
-
-y = total_size;
-total_size += x;
-RETURN_UNLESS (total_size >= y);
-}
-else /* sizes are different per element */
-{
-
-switch (meta->typ)
-{
-case TALER_PQ_array_of_blinded_denom_sig:
-{
-const struct TALER_BlindedDenominationSignature *denom_sigs = data;
-size_t len;
-
-buffers = GNUNET_new_array (num, void *);
-buffer_lengths = GNUNET_new_array (num, size_t);
-
-for (size_t i = 0; i<num; i++)
-{
-switch (denom_sigs[i].cipher)
-{
-case TALER_DENOMINATION_RSA:
-len = GNUNET_CRYPTO_rsa_signature_encode (
-denom_sigs[i].details.blinded_rsa_signature,
-&buffers[i]);
-RETURN_UNLESS (len != 0);
-break;
-case TALER_DENOMINATION_CS:
-len = sizeof (denom_sigs[i].details.blinded_cs_answer);
-break;
-default:
-GNUNET_assert (0);
-}
-
-/* for the cipher and marker */
-len += 2 * sizeof(uint32_t);
-buffer_lengths[i] = len;
-
-y = total_size;
-total_size += len;
-RETURN_UNLESS (total_size >= y);
-}
-sizes = buffer_lengths;
-break;
-}
-default:
-GNUNET_assert (0);
-}
-}
-
-RETURN_UNLESS (INT_MAX > total_size);
-RETURN_UNLESS (0 != total_size);
-
-elements = GNUNET_malloc (total_size);
-}
-
-/* Write data */
-{
-char *out = elements;
-struct TALER_PQ_ArrayHeader h = {
-.ndim = htonl (1), /* We only support one-dimensional arrays */
-.has_null = htonl (0), /* We do not support NULL entries in arrays */
-.lbound = htonl (1), /* Default start index value */
-.dim = htonl (num),
-.oid = htonl (meta->oid),
-};
-
-/* Write header */
-GNUNET_memcpy (out, &h, sizeof(h));
-out += sizeof(h);
-
-
-/* Write elements */
-for (size_t i = 0; i < num; i++)
-{
-size_t sz = same_sized ? meta->same_size : sizes[i];
-
-*(uint32_t *) out = htonl (sz);
-out += sizeof(uint32_t);
-
-switch (meta->typ)
-{
-case TALER_PQ_array_of_blinded_denom_sig:
-{
-const struct TALER_BlindedDenominationSignature *denom_sigs = data;
-
-uint32_t be[2];
-be[0] = htonl ((uint32_t) denom_sigs[i].cipher);
-be[1] = htonl (0x01); /* magic margker: blinded */
-GNUNET_memcpy (out,
-&be,
-sizeof(be));
-out += sizeof(be);
-sz -= sizeof(be);
-
-switch (denom_sigs[i].cipher)
-{
-case TALER_DENOMINATION_RSA:
-{
-void *buf = buffers[i];
-
-GNUNET_memcpy (out,
-buf,
-sz);
-break;
-}
-case TALER_DENOMINATION_CS:
-GNUNET_memcpy (out,
-&denom_sigs[i].details.blinded_cs_answer,
-sz);
-break;
-default:
-GNUNET_assert (0);
-}
-break;
-}
-case TALER_PQ_array_of_blinded_coin_hash:
-{
-const struct TALER_BlindedCoinHashP *coin_hs = data;
-GNUNET_memcpy (out,
-&coin_hs[i],
-sizeof(struct TALER_BlindedCoinHashP));
-
-break;
-}
-case TALER_PQ_array_of_denom_hash:
-{
-const struct TALER_DenominationHashP *denom_hs = data;
-GNUNET_memcpy (out,
-&denom_hs[i],
-sizeof(struct TALER_DenominationHashP));
-break;
-}
-default:
-{
-GNUNET_assert (0);
-break;
-}
-}
-out += sz;
-}
-}
-
-param_values[0] = elements;
-param_lengths[0] = total_size;
-param_formats[0] = 1;
-scratch[0] = elements;
-
-DONE:
-if (NULL != buffers)
-{
-for (size_t i = 0; i<num; i++)
-GNUNET_free (buffers[i]);
-GNUNET_free (buffers);
-}
-GNUNET_free (buffer_lengths);
-
-if (noerror)
-return 1;
-
-return -1;
-}
-
-
-/**
-* Function to genreate a typ specific query parameter and corresponding closure
-*
-* @param num Number of elements in @a elements
-* @param continuous If true, @a elements is an continuous array of data
-* @param elements Array of @a num elements, either continuous or pointers
-* @param sizes Array of @a num sizes, one per element, may be NULL
-* @param same_size If not 0, all elements in @a elements have this size
-* @param typ Supported internal type of each element in @a elements
-* @param oid Oid of the type to be used in Postgres
-* @return Query parameter
-*/
-static struct GNUNET_PQ_QueryParam
-query_param_array_generic (
-unsigned int num,
-bool continuous,
-const void *elements,
-const size_t *sizes,
-size_t same_size,
-enum TALER_PQ_ArrayType typ,
-Oid oid)
-{
-struct qconv_array_cls *meta = GNUNET_new (struct qconv_array_cls);
-meta->typ = typ;
-meta->oid = oid;
-meta->sizes = sizes;
-meta->same_size = same_size;
-meta->continuous = continuous;
-
-struct GNUNET_PQ_QueryParam res = {
-.conv = qconv_array,
-.conv_cls = meta,
-.conv_cls_cleanup = qconv_array_cls_cleanup,
-.data = elements,
-.size = num,
-.num_params = 1,
-};
-
-return res;
-}
-
-
-struct GNUNET_PQ_QueryParam
-TALER_PQ_query_param_array_blinded_denom_sig (
-size_t num,
-const struct TALER_BlindedDenominationSignature *denom_sigs,
-const struct GNUNET_PQ_Context *db)
-{
-return query_param_array_generic (num,
-true,
-denom_sigs,
-NULL,
-0,
-TALER_PQ_array_of_blinded_denom_sig,
-GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA));
-};
-
-struct GNUNET_PQ_QueryParam
-TALER_PQ_query_param_array_blinded_coin_hash (
-size_t num,
-const struct TALER_BlindedCoinHashP *coin_hs,
-const struct GNUNET_PQ_Context *db)
-{
-return query_param_array_generic (num,
-true,
-coin_hs,
-NULL,
-sizeof(struct TALER_BlindedCoinHashP),
-TALER_PQ_array_of_blinded_coin_hash,
-GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA));
-};
-
-struct GNUNET_PQ_QueryParam
-TALER_PQ_query_param_array_denom_hash (
-size_t num,
-const struct TALER_DenominationHashP *denom_hs,
-const struct GNUNET_PQ_Context *db)
-{
-return query_param_array_generic (num,
-true,
-denom_hs,
-NULL,
-sizeof(struct TALER_DenominationHashP),
-TALER_PQ_array_of_denom_hash,
-GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA));
-};
/* end of pq/pq_query_helper.c */
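The removed qconv_array above hand-rolls libpq's binary array wire format: a fixed header (number of dimensions, NULL flag, element Oid, element count, lower bound) followed by one 4-byte big-endian length word plus payload per element. Below is a self-contained sketch of that layout for fixed-size elements; the header struct here is a guess at the field order (the real struct TALER_PQ_ArrayHeader lives in pq_common.h, which is not part of this diff), so treat it as an illustration only.

#include <arpa/inet.h>   /* htonl */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Assumed layout of the one-dimensional array header; field order is a
 * guess for illustration, not the authoritative pq_common.h definition. */
struct ArrayHeader
{
  uint32_t ndim;      /* always 1 here */
  uint32_t has_null;  /* 0: no NULL elements */
  uint32_t oid;       /* element type Oid, e.g. BYTEA */
  uint32_t dim;       /* number of elements */
  uint32_t lbound;    /* lower bound, 1 by convention */
};

/* Serialize `num` fixed-size elements into the binary array format:
 * header, then per element a big-endian length word and the payload.
 * Returns a malloc()ed buffer owned by the caller, or NULL on OOM. */
static void *
encode_fixed_size_array (const void *elements,
                         size_t elem_size,
                         uint32_t num,
                         uint32_t elem_oid,
                         size_t *out_size)
{
  size_t total = sizeof (struct ArrayHeader)
                 + (size_t) num * (sizeof (uint32_t) + elem_size);
  char *buf = malloc (total);
  char *out = buf;
  struct ArrayHeader h = {
    .ndim = htonl (1),
    .has_null = htonl (0),
    .oid = htonl (elem_oid),
    .dim = htonl (num),
    .lbound = htonl (1),
  };

  if (NULL == buf)
    return NULL;
  memcpy (out, &h, sizeof (h));
  out += sizeof (h);
  for (uint32_t i = 0; i < num; i++)
  {
    uint32_t len = htonl ((uint32_t) elem_size);

    memcpy (out, &len, sizeof (len));
    out += sizeof (len);
    memcpy (out, (const char *) elements + (size_t) i * elem_size, elem_size);
    out += elem_size;
  }
  *out_size = total;
  return buf;
}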
@@ -20,7 +20,6 @@
*/
#include "platform.h"
#include <gnunet/gnunet_util_lib.h>
-#include "pq_common.h"
#include "taler_pq_lib.h"


@@ -976,305 +975,4 @@ TALER_PQ_result_spec_exchange_withdraw_values (
}


-/**
-* Closure for the array result specifications. Contains type information
-* for the generic parser extract_array_generic and out-pointers for the results.
-*/
-struct ArrayResultCls
-{
-/* Oid of the expected type, must match the oid in the header of the PQResult struct */
-Oid oid;
-
-/* Target type */
-enum TALER_PQ_ArrayType typ;
-
-/* If not 0, defines the expected size of each entry */
-size_t same_size;
-
-/* Out-pointer to write the number of elements in the array */
-size_t *num;
-
-/* Out-pointer. If @a typ is TALER_PQ_array_of_byte and @a same_size is 0,
-* allocate and put the array of @a num sizes here. NULL otherwise */
-size_t **sizes;
-};
-
-/**
-* Extract data from a Postgres database @a result as array of a specific type
-* from row @a row. The type information and optionally additional
-* out-parameters are given in @a cls which is of type array_result_cls.
-*
-* @param cls closure of type array_result_cls
-* @param result where to extract data from
-* @param row row to extract data from
-* @param fname name (or prefix) of the fields to extract from
-* @param[in,out] dst_size where to store size of result, may be NULL
-* @param[out] dst where to store the result
-* @return
-* #GNUNET_YES if all results could be extracted
-* #GNUNET_SYSERR if a result was invalid (non-existing field or NULL)
-*/
-static enum GNUNET_GenericReturnValue
-extract_array_generic (
-void *cls,
-PGresult *result,
-int row,
-const char *fname,
-size_t *dst_size,
-void *dst)
-{
-const struct ArrayResultCls *info = cls;
-int data_sz;
-char *data;
-void *out = NULL;
-struct TALER_PQ_ArrayHeader header;
-int col_num;
-
-GNUNET_assert (NULL != dst);
-*((void **) dst) = NULL;
-
-#define FAIL_IF(cond) \
-do { \
-if ((cond)) \
-{ \
-GNUNET_break (! (cond)); \
-goto FAIL; \
-} \
-} while(0)
-
-col_num = PQfnumber (result, fname);
-FAIL_IF (0 > col_num);
-
-data_sz = PQgetlength (result, row, col_num);
-FAIL_IF (0 > data_sz);
-FAIL_IF (sizeof(header) > (size_t) data_sz);
-
-data = PQgetvalue (result, row, col_num);
-FAIL_IF (NULL == data);
-
-{
-struct TALER_PQ_ArrayHeader *h =
-(struct TALER_PQ_ArrayHeader *) data;
-
-header.ndim = ntohl (h->ndim);
-header.has_null = ntohl (h->has_null);
-header.oid = ntohl (h->oid);
-header.dim = ntohl (h->dim);
-header.lbound = ntohl (h->lbound);
-
-FAIL_IF (1 != header.ndim);
-FAIL_IF (INT_MAX <= header.dim);
-FAIL_IF (0 != header.has_null);
-FAIL_IF (1 != header.lbound);
-FAIL_IF (info->oid != header.oid);
-}
-
-if (NULL != info->num)
-*info->num = header.dim;
-
-{
-char *in = data + sizeof(header);
-
-switch (info->typ)
-{
-case TALER_PQ_array_of_denom_hash:
-if (NULL != dst_size)
-*dst_size = sizeof(struct TALER_DenominationHashP) * (header.dim);
-out = GNUNET_new_array (header.dim, struct TALER_DenominationHashP);
-*((void **) dst) = out;
-for (uint32_t i = 0; i < header.dim; i++)
-{
-size_t sz = ntohl (*(uint32_t *) in);
-FAIL_IF (sz != sizeof(struct TALER_DenominationHashP));
-in += sizeof(uint32_t);
-*(struct TALER_DenominationHashP *) out =
-*(struct TALER_DenominationHashP *) in;
-in += sz;
-out += sz;
-}
-return GNUNET_OK;
-
-case TALER_PQ_array_of_blinded_coin_hash:
-if (NULL != dst_size)
-*dst_size = sizeof(struct TALER_BlindedCoinHashP) * (header.dim);
-out = GNUNET_new_array (header.dim, struct TALER_BlindedCoinHashP);
-*((void **) dst) = out;
-for (uint32_t i = 0; i < header.dim; i++)
-{
-size_t sz = ntohl (*(uint32_t *) in);
-FAIL_IF (sz != sizeof(struct TALER_BlindedCoinHashP));
-in += sizeof(uint32_t);
-*(struct TALER_BlindedCoinHashP *) out =
-*(struct TALER_BlindedCoinHashP *) in;
-in += sz;
-out += sz;
-}
-return GNUNET_OK;
-
-case TALER_PQ_array_of_blinded_denom_sig:
-{
-struct TALER_BlindedDenominationSignature *denom_sigs;
-if (0 == header.dim)
-{
-if (NULL != dst_size)
-*dst_size = 0;
-break;
-}
-
-denom_sigs = GNUNET_new_array (header.dim,
-struct TALER_BlindedDenominationSignature);
-*((void **) dst) = denom_sigs;
-
-/* copy data */
-for (uint32_t i = 0; i < header.dim; i++)
-{
-struct TALER_BlindedDenominationSignature *denom_sig = &denom_sigs[i];
-uint32_t be[2];
-size_t sz = ntohl (*(uint32_t *) in);
-in += sizeof(uint32_t);
-
-FAIL_IF (sizeof(be) > sz);
-GNUNET_memcpy (&be,
-in,
-sizeof(be));
-FAIL_IF (0x01 != ntohl (be[1])); /* magic marker: blinded */
-
-in += sizeof(be);
-sz -= sizeof(be);
-
-denom_sig->cipher = ntohl (be[0]);
-switch (denom_sig->cipher)
-{
-case TALER_DENOMINATION_RSA:
-denom_sig->details.blinded_rsa_signature =
-GNUNET_CRYPTO_rsa_signature_decode (in,
-sz);
-FAIL_IF (NULL == denom_sig->details.blinded_rsa_signature);
-break;
-
-case TALER_DENOMINATION_CS:
-FAIL_IF (sizeof(denom_sig->details.blinded_cs_answer) != sz);
-GNUNET_memcpy (&denom_sig->details.blinded_cs_answer,
-in,
-sz);
-break;
-
-default:
-FAIL_IF (true);
-}
-
-in += sz;
-}
-return GNUNET_OK;
-}
-default:
-FAIL_IF (true);
-}
-}
-
-FAIL:
-GNUNET_free (*(void **) dst);
-return GNUNET_SYSERR;
-#undef FAIL_IF
-
-}
-
-
-/**
-* Cleanup of the data and closure of an array spec.
-*/
-static void
-array_cleanup (void *cls,
-void *rd)
-{
-
-struct ArrayResultCls *info = cls;
-void **dst = rd;
-
-if ((0 == info->same_size) &&
-(NULL != info->sizes))
-GNUNET_free (*(info->sizes));
-
-GNUNET_free (cls);
-GNUNET_free (*dst);
-*dst = NULL;
-}
-
-
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_blinded_denom_sig (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_BlindedDenominationSignature **denom_sigs)
-{
-struct ArrayResultCls *info = GNUNET_new (struct ArrayResultCls);
-
-info->num = num;
-info->typ = TALER_PQ_array_of_blinded_denom_sig;
-info->oid = GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA);
-
-struct GNUNET_PQ_ResultSpec res = {
-.conv = extract_array_generic,
-.cleaner = array_cleanup,
-.dst = (void *) denom_sigs,
-.fname = name,
-.cls = info
-};
-return res;
-
-};
-
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_blinded_coin_hash (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_BlindedCoinHashP **h_coin_evs)
-{
-struct ArrayResultCls *info = GNUNET_new (struct ArrayResultCls);
-
-info->num = num;
-info->typ = TALER_PQ_array_of_blinded_coin_hash;
-info->oid = GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA);
-
-struct GNUNET_PQ_ResultSpec res = {
-.conv = extract_array_generic,
-.cleaner = array_cleanup,
-.dst = (void *) h_coin_evs,
-.fname = name,
-.cls = info
-};
-return res;
-
-};
-
-struct GNUNET_PQ_ResultSpec
-TALER_PQ_result_spec_array_denom_hash (
-const struct GNUNET_PQ_Context *db,
-const char *name,
-size_t *num,
-struct TALER_DenominationHashP **denom_hs)
-{
-struct ArrayResultCls *info = GNUNET_new (struct ArrayResultCls);
-
-info->num = num;
-info->typ = TALER_PQ_array_of_denom_hash;
-info->oid = GNUNET_PQ_get_oid (db,
-GNUNET_PQ_DATATYPE_BYTEA);
-
-struct GNUNET_PQ_ResultSpec res = {
-.conv = extract_array_generic,
-.cleaner = array_cleanup,
-.dst = (void *) denom_hs,
-.fname = name,
-.cls = info
-};
-return res;
-
-};
-

/* end of pq_result_helper.c */
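extract_array_generic, removed above, walks the same wire format in the opposite direction: validate the header, then read one big-endian length word per element and copy the payload out. Below is a hedged, self-contained counterpart to the encoder sketched earlier, restricted to fixed-size elements; the header layout is again an assumption rather than the pq_common.h definition.

#include <arpa/inet.h>   /* ntohl */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* Same assumed header layout as in the encoder sketch above. */
struct ArrayHeader
{
  uint32_t ndim;
  uint32_t has_null;
  uint32_t oid;
  uint32_t dim;
  uint32_t lbound;
};

/* Decode a binary array of fixed-size elements.  On success returns a
 * malloc()ed buffer of (*num * elem_size) bytes; returns NULL on any
 * format violation, mirroring the FAIL_IF checks in the removed code. */
static void *
decode_fixed_size_array (const void *data,
                         size_t data_size,
                         size_t elem_size,
                         uint32_t expected_oid,
                         size_t *num)
{
  struct ArrayHeader h;
  const char *in = data;
  char *out;

  if (data_size < sizeof (h))
    return NULL;
  memcpy (&h, in, sizeof (h));
  in += sizeof (h);
  if ((1 != ntohl (h.ndim)) ||
      (0 != ntohl (h.has_null)) ||
      (1 != ntohl (h.lbound)) ||
      (expected_oid != ntohl (h.oid)))
    return NULL;
  *num = ntohl (h.dim);
  if (data_size < sizeof (h) + *num * (sizeof (uint32_t) + elem_size))
    return NULL;
  out = malloc (*num * elem_size);
  if (NULL == out)
    return NULL;
  for (size_t i = 0; i < *num; i++)
  {
    uint32_t len;

    memcpy (&len, in, sizeof (len));
    in += sizeof (len);
    if (ntohl (len) != elem_size)   /* every element must have the fixed size */
    {
      free (out);
      return NULL;
    }
    memcpy (out + i * elem_size, in, elem_size);
    in += elem_size;
  }
  return out;
}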
@@ -260,7 +260,7 @@ run (void *cls,
CMD_TRANSFER_TO_EXCHANGE ("create-reserve-kyc-1",
"EUR:30.02"),
TALER_TESTING_cmd_check_bank_admin_transfer (
-"check-create-reserve-kyc-1",
+"check-create-reserve-1",
"EUR:30.02",
cred.user42_payto,
cred.exchange_payto,

@@ -290,17 +290,14 @@ run (void *cls,
MHD_HTTP_CONFLICT,
"EUR:10",
NULL),
-TALER_TESTING_cmd_age_withdraw ("age-withdraw-coins-1",
+TALER_TESTING_cmd_age_withdraw ("age-withdraw-coin-1",
"create-reserve-kyc-1",
8,
MHD_HTTP_OK,
"EUR:10",
-"EUR:10",
+"EUR:5",
"EUR:5",
NULL),
-TALER_TESTING_cmd_age_withdraw_reveal ("age-withdraw-coins-reveal-1",
-"age-withdraw-coins-1",
-MHD_HTTP_OK),
TALER_TESTING_cmd_end (),
};

@@ -578,9 +578,9 @@ age_withdraw_reveal_cb (
case MHD_HTTP_OK:
{
const struct AgeWithdrawState *aws = awrs->aws;
+GNUNET_log (GNUNET_ERROR_TYPE_INFO,
+"Got age-withdraw reveal success!\n");
GNUNET_assert (awrs->num_coins == response->details.ok.num_sigs);
-awrs->denom_sigs = GNUNET_new_array (awrs->num_coins,
-struct TALER_DenominationSignature);
for (size_t n = 0; n < awrs->num_coins; n++)
TALER_denom_sig_unblind (&awrs->denom_sigs[n],
&response->details.ok.blinded_denom_sigs[n],

@@ -588,8 +588,6 @@ age_withdraw_reveal_cb (
&aws->coin_outputs[n].details.h_coin_pub,
&aws->coin_outputs[n].details.alg_values,
&aws->coin_inputs[n].denom_pub->key);
-GNUNET_log (GNUNET_ERROR_TYPE_INFO,
-"age-withdraw reveal success!\n");
}
break;
case MHD_HTTP_NOT_FOUND:

@@ -631,8 +629,8 @@ age_withdraw_reveal_run (
* Get the command and state for the previous call to "age witdraw"
*/
age_withdraw_cmd =
-TALER_TESTING_interpreter_lookup_command (is,
+TALER_TESTING_interpreter_get_command (is,
awrs->age_withdraw_reference);
if (NULL == age_withdraw_cmd)
{
GNUNET_break (0);

@@ -651,7 +649,6 @@ age_withdraw_reveal_run (
aws->coin_inputs,
aws->noreveal_index,
&aws->h_commitment,
-&aws->reserve_pub,
age_withdraw_reveal_cb,
awrs);
}