Merge branch 'beatrice' into 'master'

Merge beatrice into master, 2022-06. See merge request PBSA/peerplays!131

Commit 611a63076b
28 changed files with 592 additions and 152 deletions
.gitlab-ci.yml (104 lines changed)

@@ -8,8 +8,9 @@ include:
 stages:
   - build
   - test
+  - dockerize

-build:
+build-mainnet:
   stage: build
   script:
     - rm -rf .git/modules/docs .git/modules/libraries/fc ./docs ./libraries/fc
@@ -29,25 +30,10 @@ build:
   tags:
     - builder

-dockerize:
-  stage: build
-  variables:
-    IMAGE: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
-  before_script:
-    - docker info
-    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
-  script:
-    - docker build -t $IMAGE .
-    - docker push $IMAGE
-  tags:
-    - builder
-  when: manual
-  timeout: 3h
-
-test:
+test-mainnet:
   stage: test
   dependencies:
-    - build
+    - build-mainnet
   script:
     - ./build/libraries/fc/tests/all_tests
     - ./build/tests/betting_test --log_level=message
@@ -55,3 +41,85 @@ test:
     - ./build/tests/cli_test --log_level=message
   tags:
     - builder
+
+dockerize-mainnet:
+  stage: dockerize
+  dependencies:
+    - test-mainnet
+  variables:
+    IMAGE: $CI_REGISTRY_IMAGE/mainnet/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
+  before_script:
+    - docker info
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build --no-cache -t $IMAGE .
+    - docker push $IMAGE
+  after_script:
+    - docker rmi $IMAGE
+  tags:
+    - builder
+  when:
+    manual
+  timeout:
+    3h
+
+build-testnet:
+  stage: build
+  script:
+    - rm -rf .git/modules/docs .git/modules/libraries/fc ./docs ./libraries/fc
+    - git submodule sync
+    - git submodule update --init --recursive
+    - rm -rf build
+    - mkdir build
+    - cd build
+    - cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_PEERPLAYS_TESTNET=1 ..
+    - make -j$(nproc)
+  artifacts:
+    untracked: true
+    paths:
+      - build/libraries/
+      - build/programs/
+      - build/tests/
+  tags:
+    - builder
+  when:
+    manual
+  timeout:
+    3h
+
+test-testnet:
+  stage: test
+  dependencies:
+    - build-testnet
+  script:
+    - ./build/libraries/fc/tests/all_tests
+    - ./build/tests/betting_test --log_level=message
+    - ./build/tests/chain_test --log_level=message
+    - ./build/tests/cli_test --log_level=message
+  tags:
+    - builder
+  when:
+    manual
+  timeout:
+    1h
+
+dockerize-testnet:
+  stage: dockerize
+  dependencies:
+    - test-testnet
+  variables:
+    IMAGE: $CI_REGISTRY_IMAGE/testnet/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
+  before_script:
+    - docker info
+    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
+  script:
+    - docker build --no-cache -t $IMAGE .
+    - docker push $IMAGE
+  after_script:
+    - docker rmi $IMAGE
+  tags:
+    - builder
+  when:
+    manual
+  timeout:
+    3h

@@ -22,6 +22,37 @@ endif()
 list( APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMakeModules" )

+function(get_linux_lsb_release_information)
+    find_program(LSB_RELEASE_EXEC lsb_release)
+    if(NOT LSB_RELEASE_EXEC)
+        message(FATAL_ERROR "Could not detect lsb_release executable, can not gather required information")
+    endif()
+
+    execute_process(COMMAND "${LSB_RELEASE_EXEC}" --short --id OUTPUT_VARIABLE LSB_RELEASE_ID_SHORT OUTPUT_STRIP_TRAILING_WHITESPACE)
+    execute_process(COMMAND "${LSB_RELEASE_EXEC}" --short --release OUTPUT_VARIABLE LSB_RELEASE_VERSION_SHORT OUTPUT_STRIP_TRAILING_WHITESPACE)
+    execute_process(COMMAND "${LSB_RELEASE_EXEC}" --short --codename OUTPUT_VARIABLE LSB_RELEASE_CODENAME_SHORT OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+    set(LSB_RELEASE_ID_SHORT "${LSB_RELEASE_ID_SHORT}" PARENT_SCOPE)
+    set(LSB_RELEASE_VERSION_SHORT "${LSB_RELEASE_VERSION_SHORT}" PARENT_SCOPE)
+    set(LSB_RELEASE_CODENAME_SHORT "${LSB_RELEASE_CODENAME_SHORT}" PARENT_SCOPE)
+endfunction()
+
+if(CMAKE_SYSTEM_NAME MATCHES "Linux")
+    find_package(cppzmq)
+    target_link_libraries(cppzmq)
+
+    get_linux_lsb_release_information()
+    message(STATUS "Linux ${LSB_RELEASE_ID_SHORT} ${LSB_RELEASE_VERSION_SHORT} ${LSB_RELEASE_CODENAME_SHORT}")
+    string(REGEX MATCHALL "([0-9]+)" arg_list ${LSB_RELEASE_VERSION_SHORT})
+    list( LENGTH arg_list listlen )
+    if (NOT listlen)
+        message(FATAL_ERROR "Could not detect Ubuntu version")
+    endif()
+    list(GET arg_list 0 output)
+    message("Ubuntu version is: ${output}")
+    add_definitions(-DPEERPLAYS_UBUNTU_VERSION=${output})
+endif()
+
 # function to help with cUrl
 macro(FIND_CURL)
   if (NOT WIN32 AND NOT APPLE AND CURL_STATICLIB)

Dockerfile (38 lines changed)

@@ -1,5 +1,5 @@
 FROM ubuntu:20.04
-MAINTAINER PeerPlays Blockchain Standards Association
+MAINTAINER Peerplays Blockchain Standards Association

 #===============================================================================
 # Ubuntu setup
@@ -23,13 +23,12 @@ RUN \
       libbz2-dev \
       libcurl4-openssl-dev \
       libncurses-dev \
-      libreadline-dev \
       libsnappy-dev \
       libssl-dev \
       libtool \
       libzip-dev \
-      libzmq3-dev \
       locales \
+      lsb-release \
       mc \
       nano \
       net-tools \
@@ -40,6 +39,7 @@ RUN \
       python3 \
       python3-jinja2 \
       sudo \
+      systemd-coredump \
       wget

 ENV HOME /home/peerplays
@@ -53,6 +53,38 @@ RUN echo 'peerplays:peerplays' | chpasswd
 # SSH
 EXPOSE 22

+#===============================================================================
+# libzmq setup
+#===============================================================================
+
+WORKDIR /home/peerplays/
+
+RUN \
+    wget https://github.com/zeromq/libzmq/archive/refs/tags/v4.3.4.zip && \
+    unzip v4.3.4.zip && \
+    cd libzmq-4.3.4 && \
+    mkdir build && \
+    cd build && \
+    cmake .. && \
+    make -j$(nproc) install && \
+    ldconfig
+
+#===============================================================================
+# cppzmq setup
+#===============================================================================
+
+WORKDIR /home/peerplays/
+
+RUN \
+    wget https://github.com/zeromq/cppzmq/archive/refs/tags/v4.8.1.zip && \
+    unzip v4.8.1.zip && \
+    cd cppzmq-4.8.1 && \
+    mkdir build && \
+    cd build && \
+    cmake .. && \
+    make -j$(nproc) install && \
+    ldconfig
+
 #===============================================================================
 # Peerplays setup
 #===============================================================================

Dockerfile.18.04 (new file, 159 lines)

@@ -0,0 +1,159 @@
FROM ubuntu:18.04
MAINTAINER Peerplays Blockchain Standards Association

#===============================================================================
# Ubuntu setup
#===============================================================================

RUN \
    apt-get update -y && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y \
      apt-utils \
      autoconf \
      bash \
      build-essential \
      ca-certificates \
      dnsutils \
      doxygen \
      expect \
      git \
      graphviz \
      libbz2-dev \
      libcurl4-openssl-dev \
      libncurses-dev \
      libsnappy-dev \
      libssl-dev \
      libtool \
      libzip-dev \
      locales \
      lsb-release \
      mc \
      nano \
      net-tools \
      ntp \
      openssh-server \
      pkg-config \
      perl \
      python3 \
      python3-jinja2 \
      sudo \
      systemd-coredump \
      wget

ENV HOME /home/peerplays
RUN useradd -rm -d /home/peerplays -s /bin/bash -g root -G sudo -u 1000 peerplays
RUN echo "peerplays ALL=(ALL) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/peerplays
RUN chmod 440 /etc/sudoers.d/peerplays

RUN service ssh start
RUN echo 'peerplays:peerplays' | chpasswd

# SSH
EXPOSE 22

#===============================================================================
# Boost setup
#===============================================================================

WORKDIR /home/peerplays/

RUN \
    wget -c 'http://sourceforge.net/projects/boost/files/boost/1.67.0/boost_1_67_0.tar.bz2/download' -O boost_1_67_0.tar.bz2 && \
    tar xjf boost_1_67_0.tar.bz2 && \
    cd boost_1_67_0/ && \
    ./bootstrap.sh && \
    ./b2 install

#===============================================================================
# cmake setup
#===============================================================================

WORKDIR /home/peerplays/

RUN \
    wget -c 'https://cmake.org/files/v3.23/cmake-3.23.1-linux-x86_64.sh' -O cmake-3.23.1-linux-x86_64.sh && \
    chmod 755 ./cmake-3.23.1-linux-x86_64.sh && \
    ./cmake-3.23.1-linux-x86_64.sh --prefix=/usr/ --skip-license && \
    cmake --version

#===============================================================================
# libzmq setup
#===============================================================================

WORKDIR /home/peerplays/

RUN \
    wget https://github.com/zeromq/libzmq/archive/refs/tags/v4.3.4.zip && \
    unzip v4.3.4.zip && \
    cd libzmq-4.3.4 && \
    mkdir build && \
    cd build && \
    cmake .. && \
    make -j$(nproc) install && \
    ldconfig

#===============================================================================
# cppzmq setup
#===============================================================================

WORKDIR /home/peerplays/

RUN \
    wget https://github.com/zeromq/cppzmq/archive/refs/tags/v4.8.1.zip && \
    unzip v4.8.1.zip && \
    cd cppzmq-4.8.1 && \
    mkdir build && \
    cd build && \
    cmake .. && \
    make -j$(nproc) install && \
    ldconfig

#===============================================================================
# Peerplays setup
#===============================================================================

WORKDIR /home/peerplays/

## Clone Peerplays
#RUN \
#    git clone https://gitlab.com/PBSA/peerplays.git && \
#    cd peerplays && \
#    git checkout develop && \
#    git submodule update --init --recursive && \
#    git branch --show-current && \
#    git log --oneline -n 5

# Add local source
ADD . peerplays

# Configure Peerplays
RUN \
    cd peerplays && \
    mkdir build && \
    cd build && \
    cmake -DCMAKE_BUILD_TYPE=Release ..

# Build Peerplays
RUN \
    cd peerplays/build && \
    make -j$(nproc) cli_wallet witness_node

WORKDIR /home/peerplays/peerplays-network

# Setup Peerplays runimage
RUN \
    ln -s /home/peerplays/peerplays/build/programs/cli_wallet/cli_wallet ./ && \
    ln -s /home/peerplays/peerplays/build/programs/witness_node/witness_node ./

RUN ./witness_node --create-genesis-json genesis.json && \
    rm genesis.json

RUN chown peerplays:root -R /home/peerplays/peerplays-network

# Peerplays RPC
EXPOSE 8090
# Peerplays P2P:
EXPOSE 9777

# Peerplays
CMD ["./witness_node", "-d", "./witness_node_data_dir"]

README.md (153 lines changed)

@@ -6,24 +6,49 @@ This is a quick introduction to get new developers and witnesses up to speed on
 # Building and Installation Instructions

-Officially supported OS is Ubuntu 20.04.
+Officially supported OS are Ubuntu 20.04 and Ubuntu 18.04.
+
+## Ubuntu 20.04

 Following dependencies are needed for a clean install of Ubuntu 20.04:
 ```
 sudo apt-get install \
-apt-utils autoconf bash build-essential ca-certificates clang-format cmake
+apt-utils autoconf bash build-essential ca-certificates clang-format cmake \
 dnsutils doxygen expect git graphviz libboost-all-dev libbz2-dev \
-libcurl4-openssl-dev libncurses-dev libreadline-dev libsnappy-dev \
-libssl-dev libtool libzip-dev libzmq3-dev locales mc nano net-tools ntp \
-openssh-server pkg-config perl python3 python3-jinja2 sudo wget
+libcurl4-openssl-dev libncurses-dev libsnappy-dev \
+libssl-dev libtool libzip-dev locales lsb-release mc nano net-tools ntp \
+openssh-server pkg-config perl python3 python3-jinja2 sudo \
+systemd-coredump wget
 ```

-## Building Peerplays
+Install libzmq from source:
+```
+wget https://github.com/zeromq/libzmq/archive/refs/tags/v4.3.4.zip
+unzip v4.3.4.zip
+cd libzmq-4.3.4
+mkdir build
+cd build
+cmake ..
+make -j$(nproc)
+sudo make install
+sudo ldconfig
+```
+
+Install cppzmq from source:
+```
+wget https://github.com/zeromq/cppzmq/archive/refs/tags/v4.8.1.zip
+unzip v4.8.1.zip
+cd cppzmq-4.8.1
+mkdir build
+cd build
+cmake ..
+make -j$(nproc)
+sudo make install
+sudo ldconfig
+```
+
+Building Peerplays
 ```
-mkdir $HOME/src
-cd $HOME/src
 git clone https://gitlab.com/PBSA/peerplays.git
 cd peerplays
 git submodule update --init --recursive
@@ -41,7 +66,84 @@ cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_PEERPLAYS_TESTNET=1
 # make -j8 for 32GB RAM
 make -j$(nproc)

-make install # this can install the executable files under /usr/local
+sudo make install # this can install the executable files under /usr/local
+```
+
+## Ubuntu 18.04
+
+Following dependencies are needed for a clean install of Ubuntu 18.04:
+```
+sudo apt-get install \
+apt-utils autoconf bash build-essential ca-certificates clang-format \
+dnsutils doxygen expect git graphviz libbz2-dev \
+libcurl4-openssl-dev libncurses-dev libsnappy-dev \
+libssl-dev libtool libzip-dev locales lsb-release mc nano net-tools ntp \
+openssh-server pkg-config perl python3 python3-jinja2 sudo \
+systemd-coredump wget
+```
+
+Install Boost libraries from source
+```
+wget -c 'http://sourceforge.net/projects/boost/files/boost/1.67.0/boost_1_67_0.tar.bz2/download' -O boost_1_67_0.tar.bz2
+tar xjf boost_1_67_0.tar.bz2
+cd boost_1_67_0/
+./bootstrap.sh
+sudo ./b2 install
+```
+
+Install cmake
+```
+wget -c 'https://cmake.org/files/v3.23/cmake-3.23.1-linux-x86_64.sh' -O cmake-3.23.1-linux-x86_64.sh
+chmod 755 ./cmake-3.23.1-linux-x86_64.sh
+sudo ./cmake-3.23.1-linux-x86_64.sh --prefix=/usr/ --skip-license
+```
+
+Install libzmq from source:
+```
+wget https://github.com/zeromq/libzmq/archive/refs/tags/v4.3.4.zip
+unzip v4.3.4.zip
+cd libzmq-4.3.4
+mkdir build
+cd build
+cmake ..
+make -j$(nproc)
+sudo make install
+sudo ldconfig
+```
+
+Install cppzmq from source:
+```
+wget https://github.com/zeromq/cppzmq/archive/refs/tags/v4.8.1.zip
+unzip v4.8.1.zip
+cd cppzmq-4.8.1
+mkdir build
+cd build
+cmake ..
+make -j$(nproc)
+sudo make install
+sudo ldconfig
+```
+
+Building Peerplays
+```
+git clone https://gitlab.com/PBSA/peerplays.git
+cd peerplays
+git submodule update --init --recursive
+
+# If you want to build Mainnet node
+cmake -DCMAKE_BUILD_TYPE=Release
+
+# If you want to build Testnet node
+cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_PEERPLAYS_TESTNET=1
+
+# Update -j flag depending on your current system specs;
+# Recommended 4GB of RAM per 1 CPU core
+# make -j2 for 8GB RAM
+# make -j4 for 16GB RAM
+# make -j8 for 32GB RAM
+make -j$(nproc)
+
+sudo make install # this can install the executable files under /usr/local
 ```

@@ -62,15 +164,36 @@ sudo usermod -a -G docker $USER
 docker pull datasecuritynode/peerplays:latest
 ```

-### Building docker image manually
+### Building docker images manually
 ```
-# Build docker image (from the project root, must be a docker group member)
-docker build -t peerplays .
+# Checkout the code
+git clone https://gitlab.com/PBSA/peerplays.git
+cd peerplays
+
+# Checkout the branch you want
+# E.g.
+# git checkout beatrice
+# git checkout develop
git checkout master
+
+git submodule update --init --recursive
+
+# Execute from the project root, must be a docker group member
+
+# Build docker image, using Ubuntu 20.04 base
+docker build --no-cache -f Dockerfile -t peerplays .
+
+# Build docker image, using Ubuntu 18.04 base
+docker build --no-cache -f Dockerfile.18.04 -t peerplays-18-04 .
 ```

 ### Start docker image
 ```
-docker start peerplays
+# Start docker image, using Ubuntu 20.04 base
+docker run peerplays:latest
+
+# Start docker image, using Ubuntu 18.04 base
+docker run peerplays-18-04:latest
 ```

 Rest of the instructions on starting the chain remains same.

|
||||||
#include <boost/range/algorithm/reverse.hpp>
|
#include <boost/range/algorithm/reverse.hpp>
|
||||||
#include <boost/signals2.hpp>
|
#include <boost/signals2.hpp>
|
||||||
|
|
||||||
|
#include <atomic>
|
||||||
#include <iostream>
|
#include <iostream>
|
||||||
|
|
||||||
#include <fc/log/file_appender.hpp>
|
#include <fc/log/file_appender.hpp>
|
||||||
|
|
@ -107,6 +108,7 @@ public:
|
||||||
fc::optional<fc::temp_file> _lock_file;
|
fc::optional<fc::temp_file> _lock_file;
|
||||||
bool _is_block_producer = false;
|
bool _is_block_producer = false;
|
||||||
bool _force_validate = false;
|
bool _force_validate = false;
|
||||||
|
std::atomic_bool _running{true};
|
||||||
|
|
||||||
void reset_p2p_node(const fc::path &data_dir) {
|
void reset_p2p_node(const fc::path &data_dir) {
|
||||||
try {
|
try {
|
||||||
|
|
@ -115,67 +117,29 @@ public:
|
||||||
_p2p_network->load_configuration(data_dir / "p2p");
|
_p2p_network->load_configuration(data_dir / "p2p");
|
||||||
_p2p_network->set_node_delegate(this);
|
_p2p_network->set_node_delegate(this);
|
||||||
|
|
||||||
|
vector<string> all_seeds;
|
||||||
|
|
||||||
if (_options->count("seed-node")) {
|
if (_options->count("seed-node")) {
|
||||||
auto seeds = _options->at("seed-node").as<vector<string>>();
|
auto seeds = _options->at("seed-node").as<vector<string>>();
|
||||||
for (const string &endpoint_string : seeds) {
|
all_seeds.insert(all_seeds.end(), seeds.begin(), seeds.end());
|
||||||
try {
|
|
||||||
std::vector<fc::ip::endpoint> endpoints = resolve_string_to_ip_endpoints(endpoint_string);
|
|
||||||
for (const fc::ip::endpoint &endpoint : endpoints) {
|
|
||||||
ilog("Adding seed node ${endpoint}", ("endpoint", endpoint));
|
|
||||||
_p2p_network->add_node(endpoint);
|
|
||||||
_p2p_network->connect_to_endpoint(endpoint);
|
|
||||||
}
|
|
||||||
} catch (const fc::exception &e) {
|
|
||||||
wlog("caught exception ${e} while adding seed node ${endpoint}",
|
|
||||||
("e", e.to_detail_string())("endpoint", endpoint_string));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if (_options->count("seed-nodes")) {
|
if (_options->count("seed-nodes")) {
|
||||||
auto seeds_str = _options->at("seed-nodes").as<string>();
|
auto seeds_str = _options->at("seed-nodes").as<string>();
|
||||||
auto seeds = fc::json::from_string(seeds_str).as<vector<string>>(2);
|
auto seeds = fc::json::from_string(seeds_str).as<vector<string>>(2);
|
||||||
for (const string &endpoint_string : seeds) {
|
all_seeds.insert(all_seeds.end(), seeds.begin(), seeds.end());
|
||||||
try {
|
}
|
||||||
std::vector<fc::ip::endpoint> endpoints = resolve_string_to_ip_endpoints(endpoint_string);
|
|
||||||
for (const fc::ip::endpoint &endpoint : endpoints) {
|
|
||||||
ilog("Adding seed node ${endpoint}", ("endpoint", endpoint));
|
|
||||||
_p2p_network->add_node(endpoint);
|
|
||||||
}
|
|
||||||
} catch (const fc::exception &e) {
|
|
||||||
wlog("caught exception ${e} while adding seed node ${endpoint}",
|
|
||||||
("e", e.to_detail_string())("endpoint", endpoint_string));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// t.me/peerplays #seednodes
|
|
||||||
vector<string> seeds = {
|
|
||||||
#ifdef BUILD_PEERPLAYS_TESTNET
|
|
||||||
|
|
||||||
#else
|
for (const string &endpoint_string : all_seeds) {
|
||||||
"51.222.110.110:9777",
|
try {
|
||||||
"95.216.90.243:9777",
|
std::vector<fc::ip::endpoint> endpoints = resolve_string_to_ip_endpoints(endpoint_string);
|
||||||
"96.46.48.98:19777",
|
for (const fc::ip::endpoint &endpoint : endpoints) {
|
||||||
"96.46.48.98:29777",
|
ilog("Adding seed node ${endpoint}", ("endpoint", endpoint));
|
||||||
"96.46.48.98:39777",
|
_p2p_network->add_node(endpoint);
|
||||||
"96.46.48.98:49777",
|
|
||||||
"96.46.48.98:59777",
|
|
||||||
"seed.i9networks.net.br:9777",
|
|
||||||
"witness.serverpit.com:9777"
|
|
||||||
#endif
|
|
||||||
};
|
|
||||||
|
|
||||||
for (const string &endpoint_string : seeds) {
|
|
||||||
try {
|
|
||||||
std::vector<fc::ip::endpoint> endpoints = resolve_string_to_ip_endpoints(endpoint_string);
|
|
||||||
for (const fc::ip::endpoint &endpoint : endpoints) {
|
|
||||||
ilog("Adding seed node ${endpoint}", ("endpoint", endpoint));
|
|
||||||
_p2p_network->add_node(endpoint);
|
|
||||||
}
|
|
||||||
} catch (const fc::exception &e) {
|
|
||||||
wlog("caught exception ${e} while adding seed node ${endpoint}",
|
|
||||||
("e", e.to_detail_string())("endpoint", endpoint_string));
|
|
||||||
}
|
}
|
||||||
|
} catch (const fc::exception &e) {
|
||||||
|
wlog("caught exception ${e} while adding seed node ${endpoint}",
|
||||||
|
("e", e.to_detail_string())("endpoint", endpoint_string));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -450,6 +414,12 @@ public:
|
||||||
*/
|
*/
|
||||||
virtual bool handle_block(const graphene::net::block_message &blk_msg, bool sync_mode,
|
virtual bool handle_block(const graphene::net::block_message &blk_msg, bool sync_mode,
|
||||||
std::vector<fc::uint160_t> &contained_transaction_message_ids) override {
|
std::vector<fc::uint160_t> &contained_transaction_message_ids) override {
|
||||||
|
|
||||||
|
// check point for the threads which may be cancled on application shutdown
|
||||||
|
if (!_running.load()) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
auto latency = fc::time_point::now() - blk_msg.block.timestamp;
|
auto latency = fc::time_point::now() - blk_msg.block.timestamp;
|
||||||
FC_ASSERT((latency.count() / 1000) > -5000, "Rejecting block with timestamp in the future");
|
FC_ASSERT((latency.count() / 1000) > -5000, "Rejecting block with timestamp in the future");
|
||||||
|
|
@ -860,9 +830,24 @@ application::~application() {
|
||||||
|
|
||||||
void application::set_program_options(boost::program_options::options_description &cli,
|
void application::set_program_options(boost::program_options::options_description &cli,
|
||||||
boost::program_options::options_description &cfg) const {
|
boost::program_options::options_description &cfg) const {
|
||||||
|
|
||||||
|
std::vector<string> seed_nodes = {
|
||||||
|
#ifdef BUILD_PEERPLAYS_TESTNET
|
||||||
|
#else
|
||||||
|
"51.222.110.110:9777",
|
||||||
|
"95.216.90.243:9777",
|
||||||
|
"ca.peerplays.info:9777",
|
||||||
|
"de.peerplays.xyz:9777",
|
||||||
|
"pl.peerplays.org:9777",
|
||||||
|
"seed.i9networks.net.br:9777",
|
||||||
|
"witness.serverpit.com:9777"
|
||||||
|
#endif
|
||||||
|
};
|
||||||
|
std::string seed_nodes_str = fc::json::to_string(seed_nodes);
|
||||||
|
|
||||||
cfg.add_options()("p2p-endpoint", bpo::value<string>()->default_value("0.0.0.0:9777"), "Endpoint for P2P node to listen on");
|
cfg.add_options()("p2p-endpoint", bpo::value<string>()->default_value("0.0.0.0:9777"), "Endpoint for P2P node to listen on");
|
||||||
cfg.add_options()("seed-node,s", bpo::value<vector<string>>()->composing(), "P2P nodes to connect to on startup (may specify multiple times)");
|
cfg.add_options()("seed-node,s", bpo::value<vector<string>>()->composing(), "P2P nodes to connect to on startup (may specify multiple times)");
|
||||||
cfg.add_options()("seed-nodes", bpo::value<string>()->composing(), "JSON array of P2P nodes to connect to on startup");
|
cfg.add_options()("seed-nodes", bpo::value<string>()->composing()->default_value(seed_nodes_str), "JSON array of P2P nodes to connect to on startup");
|
||||||
cfg.add_options()("checkpoint,c", bpo::value<vector<string>>()->composing(), "Pairs of [BLOCK_NUM,BLOCK_ID] that should be enforced as checkpoints.");
|
cfg.add_options()("checkpoint,c", bpo::value<vector<string>>()->composing(), "Pairs of [BLOCK_NUM,BLOCK_ID] that should be enforced as checkpoints.");
|
||||||
cfg.add_options()("rpc-endpoint", bpo::value<string>()->default_value("127.0.0.1:8090"), "Endpoint for websocket RPC to listen on");
|
cfg.add_options()("rpc-endpoint", bpo::value<string>()->default_value("127.0.0.1:8090"), "Endpoint for websocket RPC to listen on");
|
||||||
cfg.add_options()("rpc-tls-endpoint", bpo::value<string>()->implicit_value("127.0.0.1:8089"), "Endpoint for TLS websocket RPC to listen on");
|
cfg.add_options()("rpc-tls-endpoint", bpo::value<string>()->implicit_value("127.0.0.1:8089"), "Endpoint for TLS websocket RPC to listen on");
|
||||||
|
|
@ -1008,6 +993,7 @@ void application::shutdown_plugins() {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
void application::shutdown() {
|
void application::shutdown() {
|
||||||
|
my->_running.store(false);
|
||||||
if (my->_p2p_network)
|
if (my->_p2p_network)
|
||||||
my->_p2p_network->close();
|
my->_p2p_network->close();
|
||||||
if (my->_chain_db)
|
if (my->_chain_db)
|
||||||
|
|
|
||||||
|
|
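The `_running` flag added above gives message handlers a cheap checkpoint once `application::shutdown()` has run. Below is a minimal standalone sketch of that pattern; the class and member names are illustrative, not the application's real interface:

```cpp
#include <atomic>

struct application_impl_sketch {
   std::atomic_bool _running{true};

   // Mirrors the checkpoint added to handle_block(): bail out of in-flight
   // work as soon as shutdown has been requested.
   bool handle_block_sketch() {
      if (!_running.load())
         return true;           // drop the work, pretend it was handled
      // ... normal block handling would go here ...
      return false;
   }

   // Mirrors application::shutdown(): flip the flag before tearing down
   // the p2p network so worker threads stop picking up new blocks.
   void shutdown_sketch() {
      _running.store(false);
   }
};

int main() {
   application_impl_sketch app;
   app.shutdown_sketch();
   return app.handle_block_sketch() ? 0 : 1;
}
```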
@@ -29,12 +29,15 @@
 #include <graphene/chain/pts_address.hpp>
 #include <graphene/chain/tournament_object.hpp>

+#include <graphene/utilities/git_revision.hpp>
+
 #include <fc/bloom_filter.hpp>

 #include <fc/crypto/hex.hpp>
 #include <fc/rpc/api_connection.hpp>
 #include <fc/uint128.hpp>

+#include <boost/algorithm/string.hpp>
 #include <boost/multiprecision/cpp_int.hpp>
 #include <boost/range/iterator_range.hpp>
 #include <boost/rational.hpp>
@@ -90,6 +93,7 @@ public:
    processed_transaction get_transaction(uint32_t block_num, uint32_t trx_in_block) const;

    // Globals
+   version_info get_version_info() const;
    chain_property_object get_chain_properties() const;
    global_property_object get_global_properties() const;
    fc::variant_object get_config() const;
@@ -563,6 +567,27 @@ processed_transaction database_api_impl::get_transaction(uint32_t block_num, uin
 //                                                                  //
 //////////////////////////////////////////////////////////////////////

+version_info database_api::get_version_info() const {
+   return my->get_version_info();
+}
+
+version_info database_api_impl::get_version_info() const {
+
+   std::string witness_version(graphene::utilities::git_revision_description);
+   const size_t pos = witness_version.find('/');
+   if (pos != std::string::npos && witness_version.size() > pos)
+      witness_version = witness_version.substr(pos + 1);
+
+   version_info vi;
+   vi.version = witness_version;
+   vi.git_revision = graphene::utilities::git_revision_sha;
+   vi.built = std::string(__DATE__) + " at " + std::string(__TIME__);
+   vi.openssl = OPENSSL_VERSION_TEXT;
+   vi.boost = boost::replace_all_copy(std::string(BOOST_LIB_VERSION), "_", ".");
+
+   return vi;
+}
+
 chain_property_object database_api::get_chain_properties() const {
    return my->get_chain_properties();
 }

@@ -130,6 +130,14 @@ struct gpos_info {
    share_type account_vested_balance;
 };

+struct version_info {
+   string version;
+   string git_revision;
+   string built;
+   string openssl;
+   string boost;
+};
+
 /**
  * @brief The database_api class implements the RPC API for the chain database.
  *
@@ -218,6 +226,11 @@ public:
    // Globals //
    /////////////

+   /**
+    * @brief Retrieve the @ref version_info associated with the witness node
+    */
+   version_info get_version_info() const;
+
    /**
     * @brief Retrieve the @ref chain_property_object associated with the chain
     */
@@ -1040,6 +1053,7 @@ FC_REFLECT(graphene::app::market_ticker, (base)(quote)(latest)(lowest_ask)(highe
 FC_REFLECT(graphene::app::market_volume, (base)(quote)(base_volume)(quote_volume));
 FC_REFLECT(graphene::app::market_trade, (date)(price)(amount)(value));
 FC_REFLECT(graphene::app::gpos_info, (vesting_factor)(award)(total_amount)(current_subperiod)(last_voted_time)(allowed_withdraw_amount)(account_vested_balance));
+FC_REFLECT(graphene::app::version_info, (version)(git_revision)(built)(openssl)(boost));

 FC_API(graphene::app::database_api,
    // Objects
@@ -1060,6 +1074,7 @@ FC_API(graphene::app::database_api,
    (get_recent_transaction_by_id)

    // Globals
+   (get_version_info)
    (get_chain_properties)
    (get_global_properties)
    (get_config)

@@ -433,7 +433,12 @@ processed_transaction database::push_proposal(const proposal_object& proposal)
 {
    for( size_t i=old_applied_ops_size,n=_applied_ops.size(); i<n; i++ )
    {
-      ilog( "removing failed operation from applied_ops: ${op}", ("op", *(_applied_ops[i])) );
+      if(_applied_ops[i].valid()) {
+         ilog("removing failed operation from applied_ops: ${op}", ("op", *(_applied_ops[i])));
+      }
+      else{
+         ilog("Can't remove failed operation from applied_ops (operation is not valid), op_id : ${op_id}", ("op_id", i));
+      }
       _applied_ops[i].reset();
    }
 }
@@ -619,7 +624,7 @@ uint32_t database::push_applied_operation( const operation& op )
 void database::set_applied_operation_result( uint32_t op_id, const operation_result& result )
 {
    assert( op_id < _applied_ops.size() );
-   if( _applied_ops[op_id] )
+   if( _applied_ops[op_id].valid() )
       _applied_ops[op_id]->result = result;
    else
    {
@@ -806,7 +811,7 @@ processed_transaction database::_apply_transaction(const signed_transaction& trx
          return get_account_custom_authorities(id, op);
       };
       trx.verify_authority( chain_id, get_active, get_owner, get_custom,
-                            MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(head_block_time()),
+                            true,
                             get_global_properties().parameters.max_authority_depth );
    }

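The `push_proposal` change above only dereferences a stored operation after checking `valid()` on the optional slot. A small sketch of the same guard, using `std::optional` as a stand-in for `fc::optional`; the container name is illustrative:

```cpp
#include <cstdio>
#include <optional>
#include <vector>

int main() {
   // Stand-in for _applied_ops: some slots may already be empty.
   std::vector<std::optional<int>> applied_ops{1, std::nullopt, 3};

   for (size_t i = 0; i < applied_ops.size(); ++i) {
      if (applied_ops[i].has_value())
         std::printf("removing failed operation %zu -> %d\n", i, *applied_ops[i]);
      else
         std::printf("operation %zu is not valid, skipping dereference\n", i);
      applied_ops[i].reset();   // always clear the slot, as the diff does
   }
}
```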
@@ -660,6 +660,10 @@ void database::update_active_committee_members()

 void database::update_active_sons()
 { try {
+   if (head_block_time() < HARDFORK_SON_TIME) {
+      return;
+   }
+
    assert( _son_count_histogram_buffer.size() > 0 );
    share_type stake_target = (_total_voting_stake-_son_count_histogram_buffer[0]) / 2;
@@ -759,11 +763,7 @@ void database::update_active_sons()
       }
    }

-   if (son_sets_equal) {
-      ilog( "Active SONs set NOT CHANGED" );
-   } else {
-      ilog( "Active SONs set CHANGED" );
-
+   if (!son_sets_equal) {
       update_son_wallet(new_active_sons);
       update_son_statuses(cur_active_sons, new_active_sons);
    }

@@ -625,7 +625,6 @@ void database::notify_changed_objects()
    if( _undo_db.enabled() )
    {
       const auto& head_undo = _undo_db.head();
-      auto chain_time = head_block_time();

       // New
       if( !new_objects.empty() )
@@ -637,8 +636,7 @@ void database::notify_changed_objects()
            new_ids.push_back(item);
            auto obj = find_object(item);
            if(obj != nullptr)
-              get_relevant_accounts(obj, new_accounts_impacted,
-                                    MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(chain_time));
+              get_relevant_accounts(obj, new_accounts_impacted, true);
         }

         GRAPHENE_TRY_NOTIFY( new_objects, new_ids, new_accounts_impacted)
@@ -652,8 +650,7 @@ void database::notify_changed_objects()
         for( const auto& item : head_undo.old_values )
         {
            changed_ids.push_back(item.first);
-           get_relevant_accounts(item.second.get(), changed_accounts_impacted,
-                                 MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(chain_time));
+           get_relevant_accounts(item.second.get(), changed_accounts_impacted, true);
         }

         GRAPHENE_TRY_NOTIFY( changed_objects, changed_ids, changed_accounts_impacted)
@@ -670,8 +667,7 @@ void database::notify_changed_objects()
            removed_ids.emplace_back( item.first );
            auto obj = item.second.get();
            removed.emplace_back( obj );
-           get_relevant_accounts(obj, removed_accounts_impacted,
-                                 MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(chain_time));
+           get_relevant_accounts(obj, removed_accounts_impacted, true);
         }

         GRAPHENE_TRY_NOTIFY( removed_objects, removed_ids, removed, removed_accounts_impacted)

@@ -1,10 +0,0 @@
-// #210 Check authorities on custom_operation
-#ifndef HARDFORK_CORE_210_TIME
-#ifdef BUILD_PEERPLAYS_TESTNET
-#define HARDFORK_CORE_210_TIME (fc::time_point_sec::from_iso_string("2030-01-01T00:00:00")) // (Not yet scheduled)
-#else
-#define HARDFORK_CORE_210_TIME (fc::time_point_sec::from_iso_string("2030-01-01T00:00:00")) // (Not yet scheduled)
-#endif
-// Bugfix: pre-HF 210, custom_operation's required_auths field was ignored.
-#define MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(chain_time) (chain_time <= HARDFORK_CORE_210_TIME)
-#endif

@@ -1,7 +1,7 @@
 #ifndef HARDFORK_SON3_TIME
 #ifdef BUILD_PEERPLAYS_TESTNET
-#define HARDFORK_SON3_TIME (fc::time_point_sec::from_iso_string("2022-04-30T00:00:00"))
+#define HARDFORK_SON3_TIME (fc::time_point_sec::from_iso_string("2022-07-16T00:00:00"))
 #else
-#define HARDFORK_SON3_TIME (fc::time_point_sec::from_iso_string("2022-04-30T00:00:00"))
+#define HARDFORK_SON3_TIME (fc::time_point_sec::from_iso_string("2022-07-16T00:00:00"))
 #endif
 #endif

@@ -131,6 +131,7 @@ namespace graphene { namespace chain {
 }}

 FC_REFLECT_DERIVED( graphene::chain::dynamic_global_property_object, (graphene::db::object),
+                    (random)
                     (head_block_number)
                     (head_block_id)
                     (time)

@@ -18,7 +18,7 @@ namespace graphene { namespace chain {
          static const uint8_t type_id = son_wallet_deposit_object_type;

          time_point_sec timestamp;
-         uint32_t block_num;
+         uint32_t block_num = 0;
          sidechain_type sidechain = sidechain_type::unknown;
          std::string sidechain_uid;
          std::string sidechain_transaction_id;

@@ -18,7 +18,7 @@ namespace graphene { namespace chain {
          static const uint8_t type_id = son_wallet_withdraw_object_type;

          time_point_sec timestamp;
-         uint32_t block_num;
+         uint32_t block_num = 0;
          sidechain_type sidechain = sidechain_type::unknown;
          std::string peerplays_uid;
          std::string peerplays_transaction_id;

@@ -302,8 +302,7 @@ void_result proposal_create_evaluator::do_evaluate( const proposal_create_operat
    vector<authority> other;
    for( auto& op : o.proposed_ops )
    {
-      operation_get_required_authorities( op.op, auths, auths, other,
-                                          MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(block_time) );
+      operation_get_required_authorities( op.op, auths, auths, other, true );
    }

    FC_ASSERT( other.size() == 0 ); // TODO: what about other???
@@ -352,8 +351,7 @@ object_id_type proposal_create_evaluator::do_apply( const proposal_create_operat

    // TODO: consider caching values from evaluate?
    for( auto& op : _proposed_trx.operations )
-      operation_get_required_authorities( op, required_active, proposal.required_owner_approvals, other,
-                                          MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(chain_time) );
+      operation_get_required_authorities( op, required_active, proposal.required_owner_approvals, other, true);

    //All accounts which must provide both owner and active authority should be omitted from the active authority set;
    //owner authority approval implies active authority approval.

@@ -39,7 +39,7 @@ bool proposal_object::is_authorized_to_execute( database& db ) const
                         [&]( account_id_type id ){ return &id(db).owner; },
                         [&]( account_id_type id, const operation& op ){
                               return db.get_account_custom_authorities(id, op); },
-                        MUST_IGNORE_CUSTOM_OP_REQD_AUTHS( db.head_block_time() ),
+                        true,
                         db.get_global_properties().parameters.max_authority_depth,
                         true, /* allow committee */
                         available_active_approvals,

@@ -1 +1 @@
-Subproject commit 6171e973c7fcfc9e0a39eaee2f05da84416a90e6
+Subproject commit e7369949bea26f3201d8442ba78286a88df74762

@@ -30,6 +30,8 @@
 #include <iostream>
 #include <algorithm>
 #include <tuple>
+#include <random>
+
 #include <boost/tuple/tuple.hpp>
 #include <boost/circular_buffer.hpp>
@@ -67,7 +69,6 @@
 #include <fc/io/json.hpp>
 #include <fc/io/enum_type.hpp>
 #include <fc/io/raw_fwd.hpp>
-#include <fc/crypto/rand.hpp>
 #include <fc/network/rate_limiting.hpp>
 #include <fc/network/ip.hpp>
@@ -91,6 +92,7 @@
 #define DEFAULT_LOGGER "p2p"

 #define P2P_IN_DEDICATED_THREAD 1
+#define DISABLE_WITNESS_HF_CHECK 1

 #define INVOCATION_COUNTER(name) \
 static unsigned total_ ## name ## _counter = 0; \
@@ -827,7 +829,11 @@ namespace graphene { namespace net { namespace detail {
        _maximum_blocks_per_peer_during_syncing(GRAPHENE_NET_MAX_BLOCKS_PER_PEER_DURING_SYNCING)
     {
       _rate_limiter.set_actual_rate_time_constant(fc::seconds(2));
-      fc::rand_bytes(&_node_id.data[0], (int)_node_id.size());
+
+      using bytes_randomizer = std::independent_bits_engine<std::default_random_engine, CHAR_BIT, unsigned long>;
+      std::random_device rd;
+      bytes_randomizer br(rd());
+      std::generate(std::begin(_node_id.data), std::end(_node_id.data), std::ref(br));
     }

     node_impl::~node_impl()
@@ -887,7 +893,7 @@ namespace graphene { namespace net { namespace detail {
     void node_impl::p2p_network_connect_loop()
     {
       VERIFY_CORRECT_THREAD();
-      while (!_p2p_network_connect_loop_done.canceled())
+      while (!_p2p_network_connect_loop_done.canceled() && !_node_is_shutting_down)
       {
         try
         {
@@ -1331,7 +1337,7 @@ namespace graphene { namespace net { namespace detail {
       // reconnect with the rest of the network, or it might just futher isolate us.
       {
         // As usual, the first step is to walk through all our peers and figure out which
         // peers need action (disconneting, sending keepalives, etc), then we walk through
         // those lists yielding at our leisure later.
         ASSERT_TASK_NOT_PREEMPTED();
@@ -3961,6 +3967,8 @@ namespace graphene { namespace net { namespace detail {
     {
       VERIFY_CORRECT_THREAD();

+      _node_is_shutting_down = true;
+
       try
       {
         _potential_peer_db.close();
@@ -4573,7 +4581,7 @@ namespace graphene { namespace net { namespace detail {
             error_message_stream << "Unable to listen for connections on port " << listen_endpoint.port()
                                  << ", retrying in a few seconds\n";
             error_message_stream << "You can wait for it to become available, or restart this program using\n";
-            error_message_stream << "the --p2p-port option to specify another port\n";
+            error_message_stream << "the --p2p-endpoint option to specify another port\n";
             first = false;
           }
           else

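The node-id change above replaces `fc::rand_bytes` with a standard-library byte generator. A self-contained sketch of that technique follows; the 20-byte array is only a stand-in for the real `node_id` field:

```cpp
#include <algorithm>
#include <array>
#include <climits>
#include <cstdio>
#include <functional>
#include <random>

int main() {
   // Same engine type as the diff: emits CHAR_BIT-wide random values.
   using bytes_randomizer =
       std::independent_bits_engine<std::default_random_engine, CHAR_BIT, unsigned long>;

   std::random_device rd;        // non-deterministic seed
   bytes_randomizer br(rd());

   std::array<unsigned char, 20> node_id{};   // stand-in for _node_id.data
   std::generate(node_id.begin(), node_id.end(), std::ref(br));

   for (unsigned char b : node_id)
      std::printf("%02x", b);
   std::printf("\n");
}
```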
@@ -126,14 +126,12 @@ void account_history_plugin_impl::update_account_histories( const signed_block&
       flat_set<account_id_type> impacted;
       vector<authority> other;
       // fee payer is added here
-      operation_get_required_authorities( op.op, impacted, impacted, other,
-                                          MUST_IGNORE_CUSTOM_OP_REQD_AUTHS( db.head_block_time() ) );
+      operation_get_required_authorities( op.op, impacted, impacted, other, true );

       if( op.op.which() == operation::tag< account_create_operation >::value )
          impacted.insert( op.result.get<object_id_type>() );
       else
-         graphene::chain::operation_get_impacted_accounts( op.op, impacted,
-                                                           MUST_IGNORE_CUSTOM_OP_REQD_AUTHS(db.head_block_time()) );
+         graphene::chain::operation_get_impacted_accounts( op.op, impacted, true );
       if( op.op.which() == operation::tag< lottery_end_operation >::value )
       {
          auto lop = op.op.get< lottery_end_operation >();

@@ -173,14 +173,12 @@ bool elasticsearch_plugin_impl::update_account_histories( const signed_block& b
       flat_set<account_id_type> impacted;
       vector<authority> other;
       // fee_payer is added here
-      operation_get_required_authorities( op.op, impacted, impacted, other,
-                                          MUST_IGNORE_CUSTOM_OP_REQD_AUTHS( db.head_block_time() ) );
+      operation_get_required_authorities( op.op, impacted, impacted, other, true );

       if( op.op.which() == operation::tag< account_create_operation >::value )
          impacted.insert( op.result.get<object_id_type>() );
       else
-         operation_get_impacted_accounts( op.op, impacted,
-                                          MUST_IGNORE_CUSTOM_OP_REQD_AUTHS( db.head_block_time() ) );
+         operation_get_impacted_accounts( op.op, impacted, true );

       for( auto& a : other )
          for( auto& item : a.account_auths )

@@ -1,5 +1,6 @@
 #pragma once

+#include <mutex>
 #include <vector>

 #include <boost/program_options.hpp>
@@ -60,6 +61,7 @@ protected:
    std::map<std::string, std::string> private_keys;

    std::vector<std::string> son_listener_log;
+   std::mutex son_listener_log_mutex;

    void on_applied_block(const signed_block &b);

@@ -619,13 +619,15 @@ void sidechain_net_handler::settle_sidechain_transactions() {
 }

 void sidechain_net_handler::add_to_son_listener_log(std::string trx_id) {
+   const std::lock_guard<std::mutex> lock(son_listener_log_mutex);
    son_listener_log.insert(son_listener_log.begin(), trx_id);
    if (son_listener_log.size() > 33) {
-      son_listener_log.erase(son_listener_log.end());
+      son_listener_log.pop_back();
    }
 }

 std::vector<std::string> sidechain_net_handler::get_son_listener_log() {
+   const std::lock_guard<std::mutex> lock(son_listener_log_mutex);
    return son_listener_log;
 }

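The listener-log changes above bound the log at 33 entries and serialize access with a mutex. A standalone sketch of the same structure; the class and method names are illustrative, not the plugin's actual API:

```cpp
#include <mutex>
#include <string>
#include <utility>
#include <vector>

class listener_log_sketch {
public:
   void add(std::string trx_id) {
      const std::lock_guard<std::mutex> lock(mutex_);
      log_.insert(log_.begin(), std::move(trx_id));   // newest entry first
      if (log_.size() > 33)
         log_.pop_back();                             // drop the oldest entry
   }

   std::vector<std::string> snapshot() {
      const std::lock_guard<std::mutex> lock(mutex_); // copy under the lock
      return log_;
   }

private:
   std::vector<std::string> log_;
   std::mutex mutex_;
};

int main() {
   listener_log_sketch log;
   log.add("deadbeef");
   return static_cast<int>(log.snapshot().size()) - 1;
}
```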
@@ -1079,8 +1079,10 @@ std::vector<zmq::message_t> zmq_listener::receive_multipart() {

 void zmq_listener::handle_zmq() {
    int linger = 0;
-   socket.setsockopt(ZMQ_SUBSCRIBE, "hashblock", 9);
-   socket.setsockopt(ZMQ_LINGER, &linger, sizeof(linger));
+   auto rc = zmq_setsockopt(socket, ZMQ_SUBSCRIBE, "hashblock", 9);
+   FC_ASSERT(0 == rc);
+   rc = zmq_setsockopt(socket, ZMQ_LINGER, &linger, sizeof(linger));
+   FC_ASSERT(0 == rc);
    //socket.setsockopt( ZMQ_SUBSCRIBE, "hashtx", 6 );
    //socket.setsockopt( ZMQ_SUBSCRIBE, "rawblock", 8 );
    //socket.setsockopt( ZMQ_SUBSCRIBE, "rawtx", 5 );

@@ -29,6 +29,7 @@
 #include <sstream>
 #include <string>
 #include <list>
+#include <random>

 #include <boost/version.hpp>
 #include <boost/lexical_cast.hpp>
@@ -62,7 +63,6 @@
 #include <fc/crypto/hex.hpp>
 #include <fc/thread/mutex.hpp>
 #include <fc/thread/scoped_lock.hpp>
-#include <fc/crypto/rand.hpp>

 #include <graphene/app/api.hpp>
 #include <graphene/chain/asset_object.hpp>
@@ -7365,8 +7365,12 @@ signed_transaction wallet_api::rps_throw(game_id_type game_id,

    // construct the complete throw, the commit, and reveal
    rock_paper_scissors_throw full_throw;
-   fc::rand_bytes((char*)&full_throw.nonce1, sizeof(full_throw.nonce1));
-   fc::rand_bytes((char*)&full_throw.nonce2, sizeof(full_throw.nonce2));
+   std::random_device rd;
+   std::mt19937_64 gen(rd());
+   std::uniform_int_distribution<uint64_t> dis;
+   full_throw.nonce1 = dis(gen);
+   full_throw.nonce2 = dis(gen);

    full_throw.gesture = gesture;

    rock_paper_scissors_throw_commit commit_throw;

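The wallet change above swaps `fc::rand_bytes` for `<random>` when drawing the two rock-paper-scissors nonces. A minimal sketch of that draw, assuming the nonces are plain 64-bit integers as in the diff:

```cpp
#include <cstdint>
#include <cstdio>
#include <random>

int main() {
   std::random_device rd;                        // non-deterministic seed
   std::mt19937_64 gen(rd());                    // 64-bit Mersenne Twister
   std::uniform_int_distribution<uint64_t> dis;  // full uint64_t range by default

   const uint64_t nonce1 = dis(gen);             // stand-ins for full_throw.nonce1/2
   const uint64_t nonce2 = dis(gen);
   std::printf("%llu %llu\n",
               static_cast<unsigned long long>(nonce1),
               static_cast<unsigned long long>(nonce2));
}
```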
@@ -79,18 +79,12 @@ int main(int argc, char** argv) {
       node->set_program_options(cli, cfg);
       cfg_options.add(cfg);

-      cfg_options.add_options()
-            ("plugins", bpo::value<std::string>()->default_value("witness account_history market_history accounts_list affiliate_stats bookie"),
-             "Space-separated list of plugins to activate");
-
       auto witness_plug = node->register_plugin<witness_plugin::witness_plugin>();
       auto debug_witness_plug = node->register_plugin<debug_witness_plugin::debug_witness_plugin>();
       auto history_plug = node->register_plugin<account_history::account_history_plugin>();
       auto elasticsearch_plug = node->register_plugin<elasticsearch::elasticsearch_plugin>();
       auto es_objects_plug = node->register_plugin<es_objects::es_objects_plugin>();
       auto market_history_plug = node->register_plugin<market_history::market_history_plugin>();
-      //auto generate_genesis_plug = node->register_plugin<generate_genesis_plugin::generate_genesis_plugin>();
-      //auto generate_uia_sharedrop_genesis_plug = node->register_plugin<generate_uia_sharedrop_genesis::generate_uia_sharedrop_genesis_plugin>();
       auto list_plug = node->register_plugin<accounts_list::accounts_list_plugin>();
       auto affiliate_stats_plug = node->register_plugin<affiliate_stats::affiliate_stats_plugin>();
       auto bookie_plug = node->register_plugin<bookie::bookie_plugin>();

@@ -27,7 +27,7 @@
 #include <graphene/chain/match_object.hpp>
 #include <graphene/chain/tournament_object.hpp>

-#include <fc/crypto/rand.hpp>
+#include <random>

 using namespace graphene::chain;

@@ -276,8 +276,11 @@ void tournaments_helper::rps_throw(const game_id_type& game_id,

    // construct the complete throw, the commit, and reveal
    rock_paper_scissors_throw full_throw;
-   fc::rand_bytes((char*)&full_throw.nonce1, sizeof(full_throw.nonce1));
-   fc::rand_bytes((char*)&full_throw.nonce2, sizeof(full_throw.nonce2));
+   std::random_device rd;
+   std::mt19937_64 gen(rd());
+   std::uniform_int_distribution<uint64_t> dis;
+   full_throw.nonce1 = dis(gen);
+   full_throw.nonce2 = dis(gen);
    full_throw.gesture = gesture;

    rock_paper_scissors_throw_commit commit_throw;