diff --git a/.jenkinsci/artifacts.groovy b/.jenkinsci/artifacts.groovy
new file mode 100644
index 0000000000..3a5aa0a68b
--- /dev/null
+++ b/.jenkinsci/artifacts.groovy
@@ -0,0 +1,44 @@
+#!/usr/bin/env groovy
+
+def uploadArtifacts(filePaths, uploadPath, artifactServers=['artifact.soramitsu.co.jp']) {
+ def baseUploadPath = 'files'
+ def filePathsConverted = []
+ agentType = sh(script: 'uname', returnStdout: true).trim()
+ uploadPath = baseUploadPath + uploadPath
+ filePaths.each {
+ fp = sh(script: "ls -d ${it} | tr '\n' ','", returnStdout: true).trim()
+ filePathsConverted.addAll(fp.split(','))
+ }
+ def shaSumBinary = 'sha256sum'
+ def md5SumBinary = 'md5sum'
+ if (agentType == 'Darwin') {
+ shaSumBinary = 'shasum -a 256'
+ md5SumBinary = 'md5 -r'
+ }
+ sh "> \$(pwd)/batch.txt"
+ filePathsConverted.each {
+ sh "echo put ${it} $uploadPath >> \$(pwd)/batch.txt;"
+ sh "$shaSumBinary ${it} | cut -d' ' -f1 > \$(pwd)/\$(basename ${it}).sha256"
+ sh "$md5SumBinary ${it} | cut -d' ' -f1 > \$(pwd)/\$(basename ${it}).md5"
+ sh "echo put \$(pwd)/\$(basename ${it}).sha256 $uploadPath >> \$(pwd)/batch.txt;"
+ sh "echo put \$(pwd)/\$(basename ${it}).md5 $uploadPath >> \$(pwd)/batch.txt;"
+ }
+  // create every directory level of uploadPath one by one, since sftp's -mkdir is not recursive
+ uploadPath = uploadPath.split('/')
+ def p = ''
+ sh "> \$(pwd)/mkdirs.txt"
+ uploadPath.each {
+ p += "/${it}"
+ sh("echo -mkdir $p >> \$(pwd)/mkdirs.txt")
+ }
+
+ sshagent(['jenkins-artifact']) {
+ sh "ssh-agent"
+ artifactServers.each {
+ sh "sftp -b \$(pwd)/mkdirs.txt jenkins@${it} || true"
+ sh "sftp -b \$(pwd)/batch.txt jenkins@${it}"
+ }
+ }
+}
+
+return this
diff --git a/.jenkinsci/bindings.groovy b/.jenkinsci/bindings.groovy
index 4f20c09bb0..afc89afc82 100644
--- a/.jenkinsci/bindings.groovy
+++ b/.jenkinsci/bindings.groovy
@@ -1,27 +1,74 @@
#!/usr/bin/env groovy
-def doBindings() {
- def cmake_options = ""
- if (params.JavaBindings) {
- cmake_options += " -DSWIG_JAVA=ON "
- }
- if (params.PythonBindings) {
- cmake_options += " -DSWIG_PYTHON=ON "
- }
- // In case language specific options were not set,
- // build for each language
- if (!params.JavaBindings && !params.PythonBindings) {
- cmake_options += " -DSWIG_JAVA=ON -DSWIG_PYTHON=ON "
- }
+def doJavaBindings(buildType=Release) {
+ def currentPath = sh(script: "pwd", returnStdout: true).trim()
+ def commit = env.GIT_COMMIT
+ def artifactsPath = sprintf('%1$s/java-bindings-%2$s-%3$s-%4$s.zip',
+ [currentPath, buildType, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)])
sh """
cmake \
-H. \
-Bbuild \
- -DCMAKE_BUILD_TYPE=Release \
- ${cmake_options}
+ -DCMAKE_BUILD_TYPE=$buildType \
+ -DSWIG_JAVA=ON
+ """
+ sh "cd build; make -j${params.PARALLELISM} irohajava"
+ sh "zip -j $artifactsPath build/shared_model/bindings/*.java build/shared_model/bindings/libirohajava.so"
+ sh "cp $artifactsPath /tmp/bindings-artifact"
+ return artifactsPath
+}
+
+def doPythonBindings(buildType=Release) {
+ def currentPath = sh(script: "pwd", returnStdout: true).trim()
+ def commit = env.GIT_COMMIT
+ def supportPython2 = "OFF"
+ def artifactsPath = sprintf('%1$s/python-bindings-%2$s-%3$s-%4$s-%5$s.zip',
+ [currentPath, env.PBVersion, buildType, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)])
+ // do not use preinstalled libed25519
+ sh "rm -rf /usr/local/include/ed25519*; unlink /usr/local/lib/libed25519.so; rm -f /usr/local/lib/libed25519.so.1.2.2"
+ if (env.PBVersion == "python2") { supportPython2 = "ON" }
+ sh """
+ cmake \
+ -H. \
+ -Bbuild \
+ -DCMAKE_BUILD_TYPE=$buildType \
+ -DSWIG_PYTHON=ON \
+ -DSUPPORT_PYTHON2=$supportPython2
"""
sh "cmake --build build --target python_tests"
- sh "cd build; make -j${params.PARALLELISM} irohajava irohapy"
+ sh "cd build; make -j${params.PARALLELISM} irohapy"
+ sh "protoc --proto_path=schema --python_out=build/shared_model/bindings block.proto primitive.proto commands.proto queries.proto responses.proto endpoint.proto"
+ sh "${env.PBVersion} -m grpc_tools.protoc --proto_path=schema --python_out=build/shared_model/bindings --grpc_python_out=build/shared_model/bindings endpoint.proto yac.proto ordering.proto loader.proto"
+ sh "zip -j $artifactsPath build/shared_model/bindings/*.py build/shared_model/bindings/*.so"
+ sh "cp $artifactsPath /tmp/bindings-artifact"
+ return artifactsPath
+}
+
+def doAndroidBindings(abiVersion) {
+ def currentPath = sh(script: "pwd", returnStdout: true).trim()
+ def commit = env.GIT_COMMIT
+ def artifactsPath = sprintf('%1$s/android-bindings-%2$s-%3$s-%4$s-%5$s-%6$s.zip',
+ [currentPath, "\$PLATFORM", abiVersion, "\$BUILD_TYPE_A", sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)])
+ sh """
+ (cd /iroha; git init; git remote add origin https://github.com/hyperledger/iroha.git; \
+ git fetch --depth 1 origin develop; git checkout -t origin/develop)
+ """
+ sh """
+ . /entrypoint.sh; \
+ sed -i.bak "s~find_package(JNI REQUIRED)~SET(CMAKE_SWIG_FLAGS \\\${CMAKE_SWIG_FLAGS} -package \${PACKAGE})~" /iroha/shared_model/bindings/CMakeLists.txt; \
+ # TODO: might not be needed in the future
+ sed -i.bak "/target_include_directories(\\\${SWIG_MODULE_irohajava_REAL_NAME} PUBLIC/,+3d" /iroha/shared_model/bindings/CMakeLists.txt; \
+ sed -i.bak "s~swig_link_libraries(irohajava~swig_link_libraries(irohajava \"/protobuf/.build/lib\${PROTOBUF_LIB_NAME}.a\" \"\${NDK_PATH}/platforms/android-$abiVersion/\${ARCH}/usr/\${LIBP}/liblog.so\"~" /iroha/shared_model/bindings/CMakeLists.txt; \
+ sed -i.bak "s~find_library(protobuf_LIBRARY protobuf)~find_library(protobuf_LIBRARY \${PROTOBUF_LIB_NAME})~" /iroha/cmake/Modules/Findprotobuf.cmake; \
+ sed -i.bak "s~find_program(protoc_EXECUTABLE protoc~set(protoc_EXECUTABLE \"/protobuf/host_build/protoc\"~" /iroha/cmake/Modules/Findprotobuf.cmake; \
+ cmake -H/iroha/shared_model -B/iroha/shared_model/build -DCMAKE_SYSTEM_NAME=Android -DCMAKE_SYSTEM_VERSION=$abiVersion -DCMAKE_ANDROID_ARCH_ABI=\$PLATFORM \
+ -DANDROID_NDK=\$NDK_PATH -DCMAKE_ANDROID_STL_TYPE=c++_static -DCMAKE_BUILD_TYPE=\$BUILD_TYPE_A -DTESTING=OFF \
+ -DSHARED_MODEL_DISABLE_COMPATIBILITY=ON -DSWIG_JAVA=ON -DCMAKE_PREFIX_PATH=\$DEPS_DIR
+ """
+ sh "cmake --build /iroha/shared_model/build --target irohajava -- -j${params.PARALLELISM}"
+ sh "zip -j $artifactsPath /iroha/shared_model/build/bindings/*.java /iroha/shared_model/build/bindings/libirohajava.so"
+ sh "cp $artifactsPath /tmp/bindings-artifact"
+ return artifactsPath
}
return this
diff --git a/.jenkinsci/debug-build.groovy b/.jenkinsci/debug-build.groovy
index dd430e67d6..8616097d6b 100644
--- a/.jenkinsci/debug-build.groovy
+++ b/.jenkinsci/debug-build.groovy
@@ -1,106 +1,115 @@
#!/usr/bin/env groovy
def doDebugBuild(coverageEnabled=false) {
+ def dPullOrBuild = load ".jenkinsci/docker-pull-or-build.groovy"
+ def manifest = load ".jenkinsci/docker-manifest.groovy"
+ def pCommit = load ".jenkinsci/previous-commit.groovy"
def parallelism = params.PARALLELISM
+ def platform = sh(script: 'uname -m', returnStdout: true).trim()
+ def previousCommit = pCommit.previousCommitOrCurrent()
// params are always null unless job is started
// this is the case for the FIRST build only.
// So just set this to same value as default.
// This is a known bug. See https://issues.jenkins-ci.org/browse/JENKINS-41929
- if (parallelism == null) {
+ if (!parallelism) {
parallelism = 4
}
- if ("arm7" in env.NODE_NAME) {
+ if (env.NODE_NAME.contains('arm7')) {
parallelism = 1
}
sh "docker network create ${env.IROHA_NETWORK}"
+ def iC = dPullOrBuild.dockerPullOrUpdate("${platform}-develop-build",
+ "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile",
+ ['PARALLELISM': parallelism])
- docker.image('postgres:9.5').run(""
+ if (GIT_LOCAL_BRANCH == 'develop' && manifest.manifestSupportEnabled()) {
+ manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:develop-build",
+ ["${DOCKER_REGISTRY_BASENAME}:x86_64-develop-build",
+ "${DOCKER_REGISTRY_BASENAME}:armv7l-develop-build",
+ "${DOCKER_REGISTRY_BASENAME}:aarch64-develop-build"])
+ manifest.manifestAnnotate("${DOCKER_REGISTRY_BASENAME}:develop-build",
+ [
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:x86_64-develop-build",
+ arch: 'amd64', os: 'linux', osfeatures: [], variant: ''],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:armv7l-develop-build",
+ arch: 'arm', os: 'linux', osfeatures: [], variant: 'v7'],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:aarch64-develop-build",
+ arch: 'arm64', os: 'linux', osfeatures: [], variant: '']
+ ])
+ withCredentials([usernamePassword(credentialsId: 'docker-hub-credentials', usernameVariable: 'login', passwordVariable: 'password')]) {
+ manifest.manifestPush("${DOCKER_REGISTRY_BASENAME}:develop-build", login, password)
+ }
+ }
+ docker.image('postgres:9.5').withRun(""
+ " -e POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
+ " -e POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
+ " --name ${env.IROHA_POSTGRES_HOST}"
- + " --network=${env.IROHA_NETWORK}")
-
- def platform = sh(script: 'uname -m', returnStdout: true).trim()
- sh "curl -L -o /tmp/${env.GIT_COMMIT}/Dockerfile --create-dirs https://raw.githubusercontent.com/hyperledger/iroha/${env.GIT_COMMIT}/docker/develop/${platform}/Dockerfile"
- // pull docker image in case we don't have one
- // speeds up consequent image builds as we simply tag them
- sh "docker pull ${DOCKER_BASE_IMAGE_DEVELOP}"
- if (env.BRANCH_NAME == 'develop') {
- iC = docker.build("hyperledger/iroha:${GIT_COMMIT}-${BUILD_NUMBER}", "--build-arg PARALLELISM=${parallelism} -f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT}")
- docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') {
- iC.push("${platform}-develop")
- }
- }
- else {
- iC = docker.build("hyperledger/iroha-workflow:${GIT_COMMIT}-${BUILD_NUMBER}", "-f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT} --build-arg PARALLELISM=${parallelism}")
- }
- iC.inside(""
- + " -e IROHA_POSTGRES_HOST=${env.IROHA_POSTGRES_HOST}"
- + " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}"
- + " -e IROHA_POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
- + " -e IROHA_POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
- + " --network=${env.IROHA_NETWORK}"
- + " -v /var/jenkins/ccache:${CCACHE_DIR}") {
-
- def scmVars = checkout scm
- def cmakeOptions = ""
- if ( coverageEnabled ) {
- cmakeOptions = " -DCOVERAGE=ON "
- }
- env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
- env.IROHA_HOME = "/opt/iroha"
- env.IROHA_BUILD = "${env.IROHA_HOME}/build"
+ + " --network=${env.IROHA_NETWORK}") {
+ iC.inside(""
+ + " -e IROHA_POSTGRES_HOST=${env.IROHA_POSTGRES_HOST}"
+ + " -e IROHA_POSTGRES_PORT=${env.IROHA_POSTGRES_PORT}"
+ + " -e IROHA_POSTGRES_USER=${env.IROHA_POSTGRES_USER}"
+ + " -e IROHA_POSTGRES_PASSWORD=${env.IROHA_POSTGRES_PASSWORD}"
+ + " --network=${env.IROHA_NETWORK}"
+ + " -v /var/jenkins/ccache:${CCACHE_DIR}"
+ + " -v /tmp/${GIT_COMMIT}-${BUILD_NUMBER}:/tmp/${GIT_COMMIT}") {
- sh """
- ccache --version
- ccache --show-stats
- ccache --zero-stats
- ccache --max-size=5G
- """
- sh """
- cmake \
- -DTESTING=ON \
- -H. \
- -Bbuild \
- -DCMAKE_BUILD_TYPE=Debug \
- -DIROHA_VERSION=${env.IROHA_VERSION} \
- ${cmakeOptions}
- """
- sh "cmake --build build -- -j${parallelism}"
- sh "ccache --show-stats"
- if ( coverageEnabled ) {
- sh "cmake --build build --target coverage.init.info"
- }
- def testExitCode = sh(script: 'cmake --build build --target test', returnStatus: true)
- if (testExitCode != 0) {
- currentBuild.result = "UNSTABLE"
- }
- if ( coverageEnabled ) {
- sh "cmake --build build --target cppcheck"
- // Sonar
- if (env.CHANGE_ID != null) {
- sh """
- sonar-scanner \
- -Dsonar.github.disableInlineComments \
- -Dsonar.github.repository='hyperledger/iroha' \
- -Dsonar.analysis.mode=preview \
- -Dsonar.login=${SONAR_TOKEN} \
- -Dsonar.projectVersion=${BUILD_TAG} \
- -Dsonar.github.oauth=${SORABOT_TOKEN} \
- -Dsonar.github.pullRequest=${CHANGE_ID}
- """
+ def scmVars = checkout scm
+ def cmakeOptions = ""
+ if ( coverageEnabled ) {
+ cmakeOptions = " -DCOVERAGE=ON "
}
+ env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
+ env.IROHA_HOME = "/opt/iroha"
+ env.IROHA_BUILD = "${env.IROHA_HOME}/build"
- sh "cmake --build build --target coverage.info"
- sh "python /tmp/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
- cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
- }
-
- // TODO: replace with upload to artifactory server
- // develop branch only
- if ( env.BRANCH_NAME == "develop" ) {
- //archive(includes: 'build/bin/,compile_commands.json')
+ sh """
+ ccache --version
+ ccache --show-stats
+ ccache --zero-stats
+ ccache --max-size=5G
+ """
+ sh """
+ cmake \
+ -DTESTING=ON \
+ -H. \
+ -Bbuild \
+ -DCMAKE_BUILD_TYPE=Debug \
+ -DIROHA_VERSION=${env.IROHA_VERSION} \
+ ${cmakeOptions}
+ """
+ sh "cmake --build build -- -j${parallelism}"
+ sh "ccache --show-stats"
+ if ( coverageEnabled ) {
+ sh "cmake --build build --target coverage.init.info"
+ }
+ def testExitCode = sh(script: 'CTEST_OUTPUT_ON_FAILURE=1 cmake --build build --target test', returnStatus: true)
+ if (testExitCode != 0) {
+ currentBuild.result = "UNSTABLE"
+ }
+ if ( coverageEnabled ) {
+ sh "cmake --build build --target cppcheck"
+ // Sonar
+ if (env.CHANGE_ID != null) {
+ sh """
+ sonar-scanner \
+ -Dsonar.github.disableInlineComments \
+ -Dsonar.github.repository='${DOCKER_REGISTRY_BASENAME}' \
+ -Dsonar.analysis.mode=preview \
+ -Dsonar.login=${SONAR_TOKEN} \
+ -Dsonar.projectVersion=${BUILD_TAG} \
+ -Dsonar.github.oauth=${SORABOT_TOKEN} \
+ -Dsonar.github.pullRequest=${CHANGE_ID}
+ """
+ }
+ sh "cmake --build build --target coverage.info"
+ sh "python /tmp/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
+ cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
+ }
}
}
}
+
return this
diff --git a/.jenkinsci/docker-manifest.groovy b/.jenkinsci/docker-manifest.groovy
new file mode 100644
index 0000000000..c7b7c7c73b
--- /dev/null
+++ b/.jenkinsci/docker-manifest.groovy
@@ -0,0 +1,28 @@
+#!/usr/bin/env groovy
+
+def manifestSupportEnabled() {
+ def dockerVersion = sh(script: "docker -v", returnStdout: true).trim()
+ def experimentalEnabled = sh(script: "grep -i experimental ~/.docker/config.json", returnStatus: true)
+ return experimentalEnabled == 0 && dockerVersion ==~ /^Docker version 18.*$/
+
+}
+
+def manifestCreate(manifestListName, manifests) {
+ sh "docker manifest create ${manifestListName} ${manifests.join(' ')}"
+}
+
+def manifestAnnotate(manifestListName, manifestsWithFeatures) {
+ manifestsWithFeatures.each {
+ sh """
+ docker manifest annotate ${manifestListName} ${it['manifest']} --arch "${it['arch']}" \
+ --os "${it['os']}" --os-features "${it['osfeatures'].join(',')}" --variant "${it['variant']}"
+ """
+ }
+}
+
+def manifestPush(manifestListName, dockerRegistryLogin, dockerRegistryPassword) {
+ sh "docker login -u '${dockerRegistryLogin}' -p '${dockerRegistryPassword}'"
+ sh "docker manifest push --purge ${manifestListName}"
+}
+
+return this
diff --git a/.jenkinsci/docker-pull-or-build.groovy b/.jenkinsci/docker-pull-or-build.groovy
new file mode 100644
index 0000000000..d699ac40bb
--- /dev/null
+++ b/.jenkinsci/docker-pull-or-build.groovy
@@ -0,0 +1,57 @@
+#!/usr/bin/env groovy
+
+def remoteFilesDiffer(f1, f2) {
+ sh "curl -L -o /tmp/${env.GIT_COMMIT}/f1 --create-dirs ${f1}"
+ sh "curl -L -o /tmp/${env.GIT_COMMIT}/f2 ${f2}"
+ diffExitCode = sh(script: "diff -q /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}/f2", returnStatus: true)
+ return diffExitCode != 0
+}
+
+def buildOptionsString(options) {
+ def s = ''
+ if (options) {
+ options.each { k, v ->
+ s += "--build-arg ${k}=${v} "
+ }
+ }
+ return s
+}
+
+def dockerPullOrUpdate(imageName, currentDockerfileURL, previousDockerfileURL, referenceDockerfileURL, buildOptions=null) {
+ buildOptions = buildOptionsString(buildOptions)
+ def commit = sh(script: "echo ${GIT_LOCAL_BRANCH} | md5sum | cut -c 1-8", returnStdout: true).trim()
+ if (remoteFilesDiffer(currentDockerfileURL, previousDockerfileURL)) {
+ // Dockerfile has been changed compared to the previous commit
+ // Worst case scenario. We cannot count on the local cache
+ // because Dockerfile may contain apt-get entries that would try to update
+ // from invalid (stale) addresses
+ iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "${buildOptions} --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ }
+ else {
+    // Dockerfile is unchanged since the previous commit; compare it against the reference branch
+ if (remoteFilesDiffer(currentDockerfileURL, referenceDockerfileURL)) {
+ // if we're lucky to build on the same agent, image will be built using cache
+ iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ }
+ else {
+ // try pulling image from Dockerhub, probably image is already there
+ def testExitCode = sh(script: "docker pull ${DOCKER_REGISTRY_BASENAME}:${imageName}", returnStatus: true)
+ if (testExitCode != 0) {
+ // image does not (yet) exist on Dockerhub. Build it
+ iC = docker.build("${DOCKER_REGISTRY_BASENAME}:${commit}-${BUILD_NUMBER}", "$buildOptions --no-cache -f /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}")
+ }
+ else {
+ // no difference found compared to both previous and reference Dockerfile
+ iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${imageName}")
+ }
+ }
+ }
+ if (GIT_LOCAL_BRANCH ==~ /develop|master/) {
+ docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') {
+ iC.push(imageName)
+ }
+ }
+ return iC
+}
+
+return this
diff --git a/.jenkinsci/linux-post-step.groovy b/.jenkinsci/linux-post-step.groovy
new file mode 100644
index 0000000000..2cd92a675b
--- /dev/null
+++ b/.jenkinsci/linux-post-step.groovy
@@ -0,0 +1,22 @@
+def linuxPostStep() {
+ timeout(time: 600, unit: "SECONDS") {
+ try {
+ if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ def platform = sh(script: 'uname -m', returnStdout: true).trim()
+ filePaths = [ '/tmp/${GIT_COMMIT}-${BUILD_NUMBER}/*' ]
+ artifacts.uploadArtifacts(filePaths, sprintf('/iroha/linux/%4$s/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6), platform]))
+ }
+ }
+ finally {
+ if (env.BUILD_TYPE == 'Debug') {
+ def cleanup = load ".jenkinsci/docker-cleanup.groovy"
+ cleanup.doDockerCleanup()
+ }
+ cleanWs()
+ }
+ }
+}
+
+return this
diff --git a/.jenkinsci/mac-release-build.groovy b/.jenkinsci/mac-release-build.groovy
new file mode 100644
index 0000000000..02125d2632
--- /dev/null
+++ b/.jenkinsci/mac-release-build.groovy
@@ -0,0 +1,29 @@
+#!/usr/bin/env groovy
+
+def doReleaseBuild(coverageEnabled=false) {
+ def scmVars = checkout scm
+ env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
+ env.IROHA_HOME = "/opt/iroha"
+ env.IROHA_BUILD = "${env.IROHA_HOME}/build"
+
+ sh """
+ export CCACHE_DIR=${CCACHE_RELEASE_DIR}
+ ccache --version
+ ccache --show-stats
+ ccache --zero-stats
+ ccache --max-size=1G
+
+ cmake -H. \
+ -Bbuild \
+ -DCOVERAGE=OFF \
+ -DPACKAGE_TGZ=ON \
+ -DCMAKE_BUILD_TYPE=Release \
+ -DIROHA_VERSION=${env.IROHA_VERSION}
+
+ cmake --build build --target package -- -j${params.PARALLELISM}
+ mv ./build/iroha-${env.IROHA_VERSION}-*.tar.gz ./build/iroha.tar.gz
+ ccache --show-stats
+ """
+}
+
+return this
diff --git a/.jenkinsci/previous-commit.groovy b/.jenkinsci/previous-commit.groovy
new file mode 100644
index 0000000000..5a3916005e
--- /dev/null
+++ b/.jenkinsci/previous-commit.groovy
@@ -0,0 +1,9 @@
+#!/usr/bin/env groovy
+
+def previousCommitOrCurrent() {
+  // GIT_PREVIOUS_COMMIT is null on the first PR build,
+  // despite the Jenkins docs saying it equals the current commit on the first build in a branch
+ return !env.GIT_PREVIOUS_COMMIT ? env.GIT_COMMIT : env.GIT_PREVIOUS_COMMIT
+}
+
+return this
diff --git a/.jenkinsci/release-build.groovy b/.jenkinsci/release-build.groovy
index 05d6613917..5e92053772 100644
--- a/.jenkinsci/release-build.groovy
+++ b/.jenkinsci/release-build.groovy
@@ -2,24 +2,21 @@
def doReleaseBuild() {
def parallelism = params.PARALLELISM
+ def manifest = load ".jenkinsci/docker-manifest.groovy"
// params are always null unless job is started
// this is the case for the FIRST build only.
// So just set this to same value as default.
// This is a known bug. See https://issues.jenkins-ci.org/browse/JENKINS-41929
- if (parallelism == null) {
+ if (!parallelism) {
parallelism = 4
}
- if ("arm7" in env.NODE_NAME) {
+ if (env.NODE_NAME.contains('arm7')) {
parallelism = 1
}
def platform = sh(script: 'uname -m', returnStdout: true).trim()
- sh "curl -L -o /tmp/${env.GIT_COMMIT}/Dockerfile --create-dirs https://raw.githubusercontent.com/hyperledger/iroha/${env.GIT_COMMIT}/docker/develop/${platform}/Dockerfile"
- // pull docker image for building release package of Iroha
- // speeds up consequent image builds as we simply tag them
- sh "docker pull ${DOCKER_BASE_IMAGE_DEVELOP}"
- iC = docker.build("hyperledger/iroha:${GIT_COMMIT}-${BUILD_NUMBER}", "--build-arg PARALLELISM=${parallelism} -f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT}")
-
sh "mkdir /tmp/${env.GIT_COMMIT}-${BUILD_NUMBER} || true"
+ iC = docker.image("${DOCKER_REGISTRY_BASENAME}:${platform}-develop-build")
+ iC.pull()
iC.inside(""
+ " -v /tmp/${GIT_COMMIT}-${BUILD_NUMBER}:/tmp/${GIT_COMMIT}"
+ " -v /var/jenkins/ccache:${CCACHE_RELEASE_DIR}") {
@@ -49,21 +46,58 @@ def doReleaseBuild() {
sh "cmake --build build --target package -- -j${parallelism}"
sh "ccache --show-stats"
- // copy build package to the volume
- sh "cp ./build/iroha-*.deb /tmp/${GIT_COMMIT}/iroha.deb"
+ // move build package to the volume
+ sh "mv ./build/iroha-*.deb /tmp/${GIT_COMMIT}/iroha.deb"
+ sh "mv ./build/*.tar.gz /tmp/${GIT_COMMIT}/iroha.tar.gz"
}
- sh "curl -L -o /tmp/${env.GIT_COMMIT}/Dockerfile --create-dirs https://raw.githubusercontent.com/hyperledger/iroha/${env.GIT_COMMIT}/docker/release/${platform}/Dockerfile"
- sh "curl -L -o /tmp/${env.GIT_COMMIT}/entrypoint.sh https://raw.githubusercontent.com/hyperledger/iroha/${env.GIT_COMMIT}/docker/release/${platform}/entrypoint.sh"
- sh "cp /tmp/${GIT_COMMIT}-${BUILD_NUMBER}/iroha.deb /tmp/${env.GIT_COMMIT}"
+ sh "curl -L -o /tmp/${env.GIT_COMMIT}/Dockerfile --create-dirs ${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/release/Dockerfile"
+ sh "curl -L -o /tmp/${env.GIT_COMMIT}/entrypoint.sh ${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/release/entrypoint.sh"
+ sh "mv /tmp/${GIT_COMMIT}-${BUILD_NUMBER}/iroha.deb /tmp/${env.GIT_COMMIT}"
sh "chmod +x /tmp/${env.GIT_COMMIT}/entrypoint.sh"
- iCRelease = docker.build("hyperledger/iroha:${GIT_COMMIT}-${BUILD_NUMBER}-release", "-f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT}")
+ iCRelease = docker.build("${DOCKER_REGISTRY_BASENAME}:${GIT_COMMIT}-${BUILD_NUMBER}-release", "--no-cache -f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT}")
docker.withRegistry('https://registry.hub.docker.com', 'docker-hub-credentials') {
- if (env.BRANCH_NAME == 'develop') {
- iCRelease.push("${platform}-develop-latest")
+ if (env.GIT_LOCAL_BRANCH == 'develop') {
+ iCRelease.push("${platform}-develop")
+ if (manifest.manifestSupportEnabled()) {
+ manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:develop",
+ ["${DOCKER_REGISTRY_BASENAME}:x86_64-develop",
+ "${DOCKER_REGISTRY_BASENAME}:armv7l-develop",
+ "${DOCKER_REGISTRY_BASENAME}:aarch64-develop"])
+ manifest.manifestAnnotate("${DOCKER_REGISTRY_BASENAME}:develop",
+ [
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:x86_64-develop",
+ arch: 'amd64', os: 'linux', osfeatures: [], variant: ''],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:armv7l-develop",
+ arch: 'arm', os: 'linux', osfeatures: [], variant: 'v7'],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:aarch64-develop",
+ arch: 'arm64', os: 'linux', osfeatures: [], variant: '']
+ ])
+ withCredentials([usernamePassword(credentialsId: 'docker-hub-credentials', usernameVariable: 'login', passwordVariable: 'password')]) {
+ manifest.manifestPush("${DOCKER_REGISTRY_BASENAME}:develop", login, password)
+ }
+ }
}
- else if (env.BRANCH_NAME == 'master') {
+ else if (env.GIT_LOCAL_BRANCH == 'master') {
iCRelease.push("${platform}-latest")
+ if (manifest.manifestSupportEnabled()) {
+ manifest.manifestCreate("${DOCKER_REGISTRY_BASENAME}:latest",
+ ["${DOCKER_REGISTRY_BASENAME}:x86_64-latest",
+ "${DOCKER_REGISTRY_BASENAME}:armv7l-latest",
+ "${DOCKER_REGISTRY_BASENAME}:aarch64-latest"])
+ manifest.manifestAnnotate("${DOCKER_REGISTRY_BASENAME}:latest",
+ [
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:x86_64-latest",
+ arch: 'amd64', os: 'linux', osfeatures: [], variant: ''],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:armv7l-latest",
+ arch: 'arm', os: 'linux', osfeatures: [], variant: 'v7'],
+ [manifest: "${DOCKER_REGISTRY_BASENAME}:aarch64-latest",
+ arch: 'arm64', os: 'linux', osfeatures: [], variant: '']
+ ])
+ withCredentials([usernamePassword(credentialsId: 'docker-hub-credentials', usernameVariable: 'login', passwordVariable: 'password')]) {
+ manifest.manifestPush("${DOCKER_REGISTRY_BASENAME}:latest", login, password)
+ }
+ }
}
}
sh "docker rmi ${iCRelease.id}"
diff --git a/.jenkinsci/remote-files-differ.groovy b/.jenkinsci/remote-files-differ.groovy
new file mode 100644
index 0000000000..6ea43fdf27
--- /dev/null
+++ b/.jenkinsci/remote-files-differ.groovy
@@ -0,0 +1,10 @@
+#!/usr/bin/env groovy
+
+def remoteFilesDiffer(f1, f2) {
+ sh "curl -sSL -o /tmp/${env.GIT_COMMIT}/f1 --create-dirs ${f1}"
+ sh "curl -sSL -o /tmp/${env.GIT_COMMIT}/f2 ${f2}"
+ diffExitCode = sh(script: "diff -q /tmp/${env.GIT_COMMIT}/f1 /tmp/${env.GIT_COMMIT}/f2", returnStatus: true)
+ return diffExitCode != 0
+}
+
+return this
diff --git a/.jenkinsci/selected-branches-coverage.groovy b/.jenkinsci/selected-branches-coverage.groovy
new file mode 100644
index 0000000000..1dd04792dd
--- /dev/null
+++ b/.jenkinsci/selected-branches-coverage.groovy
@@ -0,0 +1,13 @@
+#!/usr/bin/env groovy
+
+def selectedBranchesCoverage(branches, PRCoverage=true) {
+  // trigger coverage if the branch is in the given list, or (when PRCoverage is enabled) if it is a PR
+ if (PRCoverage) {
+ return env.GIT_LOCAL_BRANCH in branches || env.CHANGE_ID != null
+ }
+ else {
+ return env.GIT_LOCAL_BRANCH in branches
+ }
+}
+
+return this
\ No newline at end of file
diff --git a/Jenkinsfile b/Jenkinsfile
index 6a6d71bf16..c4362644e0 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -16,11 +16,16 @@ properties([parameters([
booleanParam(defaultValue: false, description: '', name: 'ARMv7'),
booleanParam(defaultValue: false, description: '', name: 'ARMv8'),
booleanParam(defaultValue: true, description: '', name: 'MacOS'),
- booleanParam(defaultValue: false, description: 'Whether it is a triggered build', name: 'Nightly'),
booleanParam(defaultValue: false, description: 'Whether build docs or not', name: 'Doxygen'),
booleanParam(defaultValue: false, description: 'Whether build Java bindings', name: 'JavaBindings'),
+ choice(choices: 'Release\nDebug', description: 'Java Bindings Build Type', name: 'JBBuildType'),
booleanParam(defaultValue: false, description: 'Whether build Python bindings', name: 'PythonBindings'),
- booleanParam(defaultValue: false, description: 'Whether build bindings only w/o Iroha itself', name: 'BindingsOnly'),
+ choice(choices: 'Release\nDebug', description: 'Python Bindings Build Type', name: 'PBBuildType'),
+ choice(choices: 'python3\npython2', description: 'Python Bindings Version', name: 'PBVersion'),
+ booleanParam(defaultValue: false, description: 'Whether build Android bindings', name: 'AndroidBindings'),
+ choice(choices: '26\n25\n24\n23\n22\n21\n20\n19\n18\n17\n16\n15\n14', description: 'Android Bindings ABI Version', name: 'ABABIVersion'),
+ choice(choices: 'Release\nDebug', description: 'Android Bindings Build Type', name: 'ABBuildType'),
+ choice(choices: 'arm64-v8a\narmeabi-v7a\narmeabi\nx86_64\nx86', description: 'Android Bindings Platform', name: 'ABPlatform'),
string(defaultValue: '4', description: 'How much parallelism should we exploit. "4" is optimal for machines with modest amount of memory and at least 4 cores', name: 'PARALLELISM')])])
@@ -30,10 +35,8 @@ pipeline {
CCACHE_RELEASE_DIR = '/opt/.ccache-release'
SORABOT_TOKEN = credentials('SORABOT_TOKEN')
SONAR_TOKEN = credentials('SONAR_TOKEN')
- CODECOV_TOKEN = credentials('CODECOV_TOKEN')
- DOCKERHUB = credentials('DOCKERHUB')
- DOCKER_BASE_IMAGE_DEVELOP = 'hyperledger/iroha:develop'
- DOCKER_BASE_IMAGE_RELEASE = 'hyperledger/iroha:latest'
+ GIT_RAW_BASE_URL = "https://raw.githubusercontent.com/hyperledger/iroha"
+ DOCKER_REGISTRY_BASENAME = "hyperledger/iroha"
IROHA_NETWORK = "iroha-0${CHANGE_ID}-${GIT_COMMIT}-${BUILD_NUMBER}"
IROHA_POSTGRES_HOST = "pg-0${CHANGE_ID}-${GIT_COMMIT}-${BUILD_NUMBER}"
@@ -42,59 +45,45 @@ pipeline {
IROHA_POSTGRES_PORT = 5432
}
- triggers {
- parameterizedCron('''
-0 23 * * * %BUILD_TYPE=Release; Linux=True; MacOS=True; ARMv7=False; ARMv8=True; Nightly=True; Doxygen=False; JavaBindings=False; PythonBindings=False; BindingsOnly=False; PARALLELISM=4
- ''')
- }
options {
buildDiscarder(logRotator(numToKeepStr: '20'))
}
agent any
stages {
- stage ('Stop bad job builds') {
+ stage ('Stop same job builds') {
agent { label 'master' }
steps {
script {
- if (BRANCH_NAME != "develop") {
- if (params.Nightly) {
- // Stop this job running if it is nightly but not the develop it should be
- def tmp = load ".jenkinsci/cancel-nightly-except-develop.groovy"
- tmp.cancelThisJob()
- }
- else {
- // Stop same job running builds if it is commit/PR build and not triggered as nightly
- def builds = load ".jenkinsci/cancel-builds-same-job.groovy"
- builds.cancelSameJobBuilds()
- }
- }
- else {
- if (!params.Nightly) {
- // Stop same job running builds if it is develop but it is not nightly
- def builds = load ".jenkinsci/cancel-builds-same-job.groovy"
- builds.cancelSameJobBuilds()
- }
+ if (GIT_LOCAL_BRANCH != "develop") {
+ def builds = load ".jenkinsci/cancel-builds-same-job.groovy"
+ builds.cancelSameJobBuilds()
}
}
}
}
stage('Build Debug') {
when {
- allOf {
- expression { params.BUILD_TYPE == 'Debug' }
- expression { return !params.BindingsOnly }
- }
+ expression { params.BUILD_TYPE == 'Debug' }
}
parallel {
stage ('Linux') {
- when { expression { return params.Linux } }
+ when {
+ beforeAgent true
+ expression { return params.Linux }
+ }
agent { label 'x86_64' }
steps {
script {
debugBuild = load ".jenkinsci/debug-build.groovy"
- debugBuild.doDebugBuild(true)
- if (BRANCH_NAME ==~ /(master|develop)/) {
+ coverage = load ".jenkinsci/selected-branches-coverage.groovy"
+ if (coverage.selectedBranchesCoverage(['develop', 'master'])) {
+ debugBuild.doDebugBuild(true)
+ }
+ else {
+ debugBuild.doDebugBuild()
+ }
+ if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
@@ -103,28 +92,29 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
}
}
stage('ARMv7') {
- when { expression { return params.ARMv7 } }
+ when {
+ beforeAgent true
+ expression { return params.ARMv7 }
+ }
agent { label 'armv7' }
steps {
script {
- def debugBuild = load ".jenkinsci/debug-build.groovy"
- if (!params.Linux && !params.ARMv8 && !params.MacOS) {
+ debugBuild = load ".jenkinsci/debug-build.groovy"
+ coverage = load ".jenkinsci/selected-branches-coverage.groovy"
+ if (!params.Linux && !params.ARMv8 && !params.MacOS && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
debugBuild.doDebugBuild(true)
}
else {
debugBuild.doDebugBuild()
}
- if (BRANCH_NAME ==~ /(master|develop)/) {
+ if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
@@ -133,28 +123,29 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
}
}
stage('ARMv8') {
- when { expression { return params.ARMv8 } }
+ when {
+ beforeAgent true
+ expression { return params.ARMv8 }
+ }
agent { label 'armv8' }
steps {
script {
- def debugBuild = load ".jenkinsci/debug-build.groovy"
- if (!params.Linux && !params.MacOS) {
+ debugBuild = load ".jenkinsci/debug-build.groovy"
+ coverage = load ".jenkinsci/selected-branches-coverage.groovy"
+ if (!params.Linux && !params.MacOS && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
debugBuild.doDebugBuild(true)
}
else {
debugBuild.doDebugBuild()
}
- if (BRANCH_NAME ==~ /(master|develop)/) {
+ if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
releaseBuild = load ".jenkinsci/release-build.groovy"
releaseBuild.doReleaseBuild()
}
@@ -163,23 +154,24 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
}
}
stage('MacOS'){
- when { expression { return params.MacOS } }
+ when {
+ beforeAgent true
+ expression { return params.MacOS }
+ }
agent { label 'mac' }
steps {
script {
def coverageEnabled = false
def cmakeOptions = ""
- if (!params.Linux) {
+ coverage = load ".jenkinsci/selected-branches-coverage.groovy"
+ if (!params.Linux && (coverage.selectedBranchesCoverage(['develop', 'master']))) {
coverageEnabled = true
cmakeOptions = " -DCOVERAGE=ON "
}
@@ -218,7 +210,7 @@ pipeline {
"""
def testExitCode = sh(script: 'IROHA_POSTGRES_HOST=localhost IROHA_POSTGRES_PORT=5433 cmake --build build --target test', returnStatus: true)
if (testExitCode != 0) {
- currentBuild.result = "UNSTABLE"
+ currentBuild.currentResult = "UNSTABLE"
}
if ( coverageEnabled ) {
sh "cmake --build build --target cppcheck"
@@ -237,25 +229,32 @@ pipeline {
sh "cmake --build build --target coverage.info"
sh "python /usr/local/bin/lcov_cobertura.py build/reports/coverage.info -o build/reports/coverage.xml"
cobertura autoUpdateHealth: false, autoUpdateStability: false, coberturaReportFile: '**/build/reports/coverage.xml', conditionalCoverageTargets: '75, 50, 0', failUnhealthy: false, failUnstable: false, lineCoverageTargets: '75, 50, 0', maxNumberOfBuilds: 50, methodCoverageTargets: '75, 50, 0', onlyStable: false, zoomCoverageChart: false
-
}
-
- // TODO: replace with upload to artifactory server
- // only develop branch
- if ( env.BRANCH_NAME == "develop" ) {
- //archive(includes: 'build/bin/,compile_commands.json')
+ if (GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
+ releaseBuild = load ".jenkinsci/mac-release-build.groovy"
+ releaseBuild.doReleaseBuild()
}
}
}
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- cleanWs()
- sh """
- pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ stop && \
- rm -rf /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/
- """
+ timeout(time: 600, unit: "SECONDS") {
+ try {
+ if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ filePaths = [ '\$(pwd)/build/*.tar.gz' ]
+ artifacts.uploadArtifacts(filePaths, sprintf('/iroha/macos/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)]))
+ }
+ }
+ finally {
+ cleanWs()
+ sh """
+ pg_ctl -D /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/ stop && \
+ rm -rf /var/jenkins/${GIT_COMMIT}-${BUILD_NUMBER}/
+ """
+ }
}
}
}
@@ -265,15 +264,15 @@ pipeline {
}
stage('Build Release') {
when {
- allOf {
- expression { params.BUILD_TYPE == 'Release' }
- expression { return ! params.BindingsOnly }
- }
+ expression { params.BUILD_TYPE == 'Release' }
}
parallel {
stage('Linux') {
- when { expression { return params.Linux } }
- agent { label 'linux && x86_64' }
+ when {
+ beforeAgent true
+ expression { return params.Linux }
+ }
+ agent { label 'x86_64' }
steps {
script {
def releaseBuild = load ".jenkinsci/release-build.groovy"
@@ -283,17 +282,17 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
}
}
stage('ARMv7') {
- when { expression { return params.ARMv7 } }
+ when {
+ beforeAgent true
+ expression { return params.ARMv7 }
+ }
agent { label 'armv7' }
steps {
script {
@@ -304,17 +303,17 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
- }
+ }
}
stage('ARMv8') {
- when { expression { return params.ARMv8 } }
+ when {
+ beforeAgent true
+ expression { return params.ARMv8 }
+ }
agent { label 'armv8' }
steps {
script {
@@ -325,44 +324,40 @@ pipeline {
post {
always {
script {
- timeout(time: 60, unit: "SECONDS") {
- def cleanup = load ".jenkinsci/docker-cleanup.groovy"
- cleanup.doDockerCleanup()
- cleanWs()
- }
+ post = load ".jenkinsci/linux-post-step.groovy"
+ post.linuxPostStep()
}
}
- }
+ }
}
stage('MacOS') {
- when { expression { return params.MacOS } }
+ when {
+ beforeAgent true
+ expression { return params.MacOS }
+ }
+ agent { label 'mac' }
steps {
script {
- def scmVars = checkout scm
- env.IROHA_VERSION = "0x${scmVars.GIT_COMMIT}"
- env.IROHA_HOME = "/opt/iroha"
- env.IROHA_BUILD = "${env.IROHA_HOME}/build"
-
- sh """
- ccache --version
- ccache --show-stats
- ccache --zero-stats
- ccache --max-size=5G
- """
- sh """
- cmake \
- -H. \
- -Bbuild \
- -DCMAKE_BUILD_TYPE=${params.BUILD_TYPE} \
- -DIROHA_VERSION=${env.IROHA_VERSION}
- """
- sh "cmake --build build -- -j${params.PARALLELISM}"
- sh "ccache --show-stats"
-
- // TODO: replace with upload to artifactory server
- // only develop branch
- if ( env.BRANCH_NAME == "develop" ) {
- //archive(includes: 'build/bin/,compile_commands.json')
+ def releaseBuild = load ".jenkinsci/mac-release-build.groovy"
+ releaseBuild.doReleaseBuild()
+ }
+ }
+ post {
+ always {
+ script {
+ timeout(time: 600, unit: "SECONDS") {
+ try {
+ if (currentBuild.currentResult == "SUCCESS" && GIT_LOCAL_BRANCH ==~ /(master|develop)/) {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ filePaths = [ '\$(pwd)/build/*.tar.gz' ]
+ artifacts.uploadArtifacts(filePaths, sprintf('/iroha/macos/%1$s-%2$s-%3$s', [GIT_LOCAL_BRANCH, sh(script: 'date "+%Y%m%d"', returnStdout: true).trim(), commit.substring(0,6)]))
+ }
+ }
+ finally {
+ cleanWs()
+ }
+ }
}
}
}
@@ -371,9 +366,10 @@ pipeline {
}
stage('Build docs') {
when {
+ beforeAgent true
allOf {
expression { return params.Doxygen }
- expression { BRANCH_NAME ==~ /(master|develop)/ }
+ expression { GIT_LOCAL_BRANCH ==~ /(master|develop)/ }
}
}
// build docs on any vacant node. Prefer `x86_64` over
@@ -391,23 +387,85 @@ pipeline {
}
stage('Build bindings') {
when {
+ beforeAgent true
anyOf {
- expression { return params.BindingsOnly }
expression { return params.PythonBindings }
expression { return params.JavaBindings }
+ expression { return params.AndroidBindings }
}
}
agent { label 'x86_64' }
+ environment {
+ JAVA_HOME = '/usr/lib/jvm/java-8-oracle'
+ }
steps {
script {
def bindings = load ".jenkinsci/bindings.groovy"
+ def dPullOrBuild = load ".jenkinsci/docker-pull-or-build.groovy"
+ def pCommit = load ".jenkinsci/previous-commit.groovy"
def platform = sh(script: 'uname -m', returnStdout: true).trim()
- sh "curl -L -o /tmp/${env.GIT_COMMIT}/Dockerfile --create-dirs https://raw.githubusercontent.com/hyperledger/iroha/${env.GIT_COMMIT}/docker/develop/${platform}/Dockerfile"
- iC = docker.build("hyperledger/iroha-develop:${GIT_COMMIT}-${BUILD_NUMBER}", "-f /tmp/${env.GIT_COMMIT}/Dockerfile /tmp/${env.GIT_COMMIT} --build-arg PARALLELISM=${PARALLELISM}")
- sh "rm -rf /tmp/${env.GIT_COMMIT}"
- iC.inside {
- def scmVars = checkout scm
- bindings.doBindings()
+ def previousCommit = pCommit.previousCommitOrCurrent()
+ if (params.JavaBindings) {
+ iC = dPullOrBuild.dockerPullOrUpdate("$platform-develop-build",
+ "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile",
+ ['PARALLELISM': params.PARALLELISM])
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ bindings.doJavaBindings(params.JBBuildType)
+ }
+ }
+ if (params.PythonBindings) {
+ iC = dPullOrBuild.dockerPullOrUpdate("$platform-develop-build",
+ "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/develop/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/develop/docker/develop/Dockerfile",
+ ['PARALLELISM': params.PARALLELISM])
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ bindings.doPythonBindings(params.PBBuildType)
+ }
+ }
+ if (params.AndroidBindings) {
+ iC = dPullOrBuild.dockerPullOrUpdate("android-${params.ABPlatform}-${params.ABBuildType}",
+ "${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/${previousCommit}/docker/android/Dockerfile",
+ "${env.GIT_RAW_BASE_URL}/develop/docker/android/Dockerfile",
+ ['PARALLELISM': params.PARALLELISM, 'PLATFORM': params.ABPlatform, 'BUILD_TYPE': params.ABBuildType])
+ sh "curl -L -o /tmp/${env.GIT_COMMIT}/entrypoint.sh ${env.GIT_RAW_BASE_URL}/${env.GIT_COMMIT}/docker/android/entrypoint.sh"
+ sh "chmod +x /tmp/${env.GIT_COMMIT}/entrypoint.sh"
+ iC.inside("-v /tmp/${env.GIT_COMMIT}/entrypoint.sh:/entrypoint.sh:ro -v /tmp/${env.GIT_COMMIT}/bindings-artifact:/tmp/bindings-artifact") {
+ bindings.doAndroidBindings(params.ABABIVersion)
+ }
+ }
+ }
+ }
+ post {
+ always {
+ timeout(time: 600, unit: "SECONDS") {
+ script {
+ try {
+ if (currentBuild.currentResult == "SUCCESS") {
+ def artifacts = load ".jenkinsci/artifacts.groovy"
+ def commit = env.GIT_COMMIT
+ if (params.JavaBindings) {
+ javaBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/java-bindings-*.zip' ]
+ artifacts.uploadArtifacts(javaBindingsFilePaths, '/iroha/bindings/java')
+ }
+ if (params.PythonBindings) {
+ pythonBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/python-bindings-*.zip' ]
+ artifacts.uploadArtifacts(pythonBindingsFilePaths, '/iroha/bindings/python')
+ }
+ if (params.AndroidBindings) {
+ androidBindingsFilePaths = [ '/tmp/${GIT_COMMIT}/bindings-artifact/android-bindings-*.zip' ]
+ artifacts.uploadArtifacts(androidBindingsFilePaths, '/iroha/bindings/android')
+ }
+ }
+ }
+ finally {
+ sh "rm -rf /tmp/${env.GIT_COMMIT}"
+ cleanWs()
+ }
+ }
}
}
}
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 0bee5ed289..1885c6f9c2 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -1,21 +1,30 @@
## Maintainers
-| Name | GitHub Id | email |
-|---|---|---|
-| Makoto Takemiya | takemiyamakoto | takemiya@soramitsu.co.jp |
-| Ryu Okada | ryuo88 | okada@soramitsu.co.jp |
-| Taisei Igarashi | MizukiSonoko | igarashi@soramitsu.co.jp |
-| Motohiko Abe | motxx | abe@soramitsu.co.jp |
-| Daisuke Shimada | cimadai | dice.k1984@gmail.com |
-| Sushant D. Mayekar | Sushantdm | Sushantdm@gmail.com |
-| Yanno Ban | yannoban | ban.yanno@nbc.org.kh
-| Hiroshi Sasagawa | SasagawaHiroshi | sasagawa_hiroshi@intec.co.jp |
-| Takumi Yamashita | satelliteyes | yamashita@soramitsu.co.jp |
-| Bogdan Vaneev | Warchant | bogdan@soramitsu.co.jp |
-| Fyodor Muratov | muratovv | fyodor@soramitsu.co.jp |
-| Andrei Lebedev | lebdron | andrei@soramitsu.co.jp |
-| Bulat Nasrulin | grimadas | bulat@soramitsu.co.jp |
-| Kamil Salakhiev | kamilsa | kamil@soramitsu.co.jp |
-| Konstantin Munichev | luckychess | konstantin@soramitsu.co.jp |
-| Evgenii Mininbaev | l4l | evgenii@soramitsu.co.jp |
-| Nikolay Yushkevich | neewy | nikolai@soramitsu.co.jp |
+Maintainers of the Hyperledger Iroha project
+are supposed to help contributors by explaining project details to them,
+such as the architecture, development process, and existing issues.
+
+This is the list of maintainers, including their email addresses for direct communication:
+
+| Name | GitHub Id | email | Area of expertise |
+|------------------------|--------------------------|--------------------------------|---------------------------------|
+| Makoto Takemiya | @takemiyamakoto | takemiya@soramitsu.co.jp | Product vision |
+| Ryu Okada | @ryuo88 | okada@soramitsu.co.jp | Product vision |
+| Nikolay Yushkevich | @neewy | nikolai@soramitsu.co.jp | Project state |
+| Fyodor Muratov | @muratovv | fyodor@soramitsu.co.jp | Architecture, Java library, QA |
+| Andrei Lebedev | @lebdron | andrei@soramitsu.co.jp | Research |
+| Sergei Solonets | @Solonets | ssolonets@gmail.com | Development |
+| Yanno Ban | @yannoban | ban.yanno@nbc.org.kh | Development |
+| Dumitru Savva | @x3medima17 | savva@soramitsu.co.jp | Development |
+| Nikita Alekseev | @nickaleks | alekseev@soramitsu.co.jp | Development |
+| Victor Drobny | @victordrobny | drobny@soramitsu.co.jp | Development |
+| Bulat Nasrulin | @grimadas | bulat@soramitsu.co.jp | Development |
+| Kamil Salakhiev | @kamilsa | kamil@soramitsu.co.jp | Development |
+| Igor Egorov | @igor-egorov | igor@soramitsu.co.jp | Development, Android library |
+| Konstantin Munichev | @luckychess | konstantin@soramitsu.co.jp | Security |
+| Evgenii Mininbaev | @l4l | evgenii@soramitsu.co.jp | Security, Python library |
+| Vyacheslav Bikbaev | @laSinteZ | viacheslav@soramitsu.co.jp | Documentation, NodeJS library |
+| Arseniy Fokin | @stinger112 | stinger112@gmail.com | NodeJS library |
+| Alexey Chernyshov | @Alexey-N-Chernyshov | chernyshov@soramitsu.co.jp | Development |
+| Artyom Bakhtin | @bakhtin | a@bakhtin.net | Ansible, Jenkins, artifacts |
+| Anatoly Tyukushin | @tyvision | tyukushin@soramitsu.co.jp | Ansible, Jenkins |
diff --git a/README.md b/README.md
index e9b321ed34..50ca4195ab 100644
--- a/README.md
+++ b/README.md
@@ -11,7 +11,7 @@
Blockchain platform Hyperledger Iroha is designed for simple creation and management of assets. This is a distributed ledger of transactions.
-Check [overview](http://iroha.readthedocs.io/en/latest/overview/) page of our documentation.
+Check [overview](http://iroha.readthedocs.io/en/latest/overview.html) page of our documentation.
@@ -33,24 +33,25 @@ For more information, such as how to use client libraries in your target program
## Need help?
-* Join [telegram chat](https://t.me/joinchat/AgzrTUCZ6edlj6V612n5JQ) where the maintainers team is able to help you
-* Communicate in Gitter chat with our development community [](https://gitter.im/hyperledger-iroha/Lobby?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+* Join [telegram chat](https://t.me/hyperledgeriroha) where the maintainers team is able to help you
+* Communicate in Gitter chat with our development community [](https://gitter.im/hyperledger-iroha/Lobby)
* Submit issues via GitHub Iroha repository
* Join [HyperLedger RocketChat](https://chat.hyperledger.org) #iroha channel to discuss your concerns and proposals
* Use mailing list to spread your word within Iroha development community [hyperledger-iroha@lists.hyperledger.org](mailto:hyperledger-iroha@lists.hyperledger.org)
## License
-Copyright 2016 – 2018 Soramitsu Co., Ltd.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
+Iroha codebase is licensed under the Apache License,
+Version 2.0 (the "License"); you may not use this file except
+in compliance with the License. You may obtain a copy of the
+License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+
+Iroha documentation files are made available under the Creative Commons
+Attribution 4.0 International License (CC-BY-4.0), available at
+http://creativecommons.org/licenses/by/4.0/
diff --git a/cmake/Modules/Finded25519.cmake b/cmake/Modules/Finded25519.cmake
index bb50631f50..23414de25f 100644
--- a/cmake/Modules/Finded25519.cmake
+++ b/cmake/Modules/Finded25519.cmake
@@ -12,15 +12,26 @@ find_package_handle_standard_args(ed25519 DEFAULT_MSG
)
set(URL https://github.com/hyperledger/iroha-ed25519)
-set(VERSION e7188b8393dbe5ac54378610d53630bd4a180038)
+if (MSVC)
+ # trunk/1.2 with windows-specific changes
+ set(VERSION 31bb9b50e01b21ea2c21d33929e20934be4665b4)
+else()
+ set(VERSION e7188b8393dbe5ac54378610d53630bd4a180038)
+endif()
set_target_description(ed25519 "Digital signature algorithm" ${URL} ${VERSION})
if (NOT ed25519_FOUND)
+ if (NOT WIN32)
+ find_package(Git REQUIRED)
+ set(PATCH_RANDOM ${GIT_EXECUTABLE} apply ${PROJECT_SOURCE_DIR}/patch/close.patch || true)
+ endif ()
+
externalproject_add(hyperledger_ed25519
GIT_REPOSITORY ${URL}
GIT_TAG ${VERSION}
CMAKE_ARGS -DTESTING=OFF -DBUILD=STATIC
- BUILD_BYPRODUCTS ${EP_PREFIX}/src/hyperledger_ed25519-build/libed25519.a
+ PATCH_COMMAND ${PATCH_RANDOM}
+ BUILD_BYPRODUCTS ${EP_PREFIX}/src/hyperledger_ed25519-build/${CMAKE_STATIC_LIBRARY_PREFIX}ed25519${CMAKE_STATIC_LIBRARY_SUFFIX}
INSTALL_COMMAND "" # remove install step
TEST_COMMAND "" # remove test step
UPDATE_COMMAND "" # remove update step
@@ -32,6 +43,13 @@ if (NOT ed25519_FOUND)
file(MAKE_DIRECTORY ${ed25519_INCLUDE_DIR})
link_directories(${binary_dir})
+ if(CMAKE_GENERATOR MATCHES "Visual Studio")
+ set_target_properties(ed25519 PROPERTIES
+ IMPORTED_LOCATION_DEBUG ${binary_dir}/Debug/${CMAKE_STATIC_LIBRARY_PREFIX}ed25519${CMAKE_STATIC_LIBRARY_SUFFIX}
+ IMPORTED_LOCATION_RELEASE ${binary_dir}/Release/${CMAKE_STATIC_LIBRARY_PREFIX}ed25519${CMAKE_STATIC_LIBRARY_SUFFIX}
+ )
+ endif()
+
add_dependencies(ed25519 hyperledger_ed25519)
endif ()
diff --git a/cmake/Modules/Findgrpc.cmake b/cmake/Modules/Findgrpc.cmake
index cc32a12ff0..2dde985c0f 100644
--- a/cmake/Modules/Findgrpc.cmake
+++ b/cmake/Modules/Findgrpc.cmake
@@ -15,6 +15,9 @@ mark_as_advanced(grpc_grpc++_LIBRARY)
find_library(gpr_LIBRARY gpr)
mark_as_advanced(gpr_LIBRARY)
+find_library(address_sorting_LIBRARY address_sorting)
+mark_as_advanced(address_sorting_LIBRARY)
+
find_program(grpc_CPP_PLUGIN grpc_cpp_plugin)
mark_as_advanced(grpc_CPP_PLUGIN)
@@ -26,7 +29,7 @@ find_package_handle_standard_args(grpc DEFAULT_MSG
)
set(URL https://github.com/grpc/grpc)
-set(VERSION bfcbad3b86c7912968dc8e64f2121c920dad4dfb)
+set(VERSION bd44e485f69d70ca4095cea92decd98de3892aa6) # Release 1.11.0
set_target_description(grpc "Remote Procedure Call library" ${URL} ${VERSION})
if (NOT grpc_FOUND)
@@ -40,8 +43,6 @@ if (NOT grpc_FOUND)
-DProtobuf_DIR=${EP_PREFIX}/src/google_protobuf-build/lib/cmake/protobuf
-DgRPC_ZLIB_PROVIDER=package
-DBUILD_SHARED_LIBS=ON
- PATCH_COMMAND
- ${GIT_EXECUTABLE} apply ${PROJECT_SOURCE_DIR}/patch/fix-protobuf-package-include.patch || true
BUILD_BYPRODUCTS
${EP_PREFIX}/src/grpc_grpc-build/grpc_cpp_plugin
${EP_PREFIX}/src/grpc_grpc-build/${CMAKE_SHARED_LIBRARY_PREFIX}gpr${CMAKE_SHARED_LIBRARY_SUFFIX}
@@ -56,6 +57,7 @@ if (NOT grpc_FOUND)
set(gpr_LIBRARY ${binary_dir}/${CMAKE_SHARED_LIBRARY_PREFIX}gpr${CMAKE_SHARED_LIBRARY_SUFFIX})
set(grpc_LIBRARY ${binary_dir}/${CMAKE_SHARED_LIBRARY_PREFIX}grpc${CMAKE_SHARED_LIBRARY_SUFFIX})
set(grpc_grpc++_LIBRARY ${binary_dir}/${CMAKE_SHARED_LIBRARY_PREFIX}grpc++${CMAKE_SHARED_LIBRARY_SUFFIX})
+ set(address_sorting_LIBRARY ${binary_dir}/${CMAKE_SHARED_LIBRARY_PREFIX}address_sorting${CMAKE_SHARED_LIBRARY_SUFFIX})
set(grpc_CPP_PLUGIN ${binary_dir}/grpc_cpp_plugin)
file(MAKE_DIRECTORY ${grpc_INCLUDE_DIR})
link_directories(${binary_dir})
@@ -90,4 +92,5 @@ if(ENABLE_LIBS_PACKAGING)
add_install_step_for_lib(${grpc_LIBRARY})
add_install_step_for_lib(${grpc_grpc++_LIBRARY})
add_install_step_for_lib(${gpr_LIBRARY})
+ add_install_step_for_lib(${address_sorting_LIBRARY})
endif()
diff --git a/cmake/Modules/Findprotobuf.cmake b/cmake/Modules/Findprotobuf.cmake
index ac033a5391..0b7d61e338 100644
--- a/cmake/Modules/Findprotobuf.cmake
+++ b/cmake/Modules/Findprotobuf.cmake
@@ -19,7 +19,7 @@ find_package_handle_standard_args(protobuf DEFAULT_MSG
)
set(URL https://github.com/google/protobuf.git)
-set(VERSION 80a37e0782d2d702d52234b62dd4b9ec74fd2c95)
+set(VERSION 106ffc04be1abf3ff3399f54ccf149815b287dd9) # Protocol Buffers v3.5.1
set_target_description(protobuf "Protocol buffers library" ${URL} ${VERSION})
if (NOT protobuf_FOUND)
diff --git a/cmake/Modules/Findswig.cmake b/cmake/Modules/Findswig.cmake
index 449a957902..e012686edf 100644
--- a/cmake/Modules/Findswig.cmake
+++ b/cmake/Modules/Findswig.cmake
@@ -8,8 +8,9 @@ find_package_handle_standard_args(SWIG DEFAULT_MSG
if(NOT SWIG_EXECUTABLE)
find_package(Git REQUIRED)
- set(URL ftp://www.mirrorservice.org/sites/ftp.sourceforge.net/pub/sourceforge/s/sw/swig/swig/swig-3.0.12/swig-3.0.12.tar.gz)
- set_target_description(swig "Simplified Wrapper and Interface Generator (SWIG)" ${URL} 3.0.12)
+ set(SWIG_VERSION 3.0.12)
+ set(URL ftp://www.mirrorservice.org/sites/ftp.sourceforge.net/pub/sourceforge/s/sw/swig/swig/swig-${SWIG_VERSION}/swig-${SWIG_VERSION}.tar.gz)
+ set_target_description(swig "Simplified Wrapper and Interface Generator (SWIG)" ${URL} ${SWIG_VERSION})
ExternalProject_Add(swig_swig
URL ${URL}
@@ -20,12 +21,13 @@ if(NOT SWIG_EXECUTABLE)
BUILD_IN_SOURCE ON
BUILD_COMMAND ${MAKE} swig
TEST_COMMAND "" # remove test step
+ UPDATE_COMMAND "" # remove update step
)
ExternalProject_Get_Property(swig_swig source_dir)
# Predefined vars for local installed SWIG
set(SWIG_EXECUTABLE ${source_dir}/swig)
- set(SWIG_DIR ${source_dir})
+ set(SWIG_DIR ${source_dir}/share/swig/${SWIG_VERSION})
add_dependencies(swig swig_swig)
diff --git a/cmake/functions.cmake b/cmake/functions.cmake
index 2b9368c6c8..00d73b9540 100644
--- a/cmake/functions.cmake
+++ b/cmake/functions.cmake
@@ -27,15 +27,27 @@ function(addtest test_name SOURCES)
add_executable(${test_name} ${SOURCES})
target_link_libraries(${test_name} gtest gmock)
target_include_directories(${test_name} PUBLIC ${PROJECT_SOURCE_DIR}/test)
+
+ # fetch directory after test in source dir call
+ # for example:
+ # "/Users/user/iroha/test/integration/acceptance"
+ # match to "integration"
+ string(REGEX REPLACE ".*test\\/([a-zA-Z]+).*" "\\1" output ${CMAKE_CURRENT_SOURCE_DIR})
+
add_test(
- NAME ${test_name}
+ NAME "${output}_${test_name}"
COMMAND $ ${test_xml_output}
)
- strictmode(${test_name})
+ if (NOT MSVC)
+ # protobuf generates warnings at the moment
+ strictmode(${test_name})
+ endif ()
if ((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") OR
(CMAKE_CXX_COMPILER_ID STREQUAL "Clang") OR
(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang"))
target_compile_options(${test_name} PRIVATE -Wno-inconsistent-missing-override)
+ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
+ # do nothing, but also don't spam warning on each test
else ()
message(AUTHOR_WARNING "Unknown compiler: building target ${target} with default options")
endif ()
@@ -51,10 +63,17 @@ endfunction()
function(compile_proto_to_cpp PROTO)
string(REGEX REPLACE "\\.proto$" ".pb.h" GEN_PB_HEADER ${PROTO})
string(REGEX REPLACE "\\.proto$" ".pb.cc" GEN_PB ${PROTO})
+ if (MSVC)
+ set(GEN_COMMAND "${Protobuf_PROTOC_EXECUTABLE}")
+ set(GEN_ARGS ${Protobuf_INCLUDE_DIR})
+ else()
+ set(GEN_COMMAND ${CMAKE_COMMAND} -E env LD_LIBRARY_PATH=${protobuf_LIBRARY_DIR}:$ENV{LD_LIBRARY_PATH} "${protoc_EXECUTABLE}")
+ set(GEN_ARGS ${protobuf_INCLUDE_DIR})
+ endif()
add_custom_command(
OUTPUT ${IROHA_SCHEMA_DIR}/${GEN_PB_HEADER} ${IROHA_SCHEMA_DIR}/${GEN_PB}
- COMMAND ${CMAKE_COMMAND} -E env LD_LIBRARY_PATH=${protobuf_LIBRARY_DIR}:$ENV{LD_LIBRARY_PATH} "${protoc_EXECUTABLE}"
- ARGS -I${protobuf_INCLUDE_DIR} -I. --cpp_out=${IROHA_SCHEMA_DIR} ${PROTO}
+ COMMAND ${GEN_COMMAND}
+ ARGS -I${GEN_ARGS} -I. --cpp_out=${IROHA_SCHEMA_DIR} ${PROTO}
DEPENDS protoc
WORKING_DIRECTORY ${IROHA_SCHEMA_DIR}
)
diff --git a/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml b/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml
index be332ff798..42a106b882 100644
--- a/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml
+++ b/deploy/ansible/playbooks/iroha-standalone-nodes/iroha-deploy.yml
@@ -12,4 +12,4 @@
changed_when: False
roles:
- { role: docker, tags: docker }
- - { role: iroha-standalone-deploy-node }
+ - { role: iroha-standalone-deploy-node, tags: iroha-standalone-deploy-node }
diff --git a/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml b/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml
index cdcda7679c..1804f0a11b 100644
--- a/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml
+++ b/deploy/ansible/roles/iroha-cluster-deploy-node/defaults/main.yml
@@ -7,7 +7,7 @@
iroha_net: iroha_network # name of docker network
containerConfPath: /opt/iroha_data # path to folder with config files inside docker container
- irohaDockerImage: hyperledger/iroha-docker # docker image name
- irohaDockerImageTag: develop_latest # docker image tag
+ irohaDockerImage: hyperledger/iroha # docker image name
+ irohaDockerImageTag: develop # docker image tag
dbDockerImage: postgres
dbDockerImageTag: 9.5
diff --git a/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml b/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml
index 15a0c7c26d..b719450d0e 100644
--- a/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml
+++ b/deploy/ansible/roles/iroha-standalone-deploy-node/defaults/main.yml
@@ -7,7 +7,7 @@
iroha_net: iroha_network # name of docker network
containerConfPath: /opt/iroha_data
- irohaDockerImage: hyperledger/iroha-docker # docker image name
- irohaDockerImageTag: develop_latest # docker image tag
+ irohaDockerImage: hyperledger/iroha # docker image name
+ irohaDockerImageTag: develop # docker image tag
dbDockerImage: postgres
dbDockerImageTag: 9.5
diff --git a/docker/android/Dockerfile b/docker/android/Dockerfile
new file mode 100644
index 0000000000..eb70b69ddb
--- /dev/null
+++ b/docker/android/Dockerfile
@@ -0,0 +1,52 @@
+# using fresh 18.04 as it contains suitable `cmake` in repos
+FROM ubuntu:18.04
+
+# number of concurrent threads during build
+# usage: docker build --build-arg PARALLELISM=8 -t name/name .
+ARG PARALLELISM=1
+ARG BUILD_TYPE_A
+ENV BUILD_TYPE_A=${BUILD_TYPE_A:-Release}
+ARG VERSION
+ENV VERSION=${VERSION:-26}
+ARG PACKAGE
+ENV PACKAGE=${PACKAGE:-jp.co.soramitsu.iroha.android}
+# valid platforms: armeabi, armeabi-v7a, arm64-v8a, x86, x86_64
+ARG PLATFORM
+ENV PLATFORM=${PLATFORM:-x86_64}
+
+ENV NDK_PATH="/android-ndk/android-ndk-r16b"
+ENV DEPS_DIR="/iroha/dependencies"
+
+RUN apt-get update && \
+ apt-get -y install --no-install-recommends git curl apt-utils software-properties-common libpthread-stubs0-dev libpcre3-dev \
+ unzip zip build-essential automake libtool ca-certificates ccache zlib1g-dev libcurl4-openssl-dev libc6-dbg cmake; \
+ rm -rf /var/lib/apt/lists/*
+
+RUN set -e; mkdir -p $DEPS_DIR/include $DEPS_DIR/lib
+
+# boost 1.66
+RUN set -e; \
+ curl -L -o /tmp/boost_1_66_0.tar.gz https://dl.bintray.com/boostorg/release/1.66.0/source/boost_1_66_0.tar.gz; \
+ tar -zxf /tmp/boost_1_66_0.tar.gz -C /tmp; mv /tmp/boost_1_66_0/boost $DEPS_DIR/include; rm -f /tmp/boost_1_66_0.tar.gz
+
+# install android-ndk-r16b
+RUN set -e; \
+ curl -L -o /tmp/android-ndk.zip https://dl.google.com/android/repository/android-ndk-r16b-linux-x86_64.zip; unzip -q /tmp/android-ndk.zip -d /android-ndk; rm -f /tmp/android-ndk.zip
+
+# protobuf
+RUN set -ex; \
+ git clone https://github.com/google/protobuf; \
+ (cd ./protobuf ; git checkout b5fbb742af122b565925987e65c08957739976a7); \
+ cmake -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE=$BUILD_TYPE_A -H./protobuf/cmake -B./protobuf/host_build; \
+ VERBOSE=1 cmake --build ./protobuf/host_build -- -j$PARALLELISM; \
+ sed -i.bak "s~COMMAND js_embed~COMMAND \"$PWD/protobuf/host_build/js_embed\"~" ./protobuf/cmake/libprotoc.cmake; \
+ LDFLAGS="-llog -landroid" cmake -DCMAKE_SYSTEM_NAME=Android -DCMAKE_SYSTEM_VERSION=$VERSION -DCMAKE_ANDROID_ARCH_ABI=$PLATFORM -DANDROID_NDK=$NDK_PATH -DCMAKE_ANDROID_STL_TYPE=c++_static -DCMAKE_INSTALL_PREFIX=$DEPS_DIR -Dprotobuf_BUILD_TESTS=OFF -DCMAKE_BUILD_TYPE="$BUILD_TYPE_A" -H./protobuf/cmake -B./protobuf/.build; \
+ VERBOSE=1 cmake --build ./protobuf/.build --target install -- -j$PARALLELISM
+
+# ed25519
+RUN set -e; \
+ git clone git://github.com/hyperledger/iroha-ed25519; \
+ (cd ./iroha-ed25519 ; git checkout e7188b8393dbe5ac54378610d53630bd4a180038); \
+ cmake -DCMAKE_SYSTEM_NAME=Android -DCMAKE_SYSTEM_VERSION=$VERSION -DCMAKE_ANDROID_ARCH_ABI=$PLATFORM -DANDROID_NDK=$NDK_PATH -DCMAKE_ANDROID_STL_TYPE=c++_static -DCMAKE_INSTALL_PREFIX=$DEPS_DIR -DTESTING=OFF -DCMAKE_BUILD_TYPE=$BUILD_TYPE_A -DBUILD=STATIC -H./iroha-ed25519 -B./iroha-ed25519/build; \
+ VERBOSE=1 cmake --build ./iroha-ed25519/build --target install -- -j$PARALLELISM; \
+ mv "$DEPS_DIR"/lib/static/libed25519.a "$DEPS_DIR"/lib; rmdir "$DEPS_DIR"/lib/static/
diff --git a/docker/android/entrypoint.sh b/docker/android/entrypoint.sh
new file mode 100644
index 0000000000..10ff2ba2d4
--- /dev/null
+++ b/docker/android/entrypoint.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+export LIBP=lib
+case "$PLATFORM" in
+ armeabi)
+ export ARCH=arch-arm
+ ;;
+ armeabi-v7a)
+ export ARCH=arch-arm
+ ;;
+ arm64-v8a)
+ export ARCH=arch-arm64
+ ;;
+ x86)
+ export ARCH=arch-x86
+ ;;
+ x86_64)
+ export ARCH=arch-x86_64
+ export LIBP=lib64
+ ;;
+ *)
+ echo Wrong ABI name: "$PLATFORM"
+ exit 1
+ ;;
+esac
+
+if [ "$BUILD_TYPE_A" = "Release" ]; then
+ export PROTOBUF_LIB_NAME=protobuf
+elif [ "$BUILD_TYPE_A" = "Debug" ]; then
+ export PROTOBUF_LIB_NAME=protobufd
+else
+ echo "Unknown build type: $BUILD_TYPE_A"
+ exit 1
+fi
diff --git a/docker/develop/aarch64/Dockerfile b/docker/develop/Dockerfile
similarity index 82%
rename from docker/develop/aarch64/Dockerfile
rename to docker/develop/Dockerfile
index 15d79ff37c..5d480fc386 100644
--- a/docker/develop/aarch64/Dockerfile
+++ b/docker/develop/Dockerfile
@@ -8,14 +8,13 @@ ARG CMAKE_BUILD_TYPE=Release
ENV IROHA_HOME /opt/iroha
ENV IROHA_BUILD /opt/iroha/build
-RUN apt-get update; \
- apt-get -y upgrade; \
+RUN apt-get update && \
apt-get -y --no-install-recommends install apt-utils software-properties-common; \
apt-get -y clean
# add git repository
-RUN add-apt-repository -y ppa:git-core/ppa; \
+RUN add-apt-repository -y ppa:git-core/ppa && \
apt-get update
@@ -24,17 +23,21 @@ RUN set -e; \
automake libtool \
# dev dependencies
libssl-dev zlib1g-dev libcurl4-openssl-dev libc6-dbg golang \
- # CircleCI dependencies
- git ssh tar gzip ca-certificates python3 python3-pip python3-setuptools \
+ # CI dependencies
+ git ssh tar gzip ca-certificates \
+ # Pythons
+ python-pip python3-pip python3-setuptools python-dev \
+ # SWIG dependencies
+ libpcre3-dev autoconf bison \
# other
- wget curl file unzip gdb iputils-ping vim ccache \
- gcovr cppcheck doxygen graphviz graphviz-dev; \
+ wget curl file gdb ccache \
+ gcovr cppcheck doxygen graphviz graphviz-dev unzip zip; \
apt-get -y clean
-# install cmake 3.7.2
+# install cmake 3.10.2
RUN set -e; \
git clone https://gitlab.kitware.com/cmake/cmake.git /tmp/cmake; \
- (cd /tmp/cmake ; git checkout 35413bf2c1b33980afd418030af27f184872af6b); \
+ (cd /tmp/cmake ; git checkout c1e087a9d3af74299d7681c9f9de59e5977a1539); \
(cd /tmp/cmake ; /tmp/cmake/bootstrap --system-curl --parallel=${PARALLELISM} --enable-ccache); \
make -j${PARALLELISM} -C /tmp/cmake; \
make -C /tmp/cmake install; \
@@ -52,10 +55,10 @@ RUN set -e; \
ldconfig; \
rm -rf /tmp/boost
-# install protobuf
+# install protobuf v3.5.1
RUN set -e; \
git clone https://github.com/google/protobuf /tmp/protobuf; \
- (cd /tmp/protobuf ; git checkout 80a37e0782d2d702d52234b62dd4b9ec74fd2c95); \
+ (cd /tmp/protobuf ; git checkout 106ffc04be1abf3ff3399f54ccf149815b287dd9); \
cmake \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-Dprotobuf_BUILD_TESTS=OFF \
@@ -90,11 +93,10 @@ RUN set -e; \
ldconfig; \
rm -rf /tmp/c-ares
-# install grpc
+# install grpc 1.11.0
RUN set -e; \
git clone https://github.com/grpc/grpc /tmp/grpc; \
- cd /tmp/grpc; \
- git checkout bfcbad3b86c7912968dc8e64f2121c920dad4dfb; \
+ (cd /tmp/grpc ; git checkout bd44e485f69d70ca4095cea92decd98de3892aa6); \
cmake \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-DgRPC_BENCHMARK_PROVIDER="" \
@@ -106,12 +108,7 @@ RUN set -e; \
-DBUILD_SHARED_LIBS=ON \
-H/tmp/grpc \
-B/tmp/grpc/.build; \
- cd /tmp/grpc/.build; \
- make -j${PARALLELISM} gpr grpc grpc++ grpc_cpp_plugin; \
- # copy libs to /usr/local/lib
- cp libgpr.so libgrpc.so libgrpc++.so libgrpc_plugin_support.so /usr/local/lib; \
- cp grpc_cpp_plugin /usr/local/bin; \
- cp -R ../include /usr/local; \
+ cmake --build /tmp/grpc/.build --target install -- -j${PARALLELISM}; \
ldconfig; \
rm -rf /tmp/grpc
@@ -203,15 +200,6 @@ RUN set -e; \
ldconfig; \
rm -rf /tmp/tbb
-# install docker
-ENV DOCKER_VERSION=17.06.0-ce
-RUN set -e; \
- curl -L -o /tmp/docker-${DOCKER_VERSION}.tgz https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz; \
- tar -xz -C /tmp -f /tmp/docker-${DOCKER_VERSION}.tgz; \
- mv /tmp/docker/* /usr/bin; \
- rm /tmp/docker-${DOCKER_VERSION}.tgz; \
- rm -rf /tmp/docker
-
# install sonar cli
ENV SONAR_CLI_VERSION=3.0.3.778
RUN set -e; \
@@ -236,10 +224,38 @@ RUN set -e; \
rm -rf /tmp/ed25519
# fetch lcov reports converter
-RUN curl -L -o /tmp/lcov_cobertura.py https://raw.githubusercontent.com/eriwen/lcov-to-cobertura-xml/8c55cd11f80a21e7e46f20f8c81fcde0bf11f5e5/lcov_cobertura/lcov_cobertura.py
+RUN set -e; \
+ curl -L -o /tmp/lcov_cobertura.py https://raw.githubusercontent.com/eriwen/lcov-to-cobertura-xml/8c55cd11f80a21e7e46f20f8c81fcde0bf11f5e5/lcov_cobertura/lcov_cobertura.py
+
+RUN set -e; \
+ add-apt-repository -y ppa:webupd8team/java; \
+ apt-get update; \
+ echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections; \
+ apt-get -y install oracle-java8-installer; \
+ java -version
+
+# Build SWIG
+RUN set -e; \
+ curl -L -o /tmp/swig-3.0.12.tar.gz https://github.com/swig/swig/archive/rel-3.0.12.tar.gz; \
+ tar -C /tmp -zxf /tmp/swig-3.0.12.tar.gz; \
+ cd /tmp/swig-rel-3.0.12; \
+ ./autogen.sh && ./configure && make -j${PARALLELISM}; \
+ make install; \
+ rm -rf /tmp/swig-rel-3.0.12
+
+RUN set -e; \
+ add-apt-repository -y ppa:jonathonf/python-3.6; \
+ apt-get update; \
+ apt-get -y install python3.6-dev
+
+# python bindings dependencies
+RUN set -e; \
+ pip install grpcio_tools; \
+ pip3 install grpcio_tools
# install lcov
-RUN curl -L -o /tmp/lcov-1.13.tar.gz https://github.com/linux-test-project/lcov/releases/download/v1.13/lcov-1.13.tar.gz; cd /tmp; tar zxf lcov-1.13.tar.gz; cd lcov-1.13; make install
+RUN set -e; \
+ curl -L -o /tmp/lcov-1.13.tar.gz https://github.com/linux-test-project/lcov/releases/download/v1.13/lcov-1.13.tar.gz; cd /tmp; tar zxf lcov-1.13.tar.gz; cd lcov-1.13; make install
# non-interactive adduser
# -m = create home dir
diff --git a/docker/develop/armv7l/Dockerfile b/docker/develop/armv7l/Dockerfile
deleted file mode 100644
index 11e52a94c1..0000000000
--- a/docker/develop/armv7l/Dockerfile
+++ /dev/null
@@ -1,261 +0,0 @@
-FROM ubuntu:16.04
-
-# number of concurrent threads during build
-# usage: docker build --build-arg PARALLELISM=8 -t name/name .
-ARG PARALLELISM=1
-ARG CMAKE_BUILD_TYPE=Release
-
-ENV IROHA_HOME /opt/iroha
-ENV IROHA_BUILD /opt/iroha/build
-
-RUN apt-get update; \
- apt-get -y upgrade; \
- apt-get -y --no-install-recommends install apt-utils software-properties-common; \
- apt-get -y clean
-
-
-# add git repository
-RUN add-apt-repository -y ppa:git-core/ppa; \
- apt-get update
-
-
-RUN set -e; \
- apt-get -y --no-install-recommends install build-essential python-software-properties \
- automake libtool \
- # dev dependencies
- libssl-dev zlib1g-dev libcurl4-openssl-dev libc6-dbg golang \
- # CircleCI dependencies
- git ssh tar gzip ca-certificates python3 python3-pip python3-setuptools \
- # other
- wget curl file unzip gdb iputils-ping vim ccache \
- gcovr cppcheck doxygen graphviz graphviz-dev; \
- apt-get -y clean
-
-# install cmake 3.7.2
-RUN set -e; \
- git clone https://gitlab.kitware.com/cmake/cmake.git /tmp/cmake; \
- (cd /tmp/cmake ; git checkout 35413bf2c1b33980afd418030af27f184872af6b); \
- (cd /tmp/cmake ; /tmp/cmake/bootstrap --system-curl --parallel=${PARALLELISM} --enable-ccache); \
- make -j${PARALLELISM} -C /tmp/cmake; \
- make -C /tmp/cmake install; \
- ldconfig; \
- rm -rf /tmp/cmake
-
-# install boost 1.65.1
-RUN set -e; \
- git clone https://github.com/boostorg/boost /tmp/boost; \
- (cd /tmp/boost ; git checkout 436ad1dfcfc7e0246141beddd11c8a4e9c10b146); \
- (cd /tmp/boost ; git submodule update --init --recursive); \
- (cd /tmp/boost ; /tmp/boost/bootstrap.sh --with-libraries=system,filesystem); \
- (cd /tmp/boost ; /tmp/boost/b2 headers); \
- (cd /tmp/boost ; /tmp/boost/b2 cxxflags="-std=c++14" -j ${PARALLELISM} install); \
- ldconfig; \
- rm -rf /tmp/boost
-
-# install protobuf
-RUN set -e; \
- git clone https://github.com/google/protobuf /tmp/protobuf; \
- (cd /tmp/protobuf ; git checkout 80a37e0782d2d702d52234b62dd4b9ec74fd2c95); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -Dprotobuf_BUILD_TESTS=OFF \
- -Dprotobuf_BUILD_SHARED_LIBS=ON \
- -H/tmp/protobuf/cmake \
- -B/tmp/protobuf/.build; \
- cmake --build /tmp/protobuf/.build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/protobuf
-
-# install gflags
-RUN set -e; \
- git clone https://github.com/gflags/gflags /tmp/gflags; \
- (cd /tmp/gflags ; git checkout f8a0efe03aa69b3336d8e228b37d4ccb17324b88); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/gflags \
- -B/tmp/gflags/build; \
- cmake --build /tmp/gflags/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/gflags
-
-# install c-ares
-RUN set -e; \
- git clone https://github.com/c-ares/c-ares /tmp/c-ares; \
- (cd /tmp/c-ares ; git checkout 3be1924221e1326df520f8498d704a5c4c8d0cce); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/c-ares \
- -B/tmp/c-ares/build; \
- cmake --build /tmp/c-ares/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/c-ares
-
-# install grpc
-RUN set -e; \
- git clone https://github.com/grpc/grpc /tmp/grpc; \
- cd /tmp/grpc; \
- git checkout bfcbad3b86c7912968dc8e64f2121c920dad4dfb; \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DgRPC_BENCHMARK_PROVIDER="" \
- -DgRPC_ZLIB_PROVIDER=package \
- -DgRPC_CARES_PROVIDER=package \
- -DgRPC_SSL_PROVIDER=package \
- -DgRPC_PROTOBUF_PROVIDER=package \
- -DgRPC_GFLAGS_PROVIDER=package \
- -DBUILD_SHARED_LIBS=ON \
- -H/tmp/grpc \
- -B/tmp/grpc/.build; \
- cd /tmp/grpc/.build; \
- make -j${PARALLELISM} gpr grpc grpc++ grpc_cpp_plugin; \
- # copy libs to /usr/local/lib
- cp libgpr.so libgrpc.so libgrpc++.so libgrpc_plugin_support.so /usr/local/lib; \
- cp grpc_cpp_plugin /usr/local/bin; \
- cp -R ../include /usr/local; \
- ldconfig; \
- rm -rf /tmp/grpc
-
-# install gtest
-RUN set -e; \
- git clone https://github.com/google/googletest /tmp/googletest; \
- (cd /tmp/googletest ; git checkout ec44c6c1675c25b9827aacd08c02433cccde7780); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/googletest \
- -B/tmp/googletest/build; \
- cmake --build /tmp/googletest/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/googletest
-
-# install spdlog v0.16.3
-RUN set -e; \
- git clone https://github.com/gabime/spdlog /tmp/spdlog; \
- (cd /tmp/spdlog ; git checkout ccd675a286f457068ee8c823f8207f13c2325b26); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DSPDLOG_BUILD_TESTING=OFF -H/tmp/spdlog -B/tmp/spdlog/build; \
- cmake --build /tmp/spdlog/build --target install; \
- rm -rf /tmp/spdlog
-
-# install rxcpp
-RUN set -e; \
- git clone https://github.com/Reactive-Extensions/RxCpp /tmp/RxCpp; \
- (cd /tmp/RxCpp ; git checkout 1b2e0589f19cb34d8cd58803677701dcf2161876); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/RxCpp \
- -B/tmp/RxCpp/build; \
- cmake --build /tmp/RxCpp/build --target install; \
- rm -rf /tmp/RxCpp
-
-# install rapidjson
-RUN set -e; \
- git clone https://github.com/miloyip/rapidjson /tmp/rapidjson; \
- (cd /tmp/rapidjson ; git checkout f54b0e47a08782a6131cc3d60f94d038fa6e0a51); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DRAPIDJSON_BUILD_EXAMPLES=OFF \
- -H/tmp/rapidjson \
- -B/tmp/rapidjson/build; \
- cmake --build /tmp/rapidjson/build --target install; \
- ldconfig; \
- rm -rf /tmp/rapidjson
-
-# install libpq
-RUN set -e; \
- git clone --progress https://git.postgresql.org/git/postgresql.git /tmp/postgresql; \
- cd /tmp/postgresql; \
- git checkout 029386ccbddd0a33d481b94e511f5219b03e6636; \
- ./configure --without-readline --prefix=/usr/local; \
- # build
- make -j${PARALLELISM} -C src/bin/pg_config; \
- make -j${PARALLELISM} -C src/interfaces/libpq; \
- make -j${PARALLELISM} -C src/backend/utils fmgroids.h; \
- cp src/backend/utils/fmgroids.h src/include/utils/fmgroids.h; \
- # install
- make -C src/bin/pg_config install; \
- make -C src/interfaces/libpq install; \
- make -C src/include install; \
- ldconfig; \
- # remove
- rm -rf /tmp/postgresql
-
-# install pqxx
-RUN set -e; \
- git clone https://github.com/jtv/libpqxx /tmp/libpqxx; \
- (cd /tmp/libpqxx ; git checkout 5b17abce5ac2b1a2f8278718405b7ade8bb30ae9); \
- curl -L -o /tmp/libpqxx/config/config.guess 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=6b2374c79506ee82a8b440f6d1ca293e2e2e2463'; \
- curl -L -o /tmp/libpqxx/config/config.sub 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=6b2374c79506ee82a8b440f6d1ca293e2e2e2463'; \
- (cd /tmp/libpqxx ; /tmp/libpqxx/configure --disable-documentation --with-pic); \
- make -j${PARALLELISM} -C /tmp/libpqxx; \
- make -C /tmp/libpqxx install; \
- ldconfig; \
- rm -rf /tmp/libpqxx
-
-# install tbb
-RUN set -e; \
- git clone https://github.com/01org/tbb /tmp/tbb; \
- (cd /tmp/tbb ; git checkout eb6336ad29450f2a64af5123ca1b9429ff6bc11d); \
- make -j${PARALLELISM} -C /tmp/tbb tbb_build_prefix=build; \
- cp /tmp/tbb/build/build_debug/*.so* /usr/local/lib; \
- cp /tmp/tbb/build/build_release/*.so* /usr/local/lib; \
- cp -r /tmp/tbb/include/* /usr/local/include; \
- ldconfig; \
- rm -rf /tmp/tbb
-
-# install docker
-ENV DOCKER_VERSION=17.06.0-ce
-RUN set -e; \
- curl -L -o /tmp/docker-${DOCKER_VERSION}.tgz https://download.docker.com/linux/static/stable/armhf/docker-${DOCKER_VERSION}.tgz; \
- tar -xz -C /tmp -f /tmp/docker-${DOCKER_VERSION}.tgz; \
- mv /tmp/docker/* /usr/bin; \
- rm /tmp/docker-${DOCKER_VERSION}.tgz; \
- rm -rf /tmp/docker
-
-# install sonar cli
-ENV SONAR_CLI_VERSION=3.0.3.778
-RUN set -e; \
- mkdir -p /opt/sonar; \
- curl -L -o /tmp/sonar.zip https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${SONAR_CLI_VERSION}-linux.zip; \
- unzip -o -d /tmp/sonar-scanner /tmp/sonar.zip; \
- mv /tmp/sonar-scanner/sonar-scanner-${SONAR_CLI_VERSION}-linux /opt/sonar/scanner; \
- ln -s -f /opt/sonar/scanner/bin/sonar-scanner /usr/local/bin/sonar-scanner; \
- rm -rf /tmp/sonar*
-
-# install ed25519
-RUN set -e; \
- git clone git://github.com/hyperledger/iroha-ed25519.git /tmp/ed25519; \
- (cd /tmp/ed25519 ; git checkout e7188b8393dbe5ac54378610d53630bd4a180038); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DTESTING=OFF \
- -H/tmp/ed25519 \
- -B/tmp/ed25519/build; \
- cmake --build /tmp/ed25519/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/ed25519
-
-# fetch lcov reports converter
-RUN curl -L -o /tmp/lcov_cobertura.py https://raw.githubusercontent.com/eriwen/lcov-to-cobertura-xml/8c55cd11f80a21e7e46f20f8c81fcde0bf11f5e5/lcov_cobertura/lcov_cobertura.py
-
-# install lcov
-RUN curl -L -o /tmp/lcov-1.13.tar.gz https://github.com/linux-test-project/lcov/releases/download/v1.13/lcov-1.13.tar.gz; cd /tmp; tar zxf lcov-1.13.tar.gz; cd lcov-1.13; make install
-
-# non-interactive adduser
-# -m = create home dir
-# -s = set default shell
-# iroha-ci = username
-# -u = userid, default for Ubuntu is 1000
-# -U = create a group same as username
-# no password
-RUN useradd -ms /bin/bash iroha-ci -u 1000 -U
-
-WORKDIR /opt/iroha
-RUN set -e; \
- chmod -R 777 /opt/iroha; \
- mkdir -p /tmp/ccache -m 777; \
- ccache --clear
-
-
-USER iroha-ci
-CMD ["/bin/bash"]
diff --git a/docker/develop/x86_64/Dockerfile b/docker/develop/x86_64/Dockerfile
deleted file mode 100644
index 15d79ff37c..0000000000
--- a/docker/develop/x86_64/Dockerfile
+++ /dev/null
@@ -1,261 +0,0 @@
-FROM ubuntu:16.04
-
-# number of concurrent threads during build
-# usage: docker build --build-arg PARALLELISM=8 -t name/name .
-ARG PARALLELISM=1
-ARG CMAKE_BUILD_TYPE=Release
-
-ENV IROHA_HOME /opt/iroha
-ENV IROHA_BUILD /opt/iroha/build
-
-RUN apt-get update; \
- apt-get -y upgrade; \
- apt-get -y --no-install-recommends install apt-utils software-properties-common; \
- apt-get -y clean
-
-
-# add git repository
-RUN add-apt-repository -y ppa:git-core/ppa; \
- apt-get update
-
-
-RUN set -e; \
- apt-get -y --no-install-recommends install build-essential python-software-properties \
- automake libtool \
- # dev dependencies
- libssl-dev zlib1g-dev libcurl4-openssl-dev libc6-dbg golang \
- # CircleCI dependencies
- git ssh tar gzip ca-certificates python3 python3-pip python3-setuptools \
- # other
- wget curl file unzip gdb iputils-ping vim ccache \
- gcovr cppcheck doxygen graphviz graphviz-dev; \
- apt-get -y clean
-
-# install cmake 3.7.2
-RUN set -e; \
- git clone https://gitlab.kitware.com/cmake/cmake.git /tmp/cmake; \
- (cd /tmp/cmake ; git checkout 35413bf2c1b33980afd418030af27f184872af6b); \
- (cd /tmp/cmake ; /tmp/cmake/bootstrap --system-curl --parallel=${PARALLELISM} --enable-ccache); \
- make -j${PARALLELISM} -C /tmp/cmake; \
- make -C /tmp/cmake install; \
- ldconfig; \
- rm -rf /tmp/cmake
-
-# install boost 1.65.1
-RUN set -e; \
- git clone https://github.com/boostorg/boost /tmp/boost; \
- (cd /tmp/boost ; git checkout 436ad1dfcfc7e0246141beddd11c8a4e9c10b146); \
- (cd /tmp/boost ; git submodule update --init --recursive); \
- (cd /tmp/boost ; /tmp/boost/bootstrap.sh --with-libraries=system,filesystem); \
- (cd /tmp/boost ; /tmp/boost/b2 headers); \
- (cd /tmp/boost ; /tmp/boost/b2 cxxflags="-std=c++14" -j ${PARALLELISM} install); \
- ldconfig; \
- rm -rf /tmp/boost
-
-# install protobuf
-RUN set -e; \
- git clone https://github.com/google/protobuf /tmp/protobuf; \
- (cd /tmp/protobuf ; git checkout 80a37e0782d2d702d52234b62dd4b9ec74fd2c95); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -Dprotobuf_BUILD_TESTS=OFF \
- -Dprotobuf_BUILD_SHARED_LIBS=ON \
- -H/tmp/protobuf/cmake \
- -B/tmp/protobuf/.build; \
- cmake --build /tmp/protobuf/.build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/protobuf
-
-# install gflags
-RUN set -e; \
- git clone https://github.com/gflags/gflags /tmp/gflags; \
- (cd /tmp/gflags ; git checkout f8a0efe03aa69b3336d8e228b37d4ccb17324b88); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/gflags \
- -B/tmp/gflags/build; \
- cmake --build /tmp/gflags/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/gflags
-
-# install c-ares
-RUN set -e; \
- git clone https://github.com/c-ares/c-ares /tmp/c-ares; \
- (cd /tmp/c-ares ; git checkout 3be1924221e1326df520f8498d704a5c4c8d0cce); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/c-ares \
- -B/tmp/c-ares/build; \
- cmake --build /tmp/c-ares/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/c-ares
-
-# install grpc
-RUN set -e; \
- git clone https://github.com/grpc/grpc /tmp/grpc; \
- cd /tmp/grpc; \
- git checkout bfcbad3b86c7912968dc8e64f2121c920dad4dfb; \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DgRPC_BENCHMARK_PROVIDER="" \
- -DgRPC_ZLIB_PROVIDER=package \
- -DgRPC_CARES_PROVIDER=package \
- -DgRPC_SSL_PROVIDER=package \
- -DgRPC_PROTOBUF_PROVIDER=package \
- -DgRPC_GFLAGS_PROVIDER=package \
- -DBUILD_SHARED_LIBS=ON \
- -H/tmp/grpc \
- -B/tmp/grpc/.build; \
- cd /tmp/grpc/.build; \
- make -j${PARALLELISM} gpr grpc grpc++ grpc_cpp_plugin; \
- # copy libs to /usr/local/lib
- cp libgpr.so libgrpc.so libgrpc++.so libgrpc_plugin_support.so /usr/local/lib; \
- cp grpc_cpp_plugin /usr/local/bin; \
- cp -R ../include /usr/local; \
- ldconfig; \
- rm -rf /tmp/grpc
-
-# install gtest
-RUN set -e; \
- git clone https://github.com/google/googletest /tmp/googletest; \
- (cd /tmp/googletest ; git checkout ec44c6c1675c25b9827aacd08c02433cccde7780); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/googletest \
- -B/tmp/googletest/build; \
- cmake --build /tmp/googletest/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/googletest
-
-# install spdlog v0.16.3
-RUN set -e; \
- git clone https://github.com/gabime/spdlog /tmp/spdlog; \
- (cd /tmp/spdlog ; git checkout ccd675a286f457068ee8c823f8207f13c2325b26); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DSPDLOG_BUILD_TESTING=OFF -H/tmp/spdlog -B/tmp/spdlog/build; \
- cmake --build /tmp/spdlog/build --target install; \
- rm -rf /tmp/spdlog
-
-# install rxcpp
-RUN set -e; \
- git clone https://github.com/Reactive-Extensions/RxCpp /tmp/RxCpp; \
- (cd /tmp/RxCpp ; git checkout 1b2e0589f19cb34d8cd58803677701dcf2161876); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -H/tmp/RxCpp \
- -B/tmp/RxCpp/build; \
- cmake --build /tmp/RxCpp/build --target install; \
- rm -rf /tmp/RxCpp
-
-# install rapidjson
-RUN set -e; \
- git clone https://github.com/miloyip/rapidjson /tmp/rapidjson; \
- (cd /tmp/rapidjson ; git checkout f54b0e47a08782a6131cc3d60f94d038fa6e0a51); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DRAPIDJSON_BUILD_EXAMPLES=OFF \
- -H/tmp/rapidjson \
- -B/tmp/rapidjson/build; \
- cmake --build /tmp/rapidjson/build --target install; \
- ldconfig; \
- rm -rf /tmp/rapidjson
-
-# install libpq
-RUN set -e; \
- git clone --progress https://git.postgresql.org/git/postgresql.git /tmp/postgresql; \
- cd /tmp/postgresql; \
- git checkout 029386ccbddd0a33d481b94e511f5219b03e6636; \
- ./configure --without-readline --prefix=/usr/local; \
- # build
- make -j${PARALLELISM} -C src/bin/pg_config; \
- make -j${PARALLELISM} -C src/interfaces/libpq; \
- make -j${PARALLELISM} -C src/backend/utils fmgroids.h; \
- cp src/backend/utils/fmgroids.h src/include/utils/fmgroids.h; \
- # install
- make -C src/bin/pg_config install; \
- make -C src/interfaces/libpq install; \
- make -C src/include install; \
- ldconfig; \
- # remove
- rm -rf /tmp/postgresql
-
-# install pqxx
-RUN set -e; \
- git clone https://github.com/jtv/libpqxx /tmp/libpqxx; \
- (cd /tmp/libpqxx ; git checkout 5b17abce5ac2b1a2f8278718405b7ade8bb30ae9); \
- curl -L -o /tmp/libpqxx/config/config.guess 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=6b2374c79506ee82a8b440f6d1ca293e2e2e2463'; \
- curl -L -o /tmp/libpqxx/config/config.sub 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=6b2374c79506ee82a8b440f6d1ca293e2e2e2463'; \
- (cd /tmp/libpqxx ; /tmp/libpqxx/configure --disable-documentation --with-pic); \
- make -j${PARALLELISM} -C /tmp/libpqxx; \
- make -C /tmp/libpqxx install; \
- ldconfig; \
- rm -rf /tmp/libpqxx
-
-# install tbb
-RUN set -e; \
- git clone https://github.com/01org/tbb /tmp/tbb; \
- (cd /tmp/tbb ; git checkout eb6336ad29450f2a64af5123ca1b9429ff6bc11d); \
- make -j${PARALLELISM} -C /tmp/tbb tbb_build_prefix=build; \
- cp /tmp/tbb/build/build_debug/*.so* /usr/local/lib; \
- cp /tmp/tbb/build/build_release/*.so* /usr/local/lib; \
- cp -r /tmp/tbb/include/* /usr/local/include; \
- ldconfig; \
- rm -rf /tmp/tbb
-
-# install docker
-ENV DOCKER_VERSION=17.06.0-ce
-RUN set -e; \
- curl -L -o /tmp/docker-${DOCKER_VERSION}.tgz https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz; \
- tar -xz -C /tmp -f /tmp/docker-${DOCKER_VERSION}.tgz; \
- mv /tmp/docker/* /usr/bin; \
- rm /tmp/docker-${DOCKER_VERSION}.tgz; \
- rm -rf /tmp/docker
-
-# install sonar cli
-ENV SONAR_CLI_VERSION=3.0.3.778
-RUN set -e; \
- mkdir -p /opt/sonar; \
- curl -L -o /tmp/sonar.zip https://sonarsource.bintray.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${SONAR_CLI_VERSION}-linux.zip; \
- unzip -o -d /tmp/sonar-scanner /tmp/sonar.zip; \
- mv /tmp/sonar-scanner/sonar-scanner-${SONAR_CLI_VERSION}-linux /opt/sonar/scanner; \
- ln -s -f /opt/sonar/scanner/bin/sonar-scanner /usr/local/bin/sonar-scanner; \
- rm -rf /tmp/sonar*
-
-# install ed25519
-RUN set -e; \
- git clone git://github.com/hyperledger/iroha-ed25519.git /tmp/ed25519; \
- (cd /tmp/ed25519 ; git checkout e7188b8393dbe5ac54378610d53630bd4a180038); \
- cmake \
- -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
- -DTESTING=OFF \
- -H/tmp/ed25519 \
- -B/tmp/ed25519/build; \
- cmake --build /tmp/ed25519/build --target install -- -j${PARALLELISM}; \
- ldconfig; \
- rm -rf /tmp/ed25519
-
-# fetch lcov reports converter
-RUN curl -L -o /tmp/lcov_cobertura.py https://raw.githubusercontent.com/eriwen/lcov-to-cobertura-xml/8c55cd11f80a21e7e46f20f8c81fcde0bf11f5e5/lcov_cobertura/lcov_cobertura.py
-
-# install lcov
-RUN curl -L -o /tmp/lcov-1.13.tar.gz https://github.com/linux-test-project/lcov/releases/download/v1.13/lcov-1.13.tar.gz; cd /tmp; tar zxf lcov-1.13.tar.gz; cd lcov-1.13; make install
-
-# non-interactive adduser
-# -m = create home dir
-# -s = set default shell
-# iroha-ci = username
-# -u = userid, default for Ubuntu is 1000
-# -U = create a group same as username
-# no password
-RUN useradd -ms /bin/bash iroha-ci -u 1000 -U
-
-WORKDIR /opt/iroha
-RUN set -e; \
- chmod -R 777 /opt/iroha; \
- mkdir -p /tmp/ccache -m 777; \
- ccache --clear
-
-
-USER iroha-ci
-CMD ["/bin/bash"]
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 8bc958647b..95803e4084 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -2,7 +2,7 @@ version: '3'
services:
node:
- image: hyperledger/iroha-docker-develop:v1
+ image: hyperledger/iroha:develop-build
ports:
- "${IROHA_PORT}:50051"
- "${DEBUGGER_PORT}:20000"
diff --git a/docker/manifest.yaml b/docker/manifest.yaml
index f7ece5d19f..f43a6e1eee 100644
--- a/docker/manifest.yaml
+++ b/docker/manifest.yaml
@@ -2,20 +2,20 @@
image: hyperledger/iroha:latest
manifests:
-
- image: hyperledger/iroha:x86_64
+ image: hyperledger/iroha:x86_64-latest
platform:
architecture: amd64
os: linux
features:
- sse
-
- image: hyperledger/iroha:armv7l
+ image: hyperledger/iroha:armv7l-latest
platform:
architecture: arm
os: linux
variant: v7
-
- image: hyperledger/iroha:aarch64
+ image: hyperledger/iroha:aarch64-latest
platform:
architecture: arm64
os: linux
@@ -41,3 +41,25 @@ manifests:
platform:
architecture: arm64
os: linux
+
+# develop branch, build env
+image: hyperledger/iroha:develop-build
+manifests:
+ -
+ image: hyperledger/iroha:x86_64-develop-build
+ platform:
+ architecture: amd64
+ os: linux
+ features:
+ - sse
+ -
+ image: hyperledger/iroha:armv7l-develop-build
+ platform:
+ architecture: arm
+ os: linux
+ variant: v7
+ -
+ image: hyperledger/iroha:aarch64-develop-build
+ platform:
+ architecture: arm64
+ os: linux
diff --git a/docker/release/x86_64/Dockerfile b/docker/release/Dockerfile
similarity index 51%
rename from docker/release/x86_64/Dockerfile
rename to docker/release/Dockerfile
index 53f05883fd..c07120cb32 100644
--- a/docker/release/x86_64/Dockerfile
+++ b/docker/release/Dockerfile
@@ -1,15 +1,12 @@
FROM ubuntu:16.04
-RUN apt-get update;\
- apt-get upgrade -y; \
- apt-get install -y \
- libc-ares-dev
+RUN apt-get update; \
+ apt-get install -y libc-ares-dev
#Install iroha
COPY iroha.deb /tmp/iroha.deb
-RUN dpkg -i /tmp/iroha.deb
-
-RUN apt-get -fy install;rm -f /tmp/iroha.deb
+RUN apt-get install -y /tmp/iroha.deb; \
+ rm -f /tmp/iroha.deb
WORKDIR /opt/iroha_data
diff --git a/docker/release/aarch64/Dockerfile b/docker/release/aarch64/Dockerfile
deleted file mode 120000
index d810e95633..0000000000
--- a/docker/release/aarch64/Dockerfile
+++ /dev/null
@@ -1 +0,0 @@
-../x86_64/Dockerfile
\ No newline at end of file
diff --git a/docker/release/aarch64/entrypoint.sh b/docker/release/aarch64/entrypoint.sh
deleted file mode 120000
index d87c399ff9..0000000000
--- a/docker/release/aarch64/entrypoint.sh
+++ /dev/null
@@ -1 +0,0 @@
-../x86_64/entrypoint.sh
\ No newline at end of file
diff --git a/docker/release/armv7l/Dockerfile b/docker/release/armv7l/Dockerfile
deleted file mode 120000
index d810e95633..0000000000
--- a/docker/release/armv7l/Dockerfile
+++ /dev/null
@@ -1 +0,0 @@
-../x86_64/Dockerfile
\ No newline at end of file
diff --git a/docker/release/armv7l/entrypoint.sh b/docker/release/armv7l/entrypoint.sh
deleted file mode 120000
index d87c399ff9..0000000000
--- a/docker/release/armv7l/entrypoint.sh
+++ /dev/null
@@ -1 +0,0 @@
-../x86_64/entrypoint.sh
\ No newline at end of file
diff --git a/docker/release/x86_64/entrypoint.sh b/docker/release/entrypoint.sh
similarity index 70%
rename from docker/release/x86_64/entrypoint.sh
rename to docker/release/entrypoint.sh
index 85595eb18a..709e3d9bbc 100755
--- a/docker/release/x86_64/entrypoint.sh
+++ b/docker/release/entrypoint.sh
@@ -1,5 +1,4 @@
#!/usr/bin/env bash
echo key=$KEY
echo $PWD
-iroha-cli --genesis_block --peers_address peers.list
irohad --genesis_block genesis.block --config config.sample --keypair_name $KEY
diff --git a/docs/README.md b/docs/README.md
index 7883d2e54a..a57bff90f7 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,6 +1,6 @@
# Iroha reStructuredTest documentation
-The purpose of this documentation is to convey design and architecture aspects of Iroha ledger in a structured approach, as well as operational side: how-tos, guides, and examples. Docs are accessible via ReadTheDocs website, and can be generated to a lot of formats, available in Sphinx. In order to contribute, one should to be familiar with [reStructuredTest](docutils.sourceforge.net/rst.html) syntax, and follow principles described in this file.
+The purpose of this documentation is to convey design and architecture aspects of Iroha ledger in a structured approach, as well as operational side: how-tos, guides, and examples. Docs are accessible via ReadTheDocs website, and can be generated to a lot of formats, available in Sphinx. In order to contribute, one should be familiar with [reStructuredText](http://docutils.sourceforge.net/rst.html) syntax, and follow principles described in this file.
## Principles
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_001.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_001.png
new file mode 100644
index 0000000000..54ba9316eb
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_001.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_002.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_002.png
new file mode 100644
index 0000000000..0110bda65e
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_002.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_003.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_003.png
new file mode 100644
index 0000000000..a1fe925e9b
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_003.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_004.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_004.png
new file mode 100644
index 0000000000..1bc7aba4a4
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_004.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_005.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_005.png
new file mode 100644
index 0000000000..fd8ea8bbd3
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_005.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_007.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_007.png
new file mode 100644
index 0000000000..f0e7d53101
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_007.png differ
diff --git a/docs/image_assets/iroha_swift_guide/iroha_swift_guide_008.png b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_008.png
new file mode 100644
index 0000000000..05dbf506f1
Binary files /dev/null and b/docs/image_assets/iroha_swift_guide/iroha_swift_guide_008.png differ
diff --git a/docs/source/api/commands.rst b/docs/source/api/commands.rst
index 7736cef7b4..094d73994e 100644
--- a/docs/source/api/commands.rst
+++ b/docs/source/api/commands.rst
@@ -462,6 +462,8 @@ Purpose
Purpose of set account detail command is to set key-value information for a given account
+.. warning:: If there was a value for a given key already in the storage then it will be replaced with the new value
+
Schema
^^^^^^
diff --git a/docs/source/api/queries.rst b/docs/source/api/queries.rst
index da3cb1e1c5..fd1a17f06b 100644
--- a/docs/source/api/queries.rst
+++ b/docs/source/api/queries.rst
@@ -300,7 +300,7 @@ Response Structure
"Balance", "balance of the asset", "Not less than 0", "200.20"
Get Asset Info
---------------
+^^^^^^^^^^^^^^
Purpose
-------
diff --git a/docs/source/conf.py b/docs/source/conf.py
index d2b982d7e6..741f702a2d 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -52,9 +52,9 @@
# General information about the project.
project = u'Iroha'
documentation = u'Iroha Documentation'
-description = u'Distributed ledger technology platworm, written in C++'
-copyright = u'Creative Commons Attribution-NonCommercial 3.0 Unported'
-author = u'Nikolay Yushkevich at Soramitsu Co Ltd'
+description = u'Distributed ledger technology platform, written in C++'
+copyright = u'2018 Soramitsu Co., Ltd.'
+author = u'Nikolay Yushkevich at Soramitsu Co., Ltd.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
diff --git a/docs/source/getting_started/index.rst b/docs/source/getting_started/index.rst
index 627eb9ef18..4d7b8cb580 100644
--- a/docs/source/getting_started/index.rst
+++ b/docs/source/getting_started/index.rst
@@ -15,7 +15,7 @@ things simple, we will use Docker.
Prerequisites
-------------
For this guide, you need a computer running Unix-like system with ``docker``
-installed. You can read how to install it on a
+installed. You can read how to install it on a
`Docker's website `_.
.. note:: Please note that you can use Iroha without ``docker`` as well. You
@@ -76,17 +76,17 @@ Configuring Iroha Network
this guide.
Now we need to configure our Iroha network. This includes creating a
-configuration file, generating keypairs for a users, writing a list of peers
+configuration file, generating keypairs for users, writing a list of peers
and creating a genesis block. However, we have prepared an example
-configuration for this guide, so you can start playing with Iroha faster.
-In order to get those files, you need to clone the
+configuration for this guide, so you can start playing with Iroha faster.
+In order to get those files, you need to clone the
`Iroha repository `_ from Github.
.. code-block:: shell
git clone -b develop https://github.com/hyperledger/iroha --depth=1
-.. hint:: ``--depth-1`` option allows us to download only latest commit and
+.. hint:: ``--depth=1`` option allows us to download only latest commit and
save some time and bandwidth. If you want to get a full commit history, you
can omit this option.
@@ -103,7 +103,7 @@ command
-v blockstore:/tmp/block_store \
--network=iroha-network \
--entrypoint=/bin/bash \
- hyperledger/iroha-docker:develop
+ hyperledger/iroha:develop
Let's look in detail what this command does:
@@ -116,10 +116,10 @@ Let's look in detail what this command does:
the container
- ``--network=iroha-network \`` adds our container to previously created
``iroha-network``, so Iroha and Postgres could see each other.
-- ``--entrypoint=/bin/bash \`` Because ``hyperledger/iroha-docker`` has
+- ``--entrypoint=/bin/bash \`` Because ``hyperledger/iroha`` has
the custom script which runs after starting the container, we want to
override it so we can start Iroha Daemon manually.
-- ``hyperledger/iroha-docker:develop`` is the image which has the ``develop``
+- ``hyperledger/iroha:develop`` is the image which has the ``develop``
branch.
Launching Iroha Daemon
@@ -178,7 +178,7 @@ account to work with Iroha.
.. note:: Full account name has a ``@`` symbol between name and domain. Note
that the keypair has the same name.
-
+
Creating the First Transaction
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -206,7 +206,7 @@ Quantity (add_ast_qty)``, enter Account ID – ``admin@test``, asset ID –
.. note:: Full asset name has a ``#`` symbol between name and domain.
-Let's transfer 100.50 ``coolcoins`` from ``admin@test`` to ``test@test``
+Let's transfer 100.50 ``coolcoins`` from ``admin@test`` to ``test@test``
by adding one more command and choosing ``5. Transfer Assets (tran_ast)``.
Enter Source Account and Destination Account, in our case ``admin@test`` and
``test@test``, Asset ID (``coolcoin#test``), integer part and precision
@@ -225,7 +225,7 @@ Congratulations! You have submitted your first transaction to Iroha.
Creating the First Query
^^^^^^^^^^^^^^^^^^^^^^^^
-Now let's check if ``coolcoins`` were successfully transferred from
+Now let's check if ``coolcoins`` were successfully transferred from
``admin@test`` to ``test@test``. Choose ``2. New query
(qry)``. ``7. Get Account's Assets (get_acc_ast)`` can help you to check if
``test@test`` now has ``coolcoin``. Form a query in a similar way you did with
@@ -257,7 +257,7 @@ Account, in our case ``admin@test`` and ``test@test``, Asset ID
(``coolcoin#test``), integer part and precision (``10000000`` and ``2``
accordingly). Send a transaction to Iroha peer as you did before. Well, it says
-.. code::
+.. code::
[2018-03-21 12:58:40.791297963][th:520][info] TransactionResponseHandler Transaction successfully sent
Congratulation, your transaction was accepted for processing.
@@ -269,11 +269,11 @@ had successfully cheated Iroha? Let's try to see transaction's status. Choose
you can get in the console after the previous command. Let's send it to Iroha.
It replies with:
-.. code::
+.. code::
Transaction has not passed stateful validation.
Apparently no. Our transaction was not accepted because it did not pass
stateful validation and ``coolcoins`` were not transferred. You can check
-the status of ``admin@test`` and ``test@test`` with queries to be sure
+the status of ``admin@test`` and ``test@test`` with queries to be sure
(like we did earlier).
diff --git a/docs/source/guides/build.rst b/docs/source/guides/build.rst
index 4b3657f023..2a46387808 100644
--- a/docs/source/guides/build.rst
+++ b/docs/source/guides/build.rst
@@ -38,7 +38,7 @@ to the directory of your choice.
git clone -b develop https://github.com/hyperledger/iroha --depth=1
-.. hint:: ``--depth-1`` option allows us to download only latest commit and
+.. hint:: ``--depth=1`` option allows us to download only latest commit and
save some time and bandwidth. If you want to get a full commit history, you
can omit this option.
@@ -57,8 +57,8 @@ After you execute this script, following things happen:
1. The script checks if you don't have containers with Iroha already running.
Successful completion finishes with the new container shell.
-2. The script will download ``iroha-docker-develop`` and ``postgres`` images.
-``iroha-docker-develop`` image contains all development dependencies and is
+2. The script will download ``hyperledger/iroha:develop-build`` and ``postgres`` images.
+``hyperledger/iroha:develop-build`` image contains all development dependencies and is
based on top of ``ubuntu:16.04``. ``postgres`` image is required for starting
and running Iroha.
3. Two containers are created and launched.
diff --git a/docs/source/guides/dependencies.rst b/docs/source/guides/dependencies.rst
index 6fdc99c511..b959ec9c2f 100644
--- a/docs/source/guides/dependencies.rst
+++ b/docs/source/guides/dependencies.rst
@@ -226,7 +226,7 @@ Installation on macOS
CMAKE_BUILD_TYPE="Release"
git clone https://github.com/google/protobuf /tmp/protobuf;
- (cd /tmp/protobuf ; git checkout 80a37e0782d2d702d52234b62dd4b9ec74fd2c95);
+ (cd /tmp/protobuf ; git checkout 106ffc04be1abf3ff3399f54ccf149815b287dd9);
cmake \
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE} \
-Dprotobuf_BUILD_TESTS=OFF \
diff --git a/docs/source/guides/deployment.rst b/docs/source/guides/deployment.rst
index 04ab566ebe..40dd732a5a 100644
--- a/docs/source/guides/deployment.rst
+++ b/docs/source/guides/deployment.rst
@@ -70,7 +70,7 @@ In order to run Iroha peer as a single instance in Docker, you should pull the i
.. code-block:: shell
- docker pull hyperledger/iroha-docker:latest
+ docker pull hyperledger/iroha:latest
.. Hint:: Use *latest* tag for latest stable release, and *develop* for latest development version
@@ -138,7 +138,7 @@ If they are met, you can move forward with the following command:
-e KEY='node0' \
# Docker network name
--network=iroha-network \
- hyperledger/iroha-docker:latest
+ hyperledger/iroha:latest
Running multiple instances (peer network)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/docs/source/guides/libraries.rst b/docs/source/guides/libraries.rst
index 4a4b563923..65860a742b 100644
--- a/docs/source/guides/libraries.rst
+++ b/docs/source/guides/libraries.rst
@@ -9,4 +9,5 @@ Client Libraries
libraries/java.rst
libraries/nodejs.rst
libraries/python.rst
-
+ libraries/swift_ios.rst
+
diff --git a/docs/source/guides/libraries/nodejs.rst b/docs/source/guides/libraries/nodejs.rst
index 997135ea18..90451d83e7 100644
--- a/docs/source/guides/libraries/nodejs.rst
+++ b/docs/source/guides/libraries/nodejs.rst
@@ -67,7 +67,7 @@ Go to the NPM package directory and start the build process
.. code-block:: shell
cd iroha/shared_model/packages/javascript
- npm install
+ npm install --build-from-source=iroha-lib
That's all. You can use the library now.
diff --git a/docs/source/guides/libraries/python.rst b/docs/source/guides/libraries/python.rst
index db61ddc66c..15f89c2e35 100644
--- a/docs/source/guides/libraries/python.rst
+++ b/docs/source/guides/libraries/python.rst
@@ -230,7 +230,6 @@ Create domain and asset:
.. code:: python
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(tx_counter) \
.createdTime(current_time) \
.createDomain("domain", "user") \
.createAsset("coin", "domain", 2).build()
@@ -243,7 +242,6 @@ Create asset quantity:
.. code:: python
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(tx_counter) \
.createdTime(current_time) \
.addAssetQuantity("admin@test", "coin#domain", "1000.2").build()
@@ -257,7 +255,6 @@ Create account:
user1_kp = crypto.generateKeypair()
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(tx_counter) \
.createdTime(current_time) \
.createAccount("userone", "domain", user1_kp.publicKey()).build()
@@ -269,7 +266,6 @@ Send asset:
.. code:: python
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(tx_counter) \
.createdTime(current_time) \
.transferAsset("admin@test", "userone@domain", "coin#domain", "Some message", "2.0").build()
diff --git a/docs/source/guides/libraries/swift_ios.rst b/docs/source/guides/libraries/swift_ios.rst
new file mode 100644
index 0000000000..8443bbfb79
--- /dev/null
+++ b/docs/source/guides/libraries/swift_ios.rst
@@ -0,0 +1,230 @@
+iOS Swift Library
+-----------------
+
+Objectives
+^^^^^^^^^^
+
+In this guide you will learn:
+
+- How to build client library for iOS
+- How to configure test application
+- How to interact with Iroha blockchain from the mobile device
+
+Video Guide
+^^^^^^^^^^^
+
+For more details please visit the video below which fully describes all the steps.
+
+.. raw:: html
+
+
+
+Prerequisites
+^^^^^^^^^^^^^
+
+Before starting you need to install the following software on your mac:
+
+- XCode
+- Carthage
+- Git
+- Cmake
+- Postgresql
+
+This tutorial was tested with the following environment:
+
+- MacOS Sierra 10.12.6
+- Xcode 9.2
+- carthage 0.29.0
+- cmake 3.11.0
+- iPhone 7 iOS 11.2 Simulator
+
+Hyperledger Iroha iOS library
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Iroha has the following features:
+
+1. Creation and management of custom complex assets, such as currencies or indivisible rights, serial numbers, patents, etc.
+2. Management of user accounts
+3. Taxonomy of accounts based on domains — or sub-ledgers in the system
+4. The system of rights and verification of user permissions for the execution of transactions and queries in the system
+5. Validation of business rules for transactions and queries in the system
+
+A notable non-functional property of Iroha is its high degree of network fault tolerance (it is Byzantine Fault Tolerant).
+Iroha iOS library gives the ability to provide key generation and signing logic for queries and transactions passed to Iroha blockchain.
+Let's start with the detailed instructions how to install Iroha on the local machine.
+
+
+
+Instruction
+^^^^^^^^^^^
+
+1. Open the terminal and go to the folder where you want to install all artifacts:
+
+ .. code-block:: bash
+
+ cd path/to/your/folder/for/example/iroha-ios/project/
+
+2. Clone the repository for the iOS client:
+
+ .. code-block:: bash
+
+ git clone https://github.com/hyperledger/iroha-ios.git
+
+3. Go to the Iroha-ios folder:
+
+ .. code-block:: bash
+
+ cd iroha-ios/
+
+4. Update dependencies:
+
+ .. code-block:: bash
+
+ carthage update --platform iOS
+
+5. Go to sample project directory:
+
+ .. code-block:: bash
+
+ cd SwiftyIrohaExample
+
+6. Update dependencies for the sample:
+
+ .. code-block:: bash
+
+ carthage update --platform iOS
+
+7. Go to GRPC library source's location:
+
+ .. code-block:: bash
+
+ cd grpc-swift/
+
+8. Remove old library sources:
+
+.. note:: Make sure you are located in ``grpc-swift/`` subfolder
+
+ .. code-block:: bash
+
+ # removes all files from the current directory
+ rm -rf ./*
+ # removes all hidden files too (so clean build can be done)
+ rm -rf ./.*
+
+9. Download release version of GRPC from git to the current directory:
+
+ .. code-block:: bash
+
+ git clone --branch 0.3.3 https://github.com/grpc/grpc-swift.git .
+
+10. Build library:
+
+ .. code-block:: bash
+
+ make
+
+11. Go to the root of your playground folder (from the first step - path/to/your/folder/for/example/iroha-ios/project/):
+
+ .. code-block:: bash
+
+ cd ../../..
+
+.. note:: Make sure now you are located in ``path/to/your/folder/for/example/iroha-ios/project/`` folder
+
+12. This step downloads script for client library which is needed to build client library. Clone it from the repository:
+
+ .. code-block:: bash
+
+ curl https://raw.githubusercontent.com/hyperledger/iroha/master/shared_model/packages/ios/ios-build.sh > ios-build.sh
+
+13. Optional step. If you have issues with cloning during ios-build.sh execution do the following command before the script invocation:
+
+ .. code-block:: bash
+
+ sed -i '' 's|git://github.com/hyperledger/iroha-ed25519|https://github.com/hyperledger/iroha-ed25519.git|g' ios-build.sh
+
+14. Make downloaded script executable:
+
+ .. code-block:: bash
+
+ chmod +x ios-build.sh
+
+15. Finally, build the client iOS library with proper options - platform: OS | SIMULATOR | SIMULATOR64; build: Debug | Release :
+
+ .. code-block:: bash
+
+ ./ios-build.sh SIMULATOR64 Debug
+
+16. The generated artifacts should be copied to the proper location (let's create it first):
+
+ .. code-block:: bash
+
+ # this command shows location for simulator artifacts
+ # use this command for device instead:
+ # mkdir -p iroha-ios/libs/iOS/
+ mkdir -p iroha-ios/libs/Simulator/
+
+17. Copy generated binaries:
+
+ .. code-block:: bash
+
+ # this command shows location for simulator artifacts
+ # use this command for device instead:
+ # cp lib/* iroha-ios/libs/iOS/
+ cp lib/* iroha-ios/libs/Simulator/
+
+18. Do not forget to copy generated headers:
+
+ .. code-block:: bash
+
+ cp -a include/. iroha-ios/headers/
+
+19. Now it's time to manually configure the Xcode project for the sample application. Open SwiftyIroha.xcodeproj:
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_001.png
+
+20. Select SwiftyIrohaExample.xcodeproj general tab and link SwiftProtobuf framework from iroha-ios/SwiftProtobuf.framework location
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_002.png
+
+21. Select SwiftGRPC.xcodeproj project and remove zlib-example target from it:
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_003.png
+
+22. Go to Proto group and remove it (In future this step will be removed, but for now it's needed for sample app to be built):
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_004.png
+
+23. Congratulations! We are done. Select SwiftyIrohaExample target, choose iPhone simulator device and build the application to make sure we have done everything correctly:
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_005.png
+
+Before we launch the application and test it we should deploy Iroha peer on our local machine and launch it.
+
+There is good news - steps 1-18 should not be done manually every time - here is the script which does it automatically.
+
+The script for iOS client installation and setup
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+All you need now is to download `build script `__
+``iroha-preparation.sh`` and launch it from ``path/to/your/folder/for/example/iroha-ios/project/``.
+
+Starting Iroha Node
+^^^^^^^^^^^^^^^^^^^
+
+To run this example, you need an Iroha node up and running. Please check out
+:ref:`getting-started` if you want to learn how to start it.
+
+Launching Iroha iOS sample
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Now it's time to switch back to SwiftyIrohaSample application and launch it on the simulator. Open Xcode project, select proper sample target and run.
+The sample will send test transaction to our node and query the result from blockchain. Successful operations will look similar to this Xcode console output:
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_007.png
+
+The output from Iroha terminal window (where the node is running):
+
+.. image:: https://github.com/hyperledger/iroha/raw/develop/docs/image_assets/iroha_swift_guide/iroha_swift_guide_008.png
+
+Great! We have sent our transaction and verified its presence in blockchain.
diff --git a/docs/source/overview.rst b/docs/source/overview.rst
index 51783ee7cf..a2a8462a9c 100644
--- a/docs/source/overview.rst
+++ b/docs/source/overview.rst
@@ -49,11 +49,6 @@ Finally, Iroha is the only ledger that has a robust permission system, allowing
.. [#f1] Yet Another Consensus
-Is it fast?
------------
-
-As per the latest review date of these docs, according to `Huawei Caliper `_ testing tool, Iroha is capable of processing 45 transactions per second. Theoretically, this is not even close to the limit of the system, and we will continue constant optimizations in order to improve stability and performance.
-
How to create applications around Iroha?
----------------------------------------
diff --git a/example/admin@test.priv b/example/admin@test.priv
index ef24e1430d..87ff52af36 100644
--- a/example/admin@test.priv
+++ b/example/admin@test.priv
@@ -1 +1 @@
-1d7e0a32ee0affeb4d22acd73c2c6fb6bd58e266c8c2ce4fa0ffe3dd6a253ffb
\ No newline at end of file
+0f0ce16d2afbb8eca23c7d8c2724f0c257a800ee2bbd54688cec6b898e3f7e33
\ No newline at end of file
diff --git a/example/admin@test.pub b/example/admin@test.pub
index d80b9b299f..1dfda5428f 100644
--- a/example/admin@test.pub
+++ b/example/admin@test.pub
@@ -1 +1 @@
-407e57f50ca48969b08ba948171bb2435e035d82cec417e18e4a38f5fb113f83
\ No newline at end of file
+889f6b881e331be21487db77dcf32c5f8d3d5e8066e78d2feac4239fe91d416f
\ No newline at end of file
diff --git a/example/genesis.block b/example/genesis.block
index a8f665dd13..bb30df3c22 100644
--- a/example/genesis.block
+++ b/example/genesis.block
@@ -1,116 +1,120 @@
{
- "signatures": [],
- "created_ts": 0,
- "hash": "ef9781aadf6d4d5ac8d01e14f7bbc805278ed9bb7200dafe8335bfc3a254e82b",
- "prev_hash": "0000000000000000000000000000000000000000000000000000000000000000",
- "height": 1,
- "txs_number": 1,
- "transactions": [
- {
- "signatures": [],
- "created_ts": 0,
- "creator_account_id": "",
- "tx_counter": 0,
- "commands": [
- {
- "command_type": "AddPeer",
- "peer": {
- "address": "localhost:10001",
- "peer_key": "292a8714694095edce6be799398ed5d6244cd7be37eb813106b217d850d261f2"
- }
- },
- {
- "command_type": "CreateRole",
- "role_name": "admin",
- "permissions": [
- "can_add_peer",
- "can_add_signatory",
- "can_create_account",
- "can_create_domain",
- "can_get_my_acc_detail",
- "can_get_all_acc_detail",
- "can_get_all_acc_ast",
- "can_get_all_acc_ast_txs",
- "can_get_all_acc_txs",
- "can_get_all_accounts",
- "can_get_all_signatories",
- "can_get_all_txs",
- "can_get_roles",
- "can_read_assets",
- "can_remove_signatory",
- "can_set_detail",
- "can_set_quorum"
- ]
- },
- {
- "command_type": "CreateRole",
- "role_name": "user",
- "permissions": [
- "can_add_signatory",
- "can_get_my_account",
- "can_get_my_acc_detail",
- "can_get_domain_acc_detail",
- "can_get_my_acc_ast",
- "can_get_my_acc_ast_txs",
- "can_get_my_acc_txs",
- "can_get_my_signatories",
- "can_get_my_txs",
- "can_grant_can_add_signatory",
- "can_grant_can_remove_signatory",
- "can_grant_can_set_detail",
- "can_grant_can_set_quorum",
- "can_grant_can_transfer",
- "can_receive",
- "can_remove_signatory",
- "can_set_quorum",
- "can_set_detail",
- "can_transfer"
- ]
- },
- {
- "command_type": "CreateRole",
- "role_name": "money_creator",
- "permissions": [
- "can_add_asset_qty",
- "can_create_asset",
- "can_receive",
- "can_transfer"
- ]
- },
- {
- "command_type": "CreateDomain",
- "domain_id": "test",
- "user_default_role": "user"
- },
- {
- "command_type": "CreateAsset",
- "asset_name": "coin",
- "domain_id": "test",
- "precision": 2
- },
- {
- "command_type": "CreateAccount",
- "account_name": "admin",
- "domain_id": "test",
- "pubkey": "407e57f50ca48969b08ba948171bb2435e035d82cec417e18e4a38f5fb113f83"
- },
- {
- "command_type": "CreateAccount",
- "account_name": "test",
- "domain_id": "test",
- "pubkey": "359f925e4eeecfdd6aa1abc0b79a6a121a5dd63bb612b603247ea4f8ad160156"
- },
- {
- "command_type": "AppendRole",
- "account_id": "admin@test",
- "role_name": "admin"
- },
- {
- "command_type": "AppendRole",
- "account_id": "admin@test",
- "role_name": "money_creator"
- }
- ]
- }
- ]
+ "payload":{
+ "transactions":[
+ {
+ "payload":{
+ "commands":[
+ {
+ "addPeer":{
+ "peer":{
+ "address":"localhost:10001",
+ "peerKey":"0E2icbV/5jQmrh3Jf2lSEEA3QR/PTztzncIX9F5fyZs="
+ }
+ }
+ },
+ {
+ "createRole":{
+ "roleName":"admin",
+ "permissions":[
+ "can_add_peer",
+ "can_add_signatory",
+ "can_create_account",
+ "can_create_domain",
+ "can_get_all_acc_ast",
+ "can_get_all_acc_ast_txs",
+ "can_get_all_acc_detail",
+ "can_get_all_acc_txs",
+ "can_get_all_accounts",
+ "can_get_all_signatories",
+ "can_get_all_txs",
+ "can_get_roles",
+ "can_read_assets",
+ "can_remove_signatory",
+ "can_set_quorum",
+ "can_get_blocks"
+ ]
+ }
+ },
+ {
+ "createRole":{
+ "roleName":"user",
+ "permissions":[
+ "can_add_signatory",
+ "can_get_my_acc_ast",
+ "can_get_my_acc_ast_txs",
+ "can_get_my_acc_detail",
+ "can_get_my_acc_txs",
+ "can_get_my_account",
+ "can_get_my_signatories",
+ "can_get_my_txs",
+ "can_grant_can_add_my_signatory",
+ "can_grant_can_remove_my_signatory",
+ "can_grant_can_set_my_account_detail",
+ "can_grant_can_set_my_quorum",
+ "can_grant_can_transfer_my_assets",
+ "can_receive",
+ "can_remove_signatory",
+ "can_set_quorum",
+ "can_transfer"
+ ]
+ }
+ },
+ {
+ "createRole":{
+ "roleName":"money_creator",
+ "permissions":[
+ "can_add_asset_qty",
+ "can_create_asset",
+ "can_receive",
+ "can_transfer"
+ ]
+ }
+ },
+ {
+ "createDomain":{
+ "domainId":"test",
+ "defaultRole":"user"
+ }
+ },
+ {
+ "createAsset":{
+ "assetName":"coin",
+ "domainId":"test",
+ "precision":2
+ }
+ },
+ {
+ "createAccount":{
+ "accountName":"admin",
+ "domainId":"test",
+ "mainPubkey":"iJ9riB4zG+IUh9t33PMsX409XoBm540v6sQjn+kdQW8="
+ }
+ },
+ {
+ "createAccount":{
+ "accountName":"test",
+ "domainId":"test",
+ "mainPubkey":"3MfszkSPLhkKSBrsfKIe5S7aVG5mC0gg9JdtATIVcJc="
+ }
+ },
+ {
+ "appendRole":{
+ "accountId":"admin@test",
+ "roleName":"admin"
+ }
+ },
+ {
+ "appendRole":{
+ "accountId":"admin@test",
+ "roleName":"money_creator"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "txNumber":1,
+ "height":"1",
+ "prevBlockHash":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="
+ }
}
diff --git a/example/java/TransactionExample.java b/example/java/TransactionExample.java
index 19481053d6..08ab4b30a0 100644
--- a/example/java/TransactionExample.java
+++ b/example/java/TransactionExample.java
@@ -69,12 +69,11 @@ public static void main(String[] args) {
long currentTime = System.currentTimeMillis();
String creator = "admin@test";
- long startTxCounter = 1, startQueryCounter = 1;
+ long startQueryCounter = 1;
// build transaction (still unsigned)
UnsignedTx utx = txBuilder.creatorAccountId(creator)
.createdTime(BigInteger.valueOf(currentTime))
- .txCounter(BigInteger.valueOf(startTxCounter))
.createDomain("ru", "user")
.createAsset("dollar", "ru", (short)2).build();
diff --git a/example/java/build_library.sh b/example/java/build_library.sh
old mode 100644
new mode 100755
index ed2f5aef9a..2c4b0a7d28
--- a/example/java/build_library.sh
+++ b/example/java/build_library.sh
@@ -6,8 +6,13 @@ mkdir dist
# build native library
./prepare.sh
-cp build/shared_model/bindings/libirohajava.jnilib dist/libirohajava.jnilib
+unamestr=`uname`
+if [[ "$unamestr" == 'Linux' ]]; then
+ cp build/shared_model/bindings/libirohajava.so dist/libirohajava.so
+elif [[ "$unamestr" == 'Darwin' ]]; then
+ cp build/shared_model/bindings/libirohajava.jnilib dist/libirohajava.jnilib
+fi
# build jar
gradle jar
diff --git a/example/node/index.js b/example/node/index.js
index 41d4ce3b86..833ecae984 100644
--- a/example/node/index.js
+++ b/example/node/index.js
@@ -28,14 +28,12 @@ var adminPub = fs.readFileSync('../admin@test.pub').toString()
var keys = crypto.convertFromExisting(adminPub, adminPriv)
var currentTime = Date.now()
-var startTxCounter = 1
var startQueryCounter = 1
var creator = 'admin@test'
// build transaction
var tx = txBuilder
.creatorAccountId(creator)
- .txCounter(startTxCounter)
.createdTime(currentTime)
.createDomain('ru', 'user')
.createAsset('dollar', 'ru', 2)
diff --git a/example/node0.priv b/example/node0.priv
index 4ee1612c19..1362af9c39 100644
--- a/example/node0.priv
+++ b/example/node0.priv
@@ -1 +1 @@
-8316fe25fda2bb3964ae756251b5f1fe010fafe56443978d524dc6485548be76
\ No newline at end of file
+41209bd907789fd5a796ac6bdff908bac2f7abcf7a1d0b99a18290f285f6e965
\ No newline at end of file
diff --git a/example/node0.pub b/example/node0.pub
index e3e0793df3..bb7c1c9dcd 100644
--- a/example/node0.pub
+++ b/example/node0.pub
@@ -1 +1 @@
-292a8714694095edce6be799398ed5d6244cd7be37eb813106b217d850d261f2
\ No newline at end of file
+d04da271b57fe63426ae1dc97f6952104037411fcf4f3b739dc217f45e5fc99b
\ No newline at end of file
diff --git a/example/python/tx-example.py b/example/python/tx-example.py
index e1acc0691f..10c310d03a 100644
--- a/example/python/tx-example.py
+++ b/example/python/tx-example.py
@@ -20,9 +20,13 @@
admin_pub = open("../admin@test.pub", "r").read()
key_pair = crypto.convertFromExisting(admin_pub, admin_priv)
+user1_kp = crypto.generateKeypair()
+
current_time = int(round(time.time() * 1000)) - 10**5
creator = "admin@test"
+query_counter = 1
+
def get_status(tx):
# Create status request
@@ -114,9 +118,11 @@ def send_query(query, key_pair):
return query_response
-def tx1():
+def create_asset_coin():
+ """
+ Create domain "domain" and asset "coin#domain" with precision 2
+ """
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(1) \
.createdTime(current_time) \
.createDomain("domain", "user") \
.createAsset("coin", "domain", 2).build()
@@ -125,9 +131,11 @@ def tx1():
print_status_streaming(tx)
-def tx2():
+def add_coin_to_admin():
+ """
+ Add 1000.00 asset quantity of asset coin to admin
+ """
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(2) \
.createdTime(current_time) \
.addAssetQuantity("admin@test", "coin#domain", "1000.00").build()
@@ -135,32 +143,61 @@ def tx2():
print_status_streaming(tx)
-def tx3():
- user1_kp = crypto.generateKeypair()
-
+def create_account_userone():
+ """
+ Create account "userone@domain"
+ """
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(3) \
.createdTime(current_time) \
.createAccount("userone", "domain", user1_kp.publicKey()).build()
send_tx(tx, key_pair)
print_status_streaming(tx)
-
-def tx4():
+def transfer_coin_from_admin_to_userone():
+ """
+ Transfer 2.00 of coin from admin@test to userone@domain
+ """
tx = tx_builder.creatorAccountId(creator) \
- .txCounter(4) \
.createdTime(current_time) \
.transferAsset("admin@test", "userone@domain", "coin#domain", "Some message", "2.00").build()
send_tx(tx, key_pair)
print_status_streaming(tx)
+def grant_admin_to_add_detail_to_userone():
+ """
+ Grant admin@test to be able to set details information to userone@domain
+ """
+ tx = tx_builder.creatorAccountId("userone@domain") \
+ .createdTime(current_time) \
+ .grantPermission(creator, "can_set_my_account_detail") \
+ .build()
+
+ send_tx(tx, user1_kp)
+ print_status_streaming(tx)
+
+def set_age_to_userone_by_admin():
+ """
+ Set age to userone@domain by admin@test
+ """
+ tx = tx_builder.creatorAccountId(creator) \
+ .createdTime(current_time) \
+ .setAccountDetail("userone@domain", "age", "18") \
+ .build()
+
+ send_tx(tx, key_pair)
+ print_status_streaming(tx)
-def get_asset():
+def get_coin_info():
+ """
+ Get information about asset coin#domain
+ """
+ global query_counter
+ query_counter += 1
query = query_builder.creatorAccountId(creator) \
.createdTime(current_time) \
- .queryCounter(1) \
+ .queryCounter(query_counter) \
.getAssetInfo("coin#domain") \
.build()
@@ -178,9 +215,14 @@ def get_asset():
def get_account_asset():
+ """
+ Get list of transactions done by userone@domain with asset coin#domain
+ """
+ global query_counter
+ query_counter += 1
query = query_builder.creatorAccountId(creator) \
.createdTime(current_time) \
- .queryCounter(11) \
+ .queryCounter(query_counter) \
.getAccountAssets("userone@domain", "coin#domain") \
.build()
@@ -188,11 +230,30 @@ def get_account_asset():
print(query_response)
+def get_userone_info():
+ """
+ Get userone's key value information
+ """
+ global query_counter
+ query_counter += 1
+ query = query_builder.creatorAccountId(creator) \
+ .createdTime(current_time) \
+ .queryCounter(query_counter) \
+ .getAccountDetail("userone@domain") \
+ .build()
+
+ query_response = send_query(query, key_pair)
+ print(query_response.account_detail_response.detail)
+
+
-tx1()
-tx2()
-tx3()
-tx4()
-get_asset()
+create_asset_coin()
+add_coin_to_admin()
+create_account_userone()
+transfer_coin_from_admin_to_userone()
+grant_admin_to_add_detail_to_userone()
+set_age_to_userone_by_admin()
+get_coin_info()
get_account_asset()
+get_userone_info()
print("done!")
diff --git a/example/test@test.priv b/example/test@test.priv
index 7e6f039b91..f41898d68e 100644
--- a/example/test@test.priv
+++ b/example/test@test.priv
@@ -1 +1 @@
-4209ba343a92f4d086921a3f3c1eb26f50f2fece610ec3524058de79281564c2
\ No newline at end of file
+2e5b37fd881f260323dca4f0776e6e1e969bef7dab14673858f82663e7cd8556
\ No newline at end of file
diff --git a/example/test@test.pub b/example/test@test.pub
index 556317b45f..95419850c9 100644
--- a/example/test@test.pub
+++ b/example/test@test.pub
@@ -1 +1 @@
-359f925e4eeecfdd6aa1abc0b79a6a121a5dd63bb612b603247ea4f8ad160156
\ No newline at end of file
+dcc7ecce448f2e190a481aec7ca21ee52eda546e660b4820f4976d0132157097
\ No newline at end of file
diff --git a/iroha-cli/client.cpp b/iroha-cli/client.cpp
index 0e4a0f86af..a9cb123342 100644
--- a/iroha-cli/client.cpp
+++ b/iroha-cli/client.cpp
@@ -16,6 +16,9 @@
*/
#include "client.hpp"
+
+#include "backend/protobuf/queries/proto_query.hpp"
+#include "backend/protobuf/transaction.hpp"
#include "model/converters/json_query_factory.hpp"
#include "model/converters/json_transaction_factory.hpp"
#include "model/converters/pb_query_factory.hpp"
@@ -27,13 +30,12 @@ namespace iroha_cli {
: command_client_(target_ip, port), query_client_(target_ip, port) {}
CliClient::Response CliClient::sendTx(
- iroha::model::Transaction tx) {
+ const shared_model::interface::Transaction &tx) {
+ const auto proto_tx =
+ static_cast(tx);
CliClient::Response response;
- // Convert to protobuf
- iroha::model::converters::PbTransactionFactory factory;
- auto pb_tx = factory.serialize(tx);
// Send to iroha:
- response.status = command_client_.Torii(pb_tx);
+ response.status = command_client_.Torii(proto_tx.getTransport());
// TODO 12/10/2017 neewy implement return of real transaction status IR-494
response.answer = TxStatus::OK;
@@ -55,13 +57,14 @@ namespace iroha_cli {
}
CliClient::Response CliClient::sendQuery(
- std::shared_ptr query) {
+ const shared_model::interface::Query &query) {
CliClient::Response response;
// Convert to proto and send to Iroha
iroha::model::converters::PbQueryFactory pb_factory;
- auto pb_query = pb_factory.serialize(query);
+ auto proto_query = static_cast(query);
iroha::protocol::QueryResponse query_response;
- response.status = query_client_.Find(pb_query.value(), query_response);
+ response.status =
+ query_client_.Find(proto_query.getTransport(), query_response);
response.answer = query_response;
return response;
}
diff --git a/iroha-cli/client.hpp b/iroha-cli/client.hpp
index 1458b86923..16c41c4abd 100644
--- a/iroha-cli/client.hpp
+++ b/iroha-cli/client.hpp
@@ -23,12 +23,12 @@
#include "torii/command_client.hpp"
#include "torii/query_client.hpp"
-namespace iroha {
- namespace model {
- struct Query;
- struct Transaction;
- }
-}
+namespace shared_model {
+ namespace interface {
+ class Transaction;
+ class Query;
+ } // namespace interface
+} // namespace shared_model
namespace iroha_cli {
@@ -50,7 +50,7 @@ namespace iroha_cli {
* @return
*/
CliClient::Response sendTx(
- iroha::model::Transaction tx);
+ const shared_model::interface::Transaction &tx);
/**
* Send Query to Iroha Peer, i.e. target_ip:port
@@ -58,7 +58,7 @@ namespace iroha_cli {
* @return
*/
CliClient::Response sendQuery(
- std::shared_ptr query);
+ const shared_model::interface::Query &query);
CliClient::Response getTxStatus(
std::string tx_hash);
diff --git a/iroha-cli/interactive/impl/interactive_cli.cpp b/iroha-cli/interactive/impl/interactive_cli.cpp
index e5fb118eb3..222fa7b8e2 100644
--- a/iroha-cli/interactive/impl/interactive_cli.cpp
+++ b/iroha-cli/interactive/impl/interactive_cli.cpp
@@ -44,12 +44,11 @@ namespace iroha_cli {
const std::string &account_name,
const std::string &default_peer_ip,
int default_port,
- uint64_t tx_counter,
uint64_t qry_counter,
const std::shared_ptr &provider)
: creator_(account_name),
tx_cli_(
- creator_, default_peer_ip, default_port, tx_counter, provider),
+ creator_, default_peer_ip, default_port, provider),
query_cli_(
creator_, default_peer_ip, default_port, qry_counter, provider),
statusCli_(default_peer_ip, default_port) {
diff --git a/iroha-cli/interactive/impl/interactive_query_cli.cpp b/iroha-cli/interactive/impl/interactive_query_cli.cpp
index 3c1f6a362f..8a9b6047b7 100644
--- a/iroha-cli/interactive/impl/interactive_query_cli.cpp
+++ b/iroha-cli/interactive/impl/interactive_query_cli.cpp
@@ -18,6 +18,8 @@
#include
#include
+#include "backend/protobuf/queries/proto_query.hpp"
+
#include "client.hpp"
#include "common/byteutils.hpp"
#include "crypto/keys_manager_impl.hpp"
@@ -26,6 +28,7 @@
#include "grpc_response_handler.hpp"
#include "interactive/interactive_query_cli.hpp"
#include "model/converters/json_query_factory.hpp"
+#include "model/converters/pb_query_factory.hpp"
#include "model/model_crypto_provider.hpp" // for ModelCryptoProvider
#include "model/queries/get_asset_info.hpp"
#include "model/queries/get_roles.hpp"
@@ -194,9 +197,8 @@ namespace iroha_cli {
GetTransactions::TxHashCollectionType tx_hashes;
std::for_each(
params.begin(), params.end(), [&tx_hashes](auto const &hex_hash) {
- if (auto opt = iroha::
- hexstringToArray(
- hex_hash)) {
+ if (auto opt = iroha::hexstringToArray<
+ GetTransactions::TxHashType::size()>(hex_hash)) {
tx_hashes.push_back(*opt);
}
});
@@ -262,7 +264,9 @@ namespace iroha_cli {
provider_->sign(*query_);
CliClient client(address.value().first, address.value().second);
- GrpcResponseHandler{}.handle(client.sendQuery(query_));
+ auto query = shared_model::proto::Query(
+ *iroha::model::converters::PbQueryFactory().serialize(query_));
+ GrpcResponseHandler{}.handle(client.sendQuery(query));
printEnd();
// Stop parsing
return false;
diff --git a/iroha-cli/interactive/impl/interactive_transaction_cli.cpp b/iroha-cli/interactive/impl/interactive_transaction_cli.cpp
index 386629218c..02b095a6e1 100644
--- a/iroha-cli/interactive/impl/interactive_transaction_cli.cpp
+++ b/iroha-cli/interactive/impl/interactive_transaction_cli.cpp
@@ -19,6 +19,8 @@
#include
#include
+
+#include "backend/protobuf/transaction.hpp"
#include "client.hpp"
#include "grpc_response_handler.hpp"
#include "model/commands/append_role.hpp"
@@ -30,12 +32,14 @@
#include "model/converters/json_common.hpp"
#include "model/converters/json_transaction_factory.hpp"
#include "model/converters/pb_common.hpp"
+#include "model/converters/pb_transaction_factory.hpp"
#include "model/model_crypto_provider.hpp" // for ModelCryptoProvider
-#include "validators/permissions.hpp"
#include "model/sha3_hash.hpp"
#include "parser/parser.hpp" // for parser::ParseValue
+#include "validators/permissions.hpp"
using namespace iroha::model;
+using namespace shared_model::permissions;
namespace iroha_cli {
namespace interactive {
@@ -177,13 +181,11 @@ namespace iroha_cli {
const std::string &creator_account,
const std::string &default_peer_ip,
int default_port,
- uint64_t tx_counter,
const std::shared_ptr &provider)
: current_context_(MAIN),
creator_(creator_account),
default_peer_ip_(default_peer_ip),
default_port_(default_port),
- tx_counter_(tx_counter),
provider_(provider) {
log_ = logger::log("InteractiveTransactionCli");
createCommandMenu();
@@ -195,8 +197,7 @@ namespace iroha_cli {
current_context_ = MAIN;
printMenu("Forming a new transactions, choose command to add: ",
commands_menu_);
- // Creating a new transaction, increment local tx_counter
- ++tx_counter_;
+ // Creating a new transaction
while (is_parsing) {
auto line = promptString("> ");
if (not line) {
@@ -253,9 +254,7 @@ namespace iroha_cli {
auto create_account = parser::parseValue(params[8]);
if (not(read_self and edit_self and read_all and transfer_receive
- and asset_create
- and create_domain
- and roles
+ and asset_create and create_domain and roles
and create_account)) {
std::cout << "Wrong format for permission" << std::endl;
return nullptr;
@@ -473,17 +472,18 @@ namespace iroha_cli {
// Forming a transaction
- auto tx =
- tx_generator_.generateTransaction(creator_, tx_counter_, commands_);
+ auto tx = tx_generator_.generateTransaction(creator_, commands_);
// clear commands so that we can start creating new tx
commands_.clear();
provider_->sign(tx);
GrpcResponseHandler response_handler;
-
+ auto shared_tx = shared_model::proto::Transaction(
+ iroha::model::converters::PbTransactionFactory().serialize(tx));
response_handler.handle(
- CliClient(address.value().first, address.value().second).sendTx(tx));
+ CliClient(address.value().first, address.value().second)
+ .sendTx(shared_tx));
printTxHash(tx);
printEnd();
@@ -502,8 +502,7 @@ namespace iroha_cli {
}
// Forming a transaction
- auto tx =
- tx_generator_.generateTransaction(creator_, tx_counter_, commands_);
+ auto tx = tx_generator_.generateTransaction(creator_, commands_);
// clear commands so that we can start creating new tx
commands_.clear();
diff --git a/iroha-cli/interactive/interactive_cli.hpp b/iroha-cli/interactive/interactive_cli.hpp
index 8f0b00a915..0756746042 100644
--- a/iroha-cli/interactive/interactive_cli.hpp
+++ b/iroha-cli/interactive/interactive_cli.hpp
@@ -32,7 +32,6 @@ namespace iroha_cli {
* @param account_name registered in Iroha network
* @param default_peer_ip default peer ip to send transactions/query
* @param default_port default port of peer's Iroha Torii
- * @param tx_counter synchronized nonce for sending transaction
* @param qry_counter synchronized nonce for sending queries
* @param provider crypto provider to make signatures
*/
@@ -40,7 +39,6 @@ namespace iroha_cli {
const std::string &account_name,
const std::string &default_peer_ip,
int default_port,
- uint64_t tx_counter,
uint64_t qry_counter,
const std::shared_ptr &provider);
/**
diff --git a/iroha-cli/interactive/interactive_transaction_cli.hpp b/iroha-cli/interactive/interactive_transaction_cli.hpp
index a9a5abb572..5e8f2a9597 100644
--- a/iroha-cli/interactive/interactive_transaction_cli.hpp
+++ b/iroha-cli/interactive/interactive_transaction_cli.hpp
@@ -40,14 +40,12 @@ namespace iroha_cli {
* @param creator_account user Iroha account
* @param default_peer_ip of Iroha peer
* @param default_port of Iroha peer
- * @param tx_counter synchronized with Iroha network
* @param provider for signing transactions
*/
InteractiveTransactionCli(
const std::string &creator_account,
const std::string &default_peer_ip,
int default_port,
- uint64_t tx_counter,
const std::shared_ptr &provider);
/**
* Run interactive query command line
@@ -180,9 +178,6 @@ namespace iroha_cli {
std::string default_peer_ip_;
int default_port_;
- // Transaction counter specific for account creator
- uint64_t tx_counter_;
-
// Builder for new commands
iroha::model::generators::CommandGenerator generator_;
diff --git a/iroha-cli/main.cpp b/iroha-cli/main.cpp
index cc9d533804..364bbeca64 100644
--- a/iroha-cli/main.cpp
+++ b/iroha-cli/main.cpp
@@ -16,26 +16,29 @@
*/
#include
+#include
+#include
#include
-#include
#include
+#include "backend/protobuf/queries/proto_query.hpp"
#include "client.hpp"
-#include "common/assert_config.hpp"
+#include "converters/protobuf/json_proto_converter.hpp"
#include "crypto/keys_manager_impl.hpp"
#include "grpc_response_handler.hpp"
#include "interactive/interactive_cli.hpp"
#include "model/converters/json_block_factory.hpp"
#include "model/converters/json_query_factory.hpp"
+#include "model/converters/pb_block_factory.hpp"
+#include "model/converters/pb_query_factory.hpp"
+#include "model/converters/pb_transaction_factory.hpp"
#include "model/generators/block_generator.hpp"
#include "model/model_crypto_provider_impl.hpp"
-#include "validators.hpp"
// Account information
-DEFINE_bool(
- new_account,
- false,
- "Generate and save locally new public/private keys");
+DEFINE_bool(new_account,
+ false,
+ "Generate and save locally new public/private keys");
DEFINE_string(account_name,
"",
"Name of the account. Must be unique in iroha network");
@@ -55,6 +58,9 @@ DEFINE_string(json_query, "", "Query in json format");
DEFINE_bool(genesis_block,
false,
"Generate genesis block for new Iroha network");
+DEFINE_string(genesis_transaction,
+ "",
+ "File with transaction in json format for the genesis block");
DEFINE_string(peers_address,
"",
"File with peers address for new Iroha network");
@@ -62,13 +68,20 @@ DEFINE_string(peers_address,
// Run iroha-cli in interactive mode
DEFINE_bool(interactive, true, "Run iroha-cli in interactive mode");
-
using namespace iroha::protocol;
using namespace iroha::model::generators;
using namespace iroha::model::converters;
using namespace iroha_cli::interactive;
namespace fs = boost::filesystem;
+iroha::keypair_t *makeOldModel(const shared_model::crypto::Keypair &keypair) {
+ return new iroha::keypair_t{
+ shared_model::crypto::PublicKey::OldPublicKeyType::from_string(
+ toBinaryString(keypair.publicKey())),
+ shared_model::crypto::PrivateKey::OldPrivateKeyType::from_string(
+ toBinaryString(keypair.privateKey()))};
+}
+
int main(int argc, char *argv[]) {
gflags::ParseCommandLineFlags(&argc, &argv, true);
gflags::ShutDownCommandLineFlags();
@@ -76,25 +89,42 @@ int main(int argc, char *argv[]) {
// Generate new genesis block now Iroha network
if (FLAGS_genesis_block) {
BlockGenerator generator;
-
- if (FLAGS_peers_address.empty()) {
- logger->error("--peers_address is empty");
- return EXIT_FAILURE;
+ iroha::model::Transaction transaction;
+ if (FLAGS_genesis_transaction.empty()) {
+ if (FLAGS_peers_address.empty()) {
+ logger->error("--peers_address is empty");
+ return EXIT_FAILURE;
+ }
+ std::ifstream file(FLAGS_peers_address);
+ std::vector peers_address;
+ std::copy(std::istream_iterator(file),
+ std::istream_iterator(),
+ std::back_inserter(peers_address));
+ // Generate genesis block
+ transaction = TransactionGenerator().generateGenesisTransaction(
+ 0, std::move(peers_address));
+ } else {
+ rapidjson::Document doc;
+ std::ifstream file(FLAGS_genesis_transaction);
+ rapidjson::IStreamWrapper isw(file);
+ doc.ParseStream(isw);
+ auto some_tx = JsonTransactionFactory{}.deserialize(doc);
+ if (some_tx) {
+ transaction = *some_tx;
+ } else {
+ logger->error(
+ "Cannot deserialize genesis transaction (problem with file reading "
+ "or illformed json?)");
+ return EXIT_FAILURE;
+ }
}
- std::ifstream file(FLAGS_peers_address);
- std::vector peers_address;
- std::copy(std::istream_iterator(file),
- std::istream_iterator(),
- std::back_inserter(peers_address));
- // Generate genesis block
- auto transaction = TransactionGenerator().generateGenesisTransaction(
- 0, std::move(peers_address));
+
auto block = generator.generateGenesisBlock(0, {transaction});
// Convert to json
- JsonBlockFactory json_factory;
- auto doc = json_factory.serialize(block);
std::ofstream output_file("genesis.block");
- output_file << jsonToString(doc);
+ auto bl = shared_model::proto::Block(
+ iroha::model::converters::PbBlockFactory().serialize(block));
+ output_file << shared_model::converters::protobuf::modelToJson(bl);
logger->info("File saved to genesis.block");
}
// Create new pub/priv key, register in Iroha Network
@@ -129,7 +159,10 @@ int main(int argc, char *argv[]) {
if (not tx_opt) {
logger->error("Json transaction has wrong format.");
} else {
- response_handler.handle(client.sendTx(tx_opt.value()));
+ auto tx = shared_model::proto::Transaction(
+ iroha::model::converters::PbTransactionFactory().serialize(
+ *tx_opt));
+ response_handler.handle(client.sendTx(tx));
}
}
if (not FLAGS_json_query.empty()) {
@@ -142,7 +175,10 @@ int main(int argc, char *argv[]) {
if (not query_opt) {
logger->error("Json has wrong format.");
} else {
- response_handler.handle(client.sendQuery(query_opt.value()));
+ auto query = shared_model::proto::Query(
+ *iroha::model::converters::PbQueryFactory().serialize(*query_opt));
+ auto response = client.sendQuery(query);
+ response_handler.handle(response);
}
}
}
@@ -158,20 +194,17 @@ int main(int argc, char *argv[]) {
return EXIT_FAILURE;
}
iroha::KeysManagerImpl manager((path / FLAGS_account_name).string());
- boost::optional keypair;
- if (FLAGS_pass_phrase.size() != 0) {
- keypair = manager.loadKeys(FLAGS_pass_phrase);
- } else {
- keypair = manager.loadKeys();
- }
+ auto keypair = FLAGS_pass_phrase.size() != 0
+ ? manager.loadKeys(FLAGS_pass_phrase)
+ : manager.loadKeys();
if (not keypair) {
logger->error(
"Cannot load specified keypair, or keypair is invalid. Path: {}, "
- "keypair name: {}. Use --key_path to path to your keypair. \nMaybe wrong pass phrase (\"{}\")?",
+ "keypair name: {}. Use --key_path with path of your keypair. \n"
+ "Maybe wrong pass phrase (\"{}\")?",
path.string(),
FLAGS_account_name,
- FLAGS_pass_phrase
- );
+ FLAGS_pass_phrase);
return EXIT_FAILURE;
}
// TODO 13/09/17 grimadas: Init counters from Iroha, or read from disk?
@@ -181,9 +214,8 @@ int main(int argc, char *argv[]) {
FLAGS_peer_ip,
FLAGS_torii_port,
0,
- 0,
std::make_shared(
- *keypair));
+ *std::unique_ptr(makeOldModel(*keypair))));
interactiveCli.run();
} else {
logger->error("Invalid flags");
diff --git a/irohad/ametsuchi/CMakeLists.txt b/irohad/ametsuchi/CMakeLists.txt
index 8806c4ce01..4f37c62781 100644
--- a/irohad/ametsuchi/CMakeLists.txt
+++ b/irohad/ametsuchi/CMakeLists.txt
@@ -13,15 +13,13 @@ add_library(ametsuchi
)
target_link_libraries(ametsuchi
- json_model_converters
logger
rxcpp
pqxx
libs_common
command_execution
- boost
- model_interfaces
+ query_execution
+ shared_model_interfaces
shared_model_proto_backend
- shared_model_proto_builders
shared_model_stateless_validation
)
diff --git a/irohad/ametsuchi/block_query.hpp b/irohad/ametsuchi/block_query.hpp
index cac9647d5c..47780171fd 100644
--- a/irohad/ametsuchi/block_query.hpp
+++ b/irohad/ametsuchi/block_query.hpp
@@ -96,6 +96,14 @@ namespace iroha {
*/
virtual boost::optional getTxByHashSync(
const shared_model::crypto::Hash &hash) = 0;
+
+ /**
+ * Synchronously checks whether transaction
+ * with given hash is present in any block
+ * @param hash - transaction hash
+ * @return true if transaction exists, false otherwise
+ */
+ virtual bool hasTxWithHash(const shared_model::crypto::Hash &hash) = 0;
};
} // namespace ametsuchi
} // namespace iroha
diff --git a/irohad/ametsuchi/impl/mutable_storage_impl.cpp b/irohad/ametsuchi/impl/mutable_storage_impl.cpp
index 45a34b4c20..2baaf5e8e3 100644
--- a/irohad/ametsuchi/impl/mutable_storage_impl.cpp
+++ b/irohad/ametsuchi/impl/mutable_storage_impl.cpp
@@ -23,7 +23,6 @@
#include "ametsuchi/impl/postgres_wsv_command.hpp"
#include "ametsuchi/impl/postgres_wsv_query.hpp"
#include "ametsuchi/wsv_command.hpp"
-#include "backend/protobuf/from_old_model.hpp"
#include "model/sha3_hash.hpp"
namespace iroha {
@@ -76,9 +75,7 @@ namespace iroha {
execute_transaction);
if (result) {
- block_store_.insert(std::make_pair(
- block.height(),
- clone(block)));
+ block_store_.insert(std::make_pair(block.height(), clone(block)));
block_index_->index(block);
top_hash_ = block.hash();
diff --git a/irohad/ametsuchi/impl/postgres_block_query.cpp b/irohad/ametsuchi/impl/postgres_block_query.cpp
index d17aa25c41..ac2529d02c 100644
--- a/irohad/ametsuchi/impl/postgres_block_query.cpp
+++ b/irohad/ametsuchi/impl/postgres_block_query.cpp
@@ -16,9 +16,11 @@
*/
#include "ametsuchi/impl/postgres_block_query.hpp"
+
#include
#include
-#include "backend/protobuf/from_old_model.hpp"
+
+#include "converters/protobuf/json_proto_converter.hpp"
namespace iroha {
namespace ametsuchi {
@@ -38,26 +40,25 @@ namespace iroha {
if (height > to or count == 0) {
return rxcpp::observable<>::empty();
}
- return rxcpp::observable<>::range(height, to).flat_map([this](auto i) {
- // TODO IR-975 victordrobny 12.02.2018 convert directly to
- // shared_model::proto::Block after FlatFile will be reworked to new
- // model
- auto block = block_store_.get(i) | [](const auto &bytes) {
- return model::converters::stringToJson(bytesToString(bytes));
- } | [this](const auto &d) {
- return serializer_.deserialize(d);
- } | [](const auto &block_old) {
- return std::make_shared(
- shared_model::proto::from_old(block_old));
- };
- return rxcpp::observable<>::create(
- [block{std::move(block)}](auto s) {
- if (block) {
- s.on_next(block);
- }
- s.on_completed();
- });
- });
+ return rxcpp::observable<>::range(height, to)
+ .flat_map([this](const auto &i) {
+ auto block = block_store_.get(i) | [](const auto &bytes) {
+ return shared_model::converters::protobuf::jsonToModel<
+ shared_model::proto::Block>(bytesToString(bytes));
+ };
+ if (not block) {
+ log_->error("error while converting from JSON");
+ }
+
+ return rxcpp::observable<>::create(
+ [block{std::move(block)}](const auto &s) {
+ if (block) {
+ s.on_next(std::make_shared(
+ block.value()));
+ }
+ s.on_completed();
+ });
+ });
}
rxcpp::observable PostgresBlockQuery::getBlocksFrom(
@@ -100,6 +101,7 @@ namespace iroha {
-> boost::optional<
shared_model::interface::types::HeightType> {
if (result.size() == 0) {
+ log_->info("No block with transaction {}", hash.toString());
return boost::none;
}
return result[0]
@@ -111,19 +113,20 @@ namespace iroha {
std::function PostgresBlockQuery::callback(
const rxcpp::subscriber &subscriber, uint64_t block_id) {
return [this, &subscriber, block_id](pqxx::result &result) {
- auto block = block_store_.get(block_id) | [this](auto bytes) {
- // TODO IR-975 victordrobny 12.02.2018 convert directly to
- // shared_model::proto::Block after FlatFile will be reworked to new
- // model
- return boost::optional(
- shared_model::proto::from_old(*serializer_.deserialize(
- *model::converters::stringToJson(bytesToString(bytes)))));
+ auto block = block_store_.get(block_id) | [](const auto &bytes) {
+ return shared_model::converters::protobuf::jsonToModel<
+ shared_model::proto::Block>(bytesToString(bytes));
};
+ if (not block) {
+ log_->error("error while converting from JSON");
+ return;
+ }
+
boost::for_each(
result | boost::adaptors::transformed([](const auto &x) {
return x.at("index").template as();
}),
- [&](auto x) {
+ [&](const auto &x) {
subscriber.on_next(PostgresBlockQuery::wTransaction(
clone(*block->transactions().at(x))));
});
@@ -134,7 +137,7 @@ namespace iroha {
PostgresBlockQuery::getAccountTransactions(
const shared_model::interface::types::AccountIdType &account_id) {
return rxcpp::observable<>::create(
- [this, account_id](auto subscriber) {
+ [this, account_id](const auto &subscriber) {
auto block_ids = this->getBlockIds(account_id);
if (block_ids.empty()) {
subscriber.on_completed();
@@ -183,10 +186,10 @@ namespace iroha {
PostgresBlockQuery::getTransactions(
const std::vector &tx_hashes) {
return rxcpp::observable<>::create>(
- [this, tx_hashes](auto subscriber) {
+ [this, tx_hashes](const auto &subscriber) {
std::for_each(tx_hashes.begin(),
tx_hashes.end(),
- [that = this, &subscriber](auto tx_hash) {
+ [that = this, &subscriber](const auto &tx_hash) {
subscriber.on_next(that->getTxByHashSync(tx_hash));
});
subscriber.on_completed();
@@ -196,30 +199,32 @@ namespace iroha {
boost::optional
PostgresBlockQuery::getTxByHashSync(
const shared_model::crypto::Hash &hash) {
- return getBlockId(hash) |
- [this](auto blockId) { return block_store_.get(blockId); } |
- [](auto bytes) {
- // TODO IR-975 victordrobny 12.02.2018 convert directly to
- // shared_model::proto::Block after FlatFile will be reworked to new
- // model
- return model::converters::stringToJson(bytesToString(bytes));
- }
- | [&](const auto &json) { return serializer_.deserialize(json); } |
- [](const auto &block) {
- return boost::optional(
- shared_model::proto::from_old(block));
- }
- | [&](const auto &block) {
- boost::optional result;
- auto it =
- std::find_if(block.transactions().begin(),
- block.transactions().end(),
- [&hash](auto tx) { return tx->hash() == hash; });
- if (it != block.transactions().end()) {
- result = boost::optional(clone(**it));
- }
- return result;
- };
+ auto block = getBlockId(hash) | [this](const auto &block_id) {
+ return block_store_.get(block_id);
+ } | [](const auto &bytes) {
+ return shared_model::converters::protobuf::jsonToModel<
+ shared_model::proto::Block>(bytesToString(bytes));
+ };
+ if (not block) {
+ log_->error("error while converting from JSON");
+ return boost::none;
+ }
+
+ boost::optional result;
+ auto it =
+ std::find_if(block->transactions().begin(),
+ block->transactions().end(),
+ [&hash](const auto &tx) { return tx->hash() == hash; });
+ if (it != block->transactions().end()) {
+ result = boost::optional(
+ PostgresBlockQuery::wTransaction(clone(**it)));
+ }
+ return result;
+ }
+
+ bool PostgresBlockQuery::hasTxWithHash(
+ const shared_model::crypto::Hash &hash) {
+ return getBlockId(hash) != boost::none;
}
} // namespace ametsuchi
diff --git a/irohad/ametsuchi/impl/postgres_block_query.hpp b/irohad/ametsuchi/impl/postgres_block_query.hpp
index 7343331e4a..c2eab7f51e 100644
--- a/irohad/ametsuchi/impl/postgres_block_query.hpp
+++ b/irohad/ametsuchi/impl/postgres_block_query.hpp
@@ -24,7 +24,6 @@
#include "ametsuchi/block_query.hpp"
#include "ametsuchi/impl/flat_file/flat_file.hpp"
#include "logger/logger.hpp"
-#include "model/converters/json_block_factory.hpp"
#include "postgres_wsv_common.hpp"
namespace iroha {
@@ -63,6 +62,8 @@ namespace iroha {
rxcpp::observable getTopBlocks(uint32_t count) override;
+ bool hasTxWithHash(const shared_model::crypto::Hash &hash) override;
+
private:
/**
* Returns all blocks' ids containing given account id
@@ -95,7 +96,6 @@ namespace iroha {
logger::Logger log_;
using ExecuteType = decltype(makeExecuteOptional(transaction_, log_));
ExecuteType execute_;
- model::converters::JsonBlockFactory serializer_;
};
} // namespace ametsuchi
} // namespace iroha
diff --git a/irohad/ametsuchi/impl/postgres_wsv_command.cpp b/irohad/ametsuchi/impl/postgres_wsv_command.cpp
index d0b30339a5..32a53a54ee 100644
--- a/irohad/ametsuchi/impl/postgres_wsv_command.cpp
+++ b/irohad/ametsuchi/impl/postgres_wsv_command.cpp
@@ -162,13 +162,11 @@ namespace iroha {
const shared_model::interface::Account &account) {
auto result = execute_(
"INSERT INTO account(account_id, domain_id, quorum, "
- "transaction_count, data) VALUES ("
+ "data) VALUES ("
+ transaction_.quote(account.accountId()) + ", "
+ transaction_.quote(account.domainId()) + ", "
+ transaction_.quote(account.quorum())
+ ", "
- // Transaction counter
- + transaction_.quote(default_tx_counter) + ", "
+ transaction_.quote(account.jsonData()) + ");");
auto message_gen = [&] {
@@ -176,10 +174,9 @@ namespace iroha {
"account id: '%s', "
"domain id: '%s', "
"quorum: '%d', "
- "transaction counter: '%d', "
"json_data: %s")
% account.accountId() % account.domainId() % account.quorum()
- % default_tx_counter % account.jsonData())
+ % account.jsonData())
.str();
};
@@ -359,8 +356,6 @@ namespace iroha {
"UPDATE account\n"
" SET quorum=" +
transaction_.quote(account.quorum()) +
- ", transaction_count=" +
- /*account.transaction_count*/ transaction_.quote(default_tx_counter) +
"\n"
" WHERE account_id=" +
transaction_.quote(account.accountId()) + ";");
diff --git a/irohad/ametsuchi/impl/postgres_wsv_command.hpp b/irohad/ametsuchi/impl/postgres_wsv_command.hpp
index 4290db4365..2f251a3599 100644
--- a/irohad/ametsuchi/impl/postgres_wsv_command.hpp
+++ b/irohad/ametsuchi/impl/postgres_wsv_command.hpp
@@ -91,8 +91,6 @@ namespace iroha {
&permission_id) override;
private:
- const size_t default_tx_counter = 0;
-
pqxx::nontransaction &transaction_;
using ExecuteType = decltype(makeExecuteResult(transaction_));
diff --git a/irohad/ametsuchi/impl/storage_impl.cpp b/irohad/ametsuchi/impl/storage_impl.cpp
index d020533ab3..564187c53f 100644
--- a/irohad/ametsuchi/impl/storage_impl.cpp
+++ b/irohad/ametsuchi/impl/storage_impl.cpp
@@ -22,13 +22,9 @@
#include "ametsuchi/impl/postgres_block_query.hpp"
#include "ametsuchi/impl/postgres_wsv_query.hpp"
#include "ametsuchi/impl/temporary_wsv_impl.hpp"
-#include "model/converters/json_common.hpp"
+#include "converters/protobuf/json_proto_converter.hpp"
#include "postgres_ordering_service_persistent_state.hpp"
-// TODO: 14-02-2018 Alexey Chernyshov remove this after relocation to
-// shared_model https://soramitsu.atlassian.net/browse/IR-887
-#include "backend/protobuf/from_old_model.hpp"
-
namespace iroha {
namespace ametsuchi {
@@ -130,6 +126,8 @@ namespace iroha {
const auto &top_hash) { return true; });
log_->info("block inserted: {}", inserted);
commit(std::move(storage.value));
+ notifier_.get_subscriber().on_next(
+ std::shared_ptr(clone(block)));
},
[&](expected::Error &error) {
log_->error(error.error);
@@ -257,13 +255,11 @@ DROP TABLE IF EXISTS index_by_id_height_asset;
auto storage_ptr = std::move(mutableStorage); // get ownership of storage
auto storage = static_cast(storage_ptr.get());
for (const auto &block : storage->block_store_) {
- // TODO: rework to shared model converters once they are available
- // IR-1084 Nikita Alekseev
- auto old_block =
- *std::unique_ptr(block.second->makeOldModel());
- block_store_->add(block.first,
- stringToBytes(model::converters::jsonToString(
- serializer_.serialize(old_block))));
+ block_store_->add(
+ block.first,
+ stringToBytes(shared_model::converters::protobuf::modelToJson(
+ *std::static_pointer_cast(
+ block.second))));
}
storage->transaction_->exec("COMMIT;");
@@ -289,5 +285,9 @@ DROP TABLE IF EXISTS index_by_id_height_asset;
std::shared_ptr StorageImpl::getBlockQuery() const {
return blocks_;
}
+ rxcpp::observable>
+ StorageImpl::on_commit() {
+ return notifier_.get_observable();
+ }
} // namespace ametsuchi
} // namespace iroha
diff --git a/irohad/ametsuchi/impl/storage_impl.hpp b/irohad/ametsuchi/impl/storage_impl.hpp
index c9838da7fc..5ea53adb95 100644
--- a/irohad/ametsuchi/impl/storage_impl.hpp
+++ b/irohad/ametsuchi/impl/storage_impl.hpp
@@ -20,12 +20,11 @@
#include "ametsuchi/storage.hpp"
-#include
#include
+#include
#include
#include
#include "logger/logger.hpp"
-#include "model/converters/json_block_factory.hpp"
namespace iroha {
namespace ametsuchi {
@@ -62,7 +61,8 @@ namespace iroha {
* @param blocks - block for insertion
* @return true if all blocks are inserted
*/
- virtual bool insertBlock(const shared_model::interface::Block &block) override;
+ virtual bool insertBlock(
+ const shared_model::interface::Block &block) override;
/**
* Insert blocks without validation
@@ -70,7 +70,8 @@ namespace iroha {
* @return true if inserted
*/
virtual bool insertBlocks(
- const std::vector> &blocks) override;
+ const std::vector>
+ &blocks) override;
virtual void dropStorage() override;
@@ -80,6 +81,9 @@ namespace iroha {
std::shared_ptr getBlockQuery() const override;
+ rxcpp::observable>
+ on_commit() override;
+
~StorageImpl() override;
protected:
@@ -111,13 +115,14 @@ namespace iroha {
std::shared_ptr blocks_;
- model::converters::JsonBlockFactory serializer_;
-
// Allows multiple readers and a single writer
std::shared_timed_mutex rw_lock_;
logger::Logger log_;
+ rxcpp::subjects::subject>
+ notifier_;
+
protected:
const std::string init_ = R"(
CREATE TABLE IF NOT EXISTS role (
@@ -137,7 +142,6 @@ CREATE TABLE IF NOT EXISTS account (
account_id character varying(288),
domain_id character varying(255) NOT NULL REFERENCES domain,
quorum int NOT NULL,
- transaction_count int NOT NULL DEFAULT 0,
data JSONB,
PRIMARY KEY (account_id)
);
diff --git a/irohad/ametsuchi/storage.hpp b/irohad/ametsuchi/storage.hpp
index 0678591684..f9d84b2959 100644
--- a/irohad/ametsuchi/storage.hpp
+++ b/irohad/ametsuchi/storage.hpp
@@ -18,6 +18,7 @@
#ifndef IROHA_AMETSUCHI_H
#define IROHA_AMETSUCHI_H
+#include
#include
#include "ametsuchi/mutable_factory.hpp"
#include "ametsuchi/temporary_factory.hpp"
@@ -27,7 +28,7 @@ namespace shared_model {
namespace interface {
class Block;
}
-}
+} // namespace shared_model
namespace iroha {
@@ -58,7 +59,16 @@ namespace iroha {
* @param blocks - collection of blocks for insertion
* @return true if inserted
*/
- virtual bool insertBlocks(const std::vector> &blocks) = 0;
+ virtual bool insertBlocks(
+ const std::vector>
+ &blocks) = 0;
+
+ /**
+ * method called when block is written to the storage
+ * @return observable with the Block committed
+ */
+ virtual rxcpp::observable>
+ on_commit() = 0;
/**
* Remove all information from ledger
diff --git a/irohad/consensus/yac/CMakeLists.txt b/irohad/consensus/yac/CMakeLists.txt
index 4eac54002d..5bb0fa0b12 100644
--- a/irohad/consensus/yac/CMakeLists.txt
+++ b/irohad/consensus/yac/CMakeLists.txt
@@ -19,7 +19,7 @@ add_library(supermajority_check
impl/supermajority_checker_impl.cpp
)
target_link_libraries(supermajority_check
- model
+ shared_model_interfaces
)
add_library(yac
@@ -43,4 +43,5 @@ target_link_libraries(yac
yac_grpc
logger
hash
+ shared_model_proto_backend
)
diff --git a/irohad/consensus/yac/impl/supermajority_checker_impl.cpp b/irohad/consensus/yac/impl/supermajority_checker_impl.cpp
index 597be726fd..e40ba8658a 100644
--- a/irohad/consensus/yac/impl/supermajority_checker_impl.cpp
+++ b/irohad/consensus/yac/impl/supermajority_checker_impl.cpp
@@ -17,16 +17,17 @@
#include "consensus/yac/impl/supermajority_checker_impl.hpp"
#include "interfaces/common_objects/peer.hpp"
+#include "interfaces/common_objects/signature.hpp"
namespace iroha {
namespace consensus {
namespace yac {
bool SupermajorityCheckerImpl::hasSupermajority(
- const shared_model::interface::SignatureSetType &signatures,
+ const shared_model::interface::types::SignatureRangeType &signatures,
const std::vector>
&peers) const {
- return checkSize(signatures.size(), peers.size())
+ return checkSize(boost::size(signatures), peers.size())
and peersSubset(signatures, peers);
}
@@ -40,17 +41,19 @@ namespace iroha {
}
bool SupermajorityCheckerImpl::peersSubset(
- const shared_model::interface::SignatureSetType &signatures,
+ const shared_model::interface::types::SignatureRangeType &signatures,
const std::vector>
&peers) const {
return std::all_of(
- signatures.begin(), signatures.end(), [&peers](auto signature) {
+ signatures.begin(),
+ signatures.end(),
+ [&peers](const auto &signature) {
return std::find_if(
peers.begin(),
peers.end(),
[&signature](const std::shared_ptr<
shared_model::interface::Peer> &peer) {
- return signature->publicKey() == peer->pubkey();
+ return signature.publicKey() == peer->pubkey();
})
!= peers.end();
});
diff --git a/irohad/consensus/yac/impl/supermajority_checker_impl.hpp b/irohad/consensus/yac/impl/supermajority_checker_impl.hpp
index a1ab775c31..894df51ac8 100644
--- a/irohad/consensus/yac/impl/supermajority_checker_impl.hpp
+++ b/irohad/consensus/yac/impl/supermajority_checker_impl.hpp
@@ -35,14 +35,16 @@ namespace iroha {
* @return true on supermajority is achieved or false otherwise
*/
virtual bool hasSupermajority(
- const shared_model::interface::SignatureSetType &signatures,
- const std::vector> &peers)
- const override;
+ const shared_model::interface::types::SignatureRangeType
+ &signatures,
+ const std::vector>
+ &peers) const override;
virtual bool checkSize(uint64_t current, uint64_t all) const override;
virtual bool peersSubset(
- const shared_model::interface::SignatureSetType &signatures,
+ const shared_model::interface::types::SignatureRangeType
+ &signatures,
const std::vector>
&peers) const override;
diff --git a/irohad/consensus/yac/impl/timer_impl.cpp b/irohad/consensus/yac/impl/timer_impl.cpp
index 429866f25b..cc152018e8 100644
--- a/irohad/consensus/yac/impl/timer_impl.cpp
+++ b/irohad/consensus/yac/impl/timer_impl.cpp
@@ -21,17 +21,18 @@
namespace iroha {
namespace consensus {
namespace yac {
- void TimerImpl::invokeAfterDelay(uint64_t millis,
- std::function handler) {
+ TimerImpl::TimerImpl(
+ std::function()> invoke_delay)
+ : invoke_delay_(std::move(invoke_delay)) {}
+
+ void TimerImpl::invokeAfterDelay(std::function handler) {
deny();
- handler_ = std::move(handler);
- timer = rxcpp::observable<>::timer(std::chrono::milliseconds(millis));
- handle = timer.subscribe_on(rxcpp::observe_on_new_thread())
- .subscribe([this](auto) { handler_(); });
+ handle_ = invoke_delay_().subscribe(
+ [handler{std::move(handler)}](auto) { handler(); });
}
void TimerImpl::deny() {
- handle.unsubscribe();
+ handle_.unsubscribe();
}
TimerImpl::~TimerImpl() {
diff --git a/irohad/consensus/yac/impl/timer_impl.hpp b/irohad/consensus/yac/impl/timer_impl.hpp
index 9304d268c3..e38081a7bf 100644
--- a/irohad/consensus/yac/impl/timer_impl.hpp
+++ b/irohad/consensus/yac/impl/timer_impl.hpp
@@ -26,21 +26,26 @@ namespace iroha {
namespace yac {
class TimerImpl : public Timer {
public:
- TimerImpl() = default;
+ /// Delay observable type
+ using TimeoutType = long;
+
+ /**
+ * Constructor
+ * @param invoke_delay cold observable which specifies invoke strategy
+ */
+ explicit TimerImpl(
+ std::function()> invoke_delay);
TimerImpl(const TimerImpl &) = delete;
TimerImpl &operator=(const TimerImpl &) = delete;
- void invokeAfterDelay(uint64_t millis,
- std::function handler) override;
+ void invokeAfterDelay(std::function handler) override;
void deny() override;
~TimerImpl() override;
private:
- std::function handler_;
-
- rxcpp::observable timer;
- rxcpp::composite_subscription handle;
+ std::function()> invoke_delay_;
+ rxcpp::composite_subscription handle_;
};
} // namespace yac
} // namespace consensus
diff --git a/irohad/consensus/yac/impl/yac.cpp b/irohad/consensus/yac/impl/yac.cpp
index a663f4e13a..68d5539c15 100644
--- a/irohad/consensus/yac/impl/yac.cpp
+++ b/irohad/consensus/yac/impl/yac.cpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -35,9 +35,9 @@ namespace iroha {
"Crypto verification failed for message.\n Votes: ";
result += logger::to_string(votes, [](const auto &vote) {
std::string result = "(Public key: ";
- result += vote.signature.pubkey.to_hexstring();
+ result += vote.signature->publicKey().hex();
result += ", Signature: ";
- result += vote.signature.signature.to_hexstring();
+ result += vote.signature->signedData().hex();
result += ")\n";
return result;
});
@@ -54,24 +54,21 @@ namespace iroha {
std::shared_ptr network,
std::shared_ptr crypto,
std::shared_ptr timer,
- ClusterOrdering order,
- uint64_t delay) {
+ ClusterOrdering order) {
return std::make_shared(
- vote_storage, network, crypto, timer, order, delay);
+ vote_storage, network, crypto, timer, order);
}
Yac::Yac(YacVoteStorage vote_storage,
std::shared_ptr network,
std::shared_ptr crypto,
std::shared_ptr timer,
- ClusterOrdering order,
- uint64_t delay)
+ ClusterOrdering order)
: vote_storage_(std::move(vote_storage)),
network_(std::move(network)),
crypto_(std::move(crypto)),
timer_(std::move(timer)),
- cluster_order_(order),
- delay_(delay) {
+ cluster_order_(order){
log_ = logger::log("YAC");
}
@@ -137,8 +134,7 @@ namespace iroha {
network_->send_vote(cluster_order_.currentLeader(), vote);
cluster_order_.switchToNext();
if (cluster_order_.hasNext()) {
- timer_->invokeAfterDelay(delay_,
- [this, vote] { this->votingStep(vote); });
+ timer_->invokeAfterDelay([this, vote] { this->votingStep(vote); });
}
}
@@ -151,11 +147,7 @@ namespace iroha {
auto peers = cluster_order_.getPeers();
auto it =
std::find_if(peers.begin(), peers.end(), [&](const auto &peer) {
- // TODO: 24/03/2018 x3medima17, remove makeOldModel after
- // migrating VoteMessage to the new model
- auto old_peer =
- *std::unique_ptr(peer->makeOldModel());
- return old_peer.pubkey == vote.signature.pubkey;
+ return peer->pubkey() == vote.signature->publicKey();
});
return it != peers.end() ? boost::make_optional(std::move(*it))
: boost::none;
@@ -210,8 +202,8 @@ namespace iroha {
// IR-497
},
[&](const CommitMessage &commit) {
- notifier_.get_subscriber().on_next(commit);
this->propagateCommit(commit);
+ notifier_.get_subscriber().on_next(commit);
});
}
this->closeRound();
@@ -228,7 +220,7 @@ namespace iroha {
} else {
log_->info("Apply vote: {} from unknown peer {}",
vote.hash.block_hash,
- vote.signature.pubkey.to_hexstring());
+ vote.signature->publicKey().hex());
}
auto answer =
@@ -246,8 +238,8 @@ namespace iroha {
// propagate for all
log_->info("Propagate commit {} to whole network",
vote.hash.block_hash);
- notifier_.get_subscriber().on_next(commit);
this->propagateCommit(commit);
+ notifier_.get_subscriber().on_next(commit);
},
[&](const RejectMessage &reject) {
// propagate reject for all
diff --git a/irohad/consensus/yac/impl/yac_crypto_provider_impl.cpp b/irohad/consensus/yac/impl/yac_crypto_provider_impl.cpp
index 77401f3399..a5850925e4 100644
--- a/irohad/consensus/yac/impl/yac_crypto_provider_impl.cpp
+++ b/irohad/consensus/yac/impl/yac_crypto_provider_impl.cpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,13 +17,14 @@
#include "consensus/yac/impl/yac_crypto_provider_impl.hpp"
#include "consensus/yac/transport/yac_pb_converters.hpp"
-#include "cryptography/ed25519_sha3_impl/internal/ed25519_impl.hpp"
-#include "cryptography/ed25519_sha3_impl/internal/sha3_hash.hpp"
+#include "cryptography/crypto_provider/crypto_signer.hpp"
+#include "cryptography/crypto_provider/crypto_verifier.hpp"
namespace iroha {
namespace consensus {
namespace yac {
- CryptoProviderImpl::CryptoProviderImpl(const keypair_t &keypair)
+ CryptoProviderImpl::CryptoProviderImpl(
+ const shared_model::crypto::Keypair &keypair)
: keypair_(keypair) {}
bool CryptoProviderImpl::verify(CommitMessage msg) {
@@ -41,25 +42,38 @@ namespace iroha {
}
bool CryptoProviderImpl::verify(VoteMessage msg) {
- return iroha::verify(
- iroha::sha3_256(
- PbConverters::serializeVote(msg).hash().SerializeAsString())
- .to_string(),
- msg.signature.pubkey,
- msg.signature.signature);
+ auto serialized =
+ PbConverters::serializeVote(msg).hash().SerializeAsString();
+ auto blob = shared_model::crypto::Blob(serialized);
+
+ return shared_model::crypto::CryptoVerifier<>::verify(
+ msg.signature->signedData(), blob, msg.signature->publicKey());
}
VoteMessage CryptoProviderImpl::getVote(YacHash hash) {
VoteMessage vote;
vote.hash = hash;
- auto signature = iroha::sign(
- iroha::sha3_256(
- PbConverters::serializeVote(vote).hash().SerializeAsString())
- .to_string(),
- keypair_.pubkey,
- keypair_.privkey);
- vote.signature.signature = signature;
- vote.signature.pubkey = keypair_.pubkey;
+ auto serialized =
+ PbConverters::serializeVotePayload(vote).hash().SerializeAsString();
+ auto blob = shared_model::crypto::Blob(serialized);
+ const auto &pubkey = keypair_.publicKey();
+ const auto &privkey = keypair_.privateKey();
+ auto signature = shared_model::crypto::CryptoSigner<>::sign(
+ blob, shared_model::crypto::Keypair(pubkey, privkey));
+
+ shared_model::builder::DefaultSignatureBuilder()
+ .publicKey(pubkey)
+ .signedData(signature)
+ .build()
+ .match([&vote](iroha::expected::Value<
+ std::shared_ptr>
+ &sig) { vote.signature = sig.value; },
+ [](iroha::expected::Error>
+ &reason) {
+ logger::log("YacCryptoProvider::getVote")
+ ->error("Cannot build vote signature: {}",
+ *reason.error);
+ });
return vote;
}
diff --git a/irohad/consensus/yac/impl/yac_crypto_provider_impl.hpp b/irohad/consensus/yac/impl/yac_crypto_provider_impl.hpp
index d55288b087..9e04a46e0e 100644
--- a/irohad/consensus/yac/impl/yac_crypto_provider_impl.hpp
+++ b/irohad/consensus/yac/impl/yac_crypto_provider_impl.hpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,13 +19,15 @@
#define IROHA_YAC_CRYPTO_PROVIDER_IMPL_HPP
#include "consensus/yac/yac_crypto_provider.hpp"
+#include "cryptography/keypair.hpp"
namespace iroha {
namespace consensus {
namespace yac {
class CryptoProviderImpl : public YacCryptoProvider {
public:
- explicit CryptoProviderImpl(const keypair_t &keypair);
+ explicit CryptoProviderImpl(
+ const shared_model::crypto::Keypair &keypair);
bool verify(CommitMessage msg) override;
@@ -36,7 +38,7 @@ namespace iroha {
VoteMessage getVote(YacHash hash) override;
private:
- keypair_t keypair_;
+ shared_model::crypto::Keypair keypair_;
};
} // namespace yac
} // namespace consensus
diff --git a/irohad/consensus/yac/impl/yac_gate_impl.cpp b/irohad/consensus/yac/impl/yac_gate_impl.cpp
index b472fea6b0..d22e561b9d 100644
--- a/irohad/consensus/yac/impl/yac_gate_impl.cpp
+++ b/irohad/consensus/yac/impl/yac_gate_impl.cpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -103,9 +103,7 @@ namespace iroha {
std::shared_ptr>(
[this, model_hash, vote](auto subscriber) {
auto block = block_loader_->retrieveBlock(
- shared_model::crypto::PublicKey(
- {vote.signature.pubkey.begin(),
- vote.signature.pubkey.end()}),
+ vote.signature->publicKey(),
shared_model::crypto::Hash(model_hash));
// if load is successful
if (block) {
@@ -116,6 +114,7 @@ namespace iroha {
})
// need only the first
.first()
+ .retry()
.subscribe(
// if load is successful from at least one node
[subscriber](auto block) {
@@ -132,7 +131,6 @@ namespace iroha {
}
void YacGateImpl::copySignatures(const CommitMessage &commit) {
- current_block_.second->clearSignatures();
for (const auto &vote : commit.votes) {
auto sig = vote.hash.block_signature;
current_block_.second->addSignature(sig->signedData(),
diff --git a/irohad/consensus/yac/impl/yac_gate_impl.hpp b/irohad/consensus/yac/impl/yac_gate_impl.hpp
index 2070befbe0..3b70fd834d 100644
--- a/irohad/consensus/yac/impl/yac_gate_impl.hpp
+++ b/irohad/consensus/yac/impl/yac_gate_impl.hpp
@@ -49,6 +49,11 @@ namespace iroha {
std::shared_ptr block_loader,
uint64_t delay);
void vote(const shared_model::interface::Block &) override;
+ /**
+ * method called when a commit is received
+ * assumes to retrieve a block eventually
+ * @return observable with the committed Block
+ */
rxcpp::observable>
on_commit() override;
diff --git a/irohad/consensus/yac/impl/yac_hash_provider_impl.cpp b/irohad/consensus/yac/impl/yac_hash_provider_impl.cpp
index 4198774c41..59e602929e 100644
--- a/irohad/consensus/yac/impl/yac_hash_provider_impl.cpp
+++ b/irohad/consensus/yac/impl/yac_hash_provider_impl.cpp
@@ -30,7 +30,7 @@ namespace iroha {
result.proposal_hash = hex_hash;
result.block_hash = hex_hash;
const auto &sig = *block.signatures().begin();
- result.block_signature = clone(*sig);
+ result.block_signature = clone(sig);
return result;
}
diff --git a/irohad/consensus/yac/messages.hpp b/irohad/consensus/yac/messages.hpp
index 53ec70bbef..6a9174a8c8 100644
--- a/irohad/consensus/yac/messages.hpp
+++ b/irohad/consensus/yac/messages.hpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,7 +21,7 @@
#include
#include "consensus/yac/yac_hash_provider.hpp" // for YacHash
-#include "model/signature.hpp" // for model::Signature
+#include "interfaces/common_objects/signature.hpp"
namespace iroha {
namespace consensus {
@@ -32,10 +32,10 @@ namespace iroha {
*/
struct VoteMessage {
YacHash hash;
- model::Signature signature;
+ std::shared_ptr signature;
bool operator==(const VoteMessage &rhs) const {
- return hash == rhs.hash and signature == rhs.signature;
+ return hash == rhs.hash and *signature == *rhs.signature;
}
bool operator!=(const VoteMessage &rhs) const {
diff --git a/irohad/consensus/yac/supermajority_checker.hpp b/irohad/consensus/yac/supermajority_checker.hpp
index ad6d158547..5226a98605 100644
--- a/irohad/consensus/yac/supermajority_checker.hpp
+++ b/irohad/consensus/yac/supermajority_checker.hpp
@@ -19,7 +19,7 @@
#define IROHA_CONSENSUS_SUPERMAJORITY_CHECKER_HPP
#include
-#include "interfaces/common_objects/signable_hash.hpp"
+#include "interfaces/common_objects/types.hpp"
namespace shared_model {
namespace interface {
@@ -46,7 +46,8 @@ namespace iroha {
* @return true on supermajority is achieved or false otherwise
*/
virtual bool hasSupermajority(
- const shared_model::interface::SignatureSetType &signatures,
+ const shared_model::interface::types::SignatureRangeType
+ &signatures,
const std::vector>
&peers) const = 0;
@@ -65,7 +66,8 @@ namespace iroha {
* @return true if is subset or false otherwise
*/
virtual bool peersSubset(
- const shared_model::interface::SignatureSetType &signatures,
+ const shared_model::interface::types::SignatureRangeType
+ &signatures,
const std::vector>
&peers) const = 0;
diff --git a/irohad/consensus/yac/timer.hpp b/irohad/consensus/yac/timer.hpp
index a99cfc4c39..8ee95b9312 100644
--- a/irohad/consensus/yac/timer.hpp
+++ b/irohad/consensus/yac/timer.hpp
@@ -30,12 +30,10 @@ namespace iroha {
class Timer {
public:
/**
- * Invoke handler after delay
- * @param millis - number of milliseconds before invoking
+ * Invoke handler with class-specific strategy
* @param handler - function, that will be invoked
*/
- virtual void invokeAfterDelay(uint64_t millis,
- std::function handler) = 0;
+ virtual void invokeAfterDelay(std::function handler) = 0;
/**
* Stop timer
diff --git a/irohad/consensus/yac/transport/impl/network_impl.cpp b/irohad/consensus/yac/transport/impl/network_impl.cpp
index 13709f8385..fef94eb2b4 100644
--- a/irohad/consensus/yac/transport/impl/network_impl.cpp
+++ b/irohad/consensus/yac/transport/impl/network_impl.cpp
@@ -24,15 +24,16 @@
#include "consensus/yac/transport/yac_pb_converters.hpp"
#include "interfaces/common_objects/peer.hpp"
#include "logger/logger.hpp"
+#include "network/impl/grpc_channel_builder.hpp"
namespace iroha {
namespace consensus {
namespace yac {
// ----------| Public API |----------
- NetworkImpl::NetworkImpl() {
- log_ = logger::log("YacNetwork");
- }
+ NetworkImpl::NetworkImpl()
+ : network::AsyncGrpcClient(
+ logger::log("YacNetwork")) {}
void NetworkImpl::subscribe(
std::shared_ptr handler) {
@@ -154,8 +155,8 @@ namespace iroha {
void NetworkImpl::createPeerConnection(
const shared_model::interface::Peer &peer) {
if (peers_.count(peer.address()) == 0) {
- peers_[peer.address()] = proto::Yac::NewStub(grpc::CreateChannel(
- peer.address(), grpc::InsecureChannelCredentials()));
+ peers_[peer.address()] =
+ network::createClient(peer.address());
}
}
diff --git a/irohad/consensus/yac/transport/impl/network_impl.hpp b/irohad/consensus/yac/transport/impl/network_impl.hpp
index d75c833a60..b2d3ab46c5 100644
--- a/irohad/consensus/yac/transport/impl/network_impl.hpp
+++ b/irohad/consensus/yac/transport/impl/network_impl.hpp
@@ -101,11 +101,6 @@ namespace iroha {
* Subscriber of network messages
*/
std::weak_ptr handler_;
-
- /**
- * Internal logger
- */
- logger::Logger log_;
};
} // namespace yac
diff --git a/irohad/consensus/yac/transport/yac_pb_converters.hpp b/irohad/consensus/yac/transport/yac_pb_converters.hpp
index ac963f0b04..1ed047b955 100644
--- a/irohad/consensus/yac/transport/yac_pb_converters.hpp
+++ b/irohad/consensus/yac/transport/yac_pb_converters.hpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,6 +23,7 @@
#include "consensus/yac/messages.hpp"
#include "cryptography/crypto_provider/crypto_defaults.hpp"
#include "interfaces/common_objects/signature.hpp"
+#include "logger/logger.hpp"
#include "yac.pb.h"
namespace iroha {
@@ -30,7 +31,7 @@ namespace iroha {
namespace yac {
class PbConverters {
public:
- static proto::Vote serializeVote(const VoteMessage &vote) {
+ static proto::Vote serializeVotePayload(const VoteMessage &vote) {
proto::Vote pb_vote;
auto hash = pb_vote.mutable_hash();
@@ -39,24 +40,23 @@ namespace iroha {
auto block_signature = hash->mutable_block_signature();
- // Will fix it in the next PR, very soon, don't worry
- if (vote.hash.block_signature == nullptr) {
- auto peer_key = shared_model::crypto::DefaultCryptoAlgorithmType::
- generateKeypair()
- .publicKey();
- shared_model::builder::DefaultSignatureBuilder()
- .publicKey(peer_key)
- .signedData(shared_model::crypto::Signed(""))
- .build()
- .match(
- [&](iroha::expected::Value> &sig) {
- const_cast(vote).hash.block_signature =
- sig.value;
- },
- [](iroha::expected::Error>) {
- });
- }
+ block_signature->set_signature(shared_model::crypto::toBinaryString(
+ vote.hash.block_signature->signedData()));
+
+ block_signature->set_pubkey(shared_model::crypto::toBinaryString(
+ vote.hash.block_signature->publicKey()));
+
+ return pb_vote;
+ }
+
+ static proto::Vote serializeVote(const VoteMessage &vote) {
+ proto::Vote pb_vote;
+
+ auto hash = pb_vote.mutable_hash();
+ hash->set_block(vote.hash.block_hash);
+ hash->set_proposal(vote.hash.proposal_hash);
+
+ auto block_signature = hash->mutable_block_signature();
block_signature->set_signature(shared_model::crypto::toBinaryString(
vote.hash.block_signature->signedData()));
@@ -65,8 +65,11 @@ namespace iroha {
vote.hash.block_signature->publicKey()));
auto signature = pb_vote.mutable_signature();
- signature->set_signature(vote.signature.signature.to_string());
- signature->set_pubkey(vote.signature.pubkey.to_string());
+ const auto &sig = *vote.signature;
+ signature->set_signature(
+ shared_model::crypto::toBinaryString(sig.signedData()));
+ signature->set_pubkey(
+ shared_model::crypto::toBinaryString(sig.publicKey()));
return pb_vote;
}
@@ -84,15 +87,35 @@ namespace iroha {
pb_vote.hash().block_signature().signature()))
.build()
.match(
- [&](iroha::expected::Value<
- std::shared_ptr>
- &sig) { vote.hash.block_signature = sig.value; },
- [](iroha::expected::Error>) {});
-
- vote.signature.signature = *stringToBlob(
- pb_vote.signature().signature());
- vote.signature.pubkey = *stringToBlob(
- pb_vote.signature().pubkey());
+ [&vote](iroha::expected::Value<
+ std::shared_ptr>
+ &sig) { vote.hash.block_signature = sig.value; },
+ [](iroha::expected::Error>
+ &reason) {
+ logger::log("YacPbConverter::deserializeVote")
+ ->error("Cannot build vote hash block signature: {}",
+ *reason.error);
+ });
+
+ const auto &pubkey =
+ shared_model::crypto::PublicKey(pb_vote.signature().pubkey());
+ const auto &signed_data =
+ shared_model::crypto::Signed(pb_vote.signature().signature());
+
+ shared_model::builder::DefaultSignatureBuilder()
+ .publicKey(pubkey)
+ .signedData(signed_data)
+ .build()
+ .match(
+ [&vote](iroha::expected::Value<
+ std::shared_ptr>
+ &sig) { vote.signature = sig.value; },
+ [](iroha::expected::Error>
+ &reason) {
+ logger::log("YacPbConverter::deserializeVote")
+ ->error("Cannot build vote signature: {}",
+ *reason.error);
+ });
return vote;
}
diff --git a/irohad/consensus/yac/yac.hpp b/irohad/consensus/yac/yac.hpp
index bdc499c06e..b3d3e92cb6 100644
--- a/irohad/consensus/yac/yac.hpp
+++ b/irohad/consensus/yac/yac.hpp
@@ -48,15 +48,13 @@ namespace iroha {
std::shared_ptr network,
std::shared_ptr crypto,
std::shared_ptr timer,
- ClusterOrdering order,
- uint64_t delay);
+ ClusterOrdering order);
Yac(YacVoteStorage vote_storage,
std::shared_ptr network,
std::shared_ptr crypto,
std::shared_ptr timer,
- ClusterOrdering order,
- uint64_t delay);
+ ClusterOrdering order);
// ------|Hash gate|------
@@ -133,9 +131,6 @@ namespace iroha {
// ------|One round|------
ClusterOrdering cluster_order_;
- // ------|Constants|------
- const uint64_t delay_;
-
// ------|Logger|------
logger::Logger log_;
};
diff --git a/irohad/execution/CMakeLists.txt b/irohad/execution/CMakeLists.txt
index 8ba23e30ec..c875c2b3d4 100644
--- a/irohad/execution/CMakeLists.txt
+++ b/irohad/execution/CMakeLists.txt
@@ -17,6 +17,7 @@ add_library(common_execution
)
target_link_libraries(common_execution
rxcpp
+ boost
)
add_library(command_execution
@@ -28,4 +29,15 @@ target_link_libraries(command_execution
common_execution
rxcpp
shared_model_default_builders
+ shared_model_amount_utils
+ )
+
+add_library(query_execution
+ impl/query_execution.cpp
+ )
+
+target_link_libraries(query_execution
+ rxcpp
+ shared_model_default_builders
+ common_execution
)
diff --git a/irohad/execution/command_executor.hpp b/irohad/execution/command_executor.hpp
index 7e7731967c..c8fbdb69be 100644
--- a/irohad/execution/command_executor.hpp
+++ b/irohad/execution/command_executor.hpp
@@ -141,7 +141,7 @@ namespace iroha {
std::shared_ptr commands;
shared_model::interface::types::AccountIdType creator_account_id;
- shared_model::builder::DefaultAmountBuilder amount_builder_;
+ shared_model::builder::AmountBuilderWithoutValidator amount_builder_;
shared_model::builder::DefaultAccountAssetBuilder account_asset_builder_;
shared_model::builder::DefaultAccountBuilder account_builder_;
shared_model::builder::DefaultAssetBuilder asset_builder_;
diff --git a/irohad/execution/impl/command_executor.cpp b/irohad/execution/impl/command_executor.cpp
index a6bd3eda6e..90475c8f04 100644
--- a/irohad/execution/impl/command_executor.cpp
+++ b/irohad/execution/impl/command_executor.cpp
@@ -21,8 +21,10 @@
#include "execution/common_executor.hpp"
#include "interfaces/commands/command.hpp"
-#include "validators/permissions.hpp"
#include "utils/amount_utils.hpp"
+#include "validators/permissions.hpp"
+
+using namespace shared_model::detail;
namespace iroha {
@@ -62,16 +64,18 @@ namespace iroha {
(boost::format("asset %s is absent") % command->assetId()).str(),
command_name);
}
- auto precision = asset.value()->precision();
- if (command->amount().precision() != precision) {
+ auto precision = asset.value()->precision();
+ if (command->amount().precision() > precision) {
return makeExecutionError(
- (boost::format("precision mismatch: expected %d, but got %d")
+ (boost::format("command precision is greater than asset precision: "
+ "expected %d, but got %d")
% precision % command->amount().precision())
.str(),
command_name);
}
-
+ auto command_amount =
+ makeAmountWithPrecision(command->amount(), asset.value()->precision());
if (not queries->getAccount(command->accountId())) {
return makeExecutionError(
(boost::format("account %s is absent") % command->accountId()).str(),
@@ -80,9 +84,11 @@ namespace iroha {
auto account_asset =
queries->getAccountAsset(command->accountId(), command->assetId());
- auto new_balance = amount_builder_.precision(command->amount().precision())
- .intValue(command->amount().intValue())
- .build();
+ auto new_balance = command_amount | [this](const auto &amount) {
+ return amount_builder_.precision(amount->precision())
+ .intValue(amount->intValue())
+ .build();
+ };
using AccountAssetResult =
expected::Result,
iroha::ExecutionError>;
@@ -354,30 +360,34 @@ namespace iroha {
command_name);
}
auto precision = asset.value()->precision();
-
- if (command->amount().precision() != precision) {
+ if (command->amount().precision() > precision) {
return makeExecutionError(
- (boost::format("precision mismatch: expected %d, but got %d")
+ (boost::format("command precision is greater than asset precision: "
+ "expected %d, but got %d")
% precision % command->amount().precision())
.str(),
command_name);
}
- auto account_asset = queries->getAccountAsset(
- command->accountId(), command->assetId());
+ auto command_amount =
+ makeAmountWithPrecision(command->amount(), asset.value()->precision());
+ auto account_asset =
+ queries->getAccountAsset(command->accountId(), command->assetId());
if (not account_asset) {
return makeExecutionError((boost::format("%s do not have %s")
% command->accountId() % command->assetId())
.str(),
command_name);
}
- auto account_asset_new =
- (account_asset.value()->balance() - command->amount()) |
- [this, &account_asset](const auto &new_balance) {
- return account_asset_builder_.balance(*new_balance)
- .accountId(account_asset.value()->accountId())
- .assetId(account_asset.value()->assetId())
- .build();
- };
+ auto account_asset_new = command_amount |
+ [&account_asset](const auto &amount) {
+ return account_asset.value()->balance() - *amount;
+ }
+ | [this, &account_asset](const auto &new_balance) {
+ return account_asset_builder_.balance(*new_balance)
+ .accountId(account_asset.value()->accountId())
+ .assetId(account_asset.value()->assetId())
+ .build();
+ };
return account_asset_new.match(
[&](const expected::Value<
@@ -417,23 +427,29 @@ namespace iroha {
.str(),
command_name);
}
- // Precision for both wallets
auto precision = asset.value()->precision();
- if (command->amount().precision() != precision) {
+ if (command->amount().precision() > precision) {
return makeExecutionError(
- (boost::format("precision %d is wrong") % precision).str(),
+ (boost::format("command precision is greater than asset precision: "
+ "expected %d, but got %d")
+ % precision % command->amount().precision())
+ .str(),
command_name);
}
+ auto command_amount =
+ makeAmountWithPrecision(command->amount(), asset.value()->precision());
// Set new balance for source account
- auto src_account_asset_new =
- (src_account_asset.value()->balance() - command->amount()) |
- [this, &src_account_asset](const auto &new_src_balance) {
- return account_asset_builder_
- .assetId(src_account_asset.value()->assetId())
- .accountId(src_account_asset.value()->accountId())
- .balance(*new_src_balance)
- .build();
- };
+ auto src_account_asset_new = command_amount |
+ [&src_account_asset](const auto &amount) {
+ return src_account_asset.value()->balance() - *amount;
+ }
+ | [this, &src_account_asset](const auto &new_src_balance) {
+ return account_asset_builder_
+ .assetId(src_account_asset.value()->assetId())
+ .accountId(src_account_asset.value()->accountId())
+ .balance(*new_src_balance)
+ .build();
+ };
return src_account_asset_new.match(
[&](const expected::Value<
std::shared_ptr>
@@ -506,7 +522,9 @@ namespace iroha {
// any asset
return creator_account_id == command.accountId()
and checkAccountRolePermission(
- creator_account_id, queries, model::can_add_asset_qty);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_add_asset_qty);
}
bool CommandValidator::hasPermissions(
@@ -514,7 +532,7 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_add_peer);
+ creator_account_id, queries, shared_model::permissions::can_add_peer);
}
bool CommandValidator::hasPermissions(
@@ -526,11 +544,15 @@ namespace iroha {
// account and he has permission CanAddSignatory
(command.accountId() == creator_account_id
and checkAccountRolePermission(
- creator_account_id, queries, model::can_add_signatory))
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_add_signatory))
or
// Case 2. Creator has granted permission for it
(queries.hasAccountGrantablePermission(
- creator_account_id, command.accountId(), model::can_add_signatory));
+ creator_account_id,
+ command.accountId(),
+ shared_model::permissions::can_add_my_signatory));
}
bool CommandValidator::hasPermissions(
@@ -538,7 +560,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_append_role);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_append_role);
}
bool CommandValidator::hasPermissions(
@@ -546,7 +570,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_create_account);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_create_account);
}
bool CommandValidator::hasPermissions(
@@ -554,7 +580,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_create_asset);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_create_asset);
}
bool CommandValidator::hasPermissions(
@@ -562,7 +590,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_create_domain);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_create_domain);
}
bool CommandValidator::hasPermissions(
@@ -570,7 +600,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_create_role);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_create_role);
}
bool CommandValidator::hasPermissions(
@@ -578,7 +610,9 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
return checkAccountRolePermission(
- creator_account_id, queries, model::can_detach_role);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_detach_role);
}
bool CommandValidator::hasPermissions(
@@ -588,7 +622,7 @@ namespace iroha {
return checkAccountRolePermission(
creator_account_id,
queries,
- model::can_grant + command.permissionName());
+ shared_model::permissions::can_grant + command.permissionName());
}
bool CommandValidator::hasPermissions(
@@ -600,11 +634,14 @@ namespace iroha {
// permission on it
(creator_account_id == command.accountId()
and checkAccountRolePermission(
- creator_account_id, queries, model::can_remove_signatory))
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_remove_signatory))
// 2. Creator has granted permission on removal
- or (queries.hasAccountGrantablePermission(creator_account_id,
- command.accountId(),
- model::can_remove_signatory));
+ or (queries.hasAccountGrantablePermission(
+ creator_account_id,
+ command.accountId(),
+ shared_model::permissions::can_remove_my_signatory));
}
bool CommandValidator::hasPermissions(
@@ -624,7 +661,9 @@ namespace iroha {
creator_account_id == command.accountId() or
// Case 2. Creator has grantable permission to set account key/value
queries.hasAccountGrantablePermission(
- creator_account_id, command.accountId(), model::can_set_detail);
+ creator_account_id,
+ command.accountId(),
+ shared_model::permissions::can_set_my_account_detail);
}
bool CommandValidator::hasPermissions(
@@ -635,10 +674,14 @@ namespace iroha {
// 1. Creator set quorum for his account -> must have permission
(creator_account_id == command.accountId()
and checkAccountRolePermission(
- creator_account_id, queries, model::can_set_quorum))
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_set_quorum))
// 2. Creator has granted permission on it
or (queries.hasAccountGrantablePermission(
- creator_account_id, command.accountId(), model::can_set_quorum));
+ creator_account_id,
+ command.accountId(),
+ shared_model::permissions::can_set_my_quorum));
}
bool CommandValidator::hasPermissions(
@@ -647,7 +690,9 @@ namespace iroha {
const shared_model::interface::types::AccountIdType &creator_account_id) {
return creator_account_id == command.accountId()
and checkAccountRolePermission(
- creator_account_id, queries, model::can_subtract_asset_qty);
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_subtract_asset_qty);
}
bool CommandValidator::hasPermissions(
@@ -660,15 +705,18 @@ namespace iroha {
and queries.hasAccountGrantablePermission(
creator_account_id,
command.srcAccountId(),
- model::can_transfer))
+ shared_model::permissions::can_transfer_my_assets))
or
// 2. Creator transfer from their account
(creator_account_id == command.srcAccountId()
and checkAccountRolePermission(
- creator_account_id, queries, model::can_transfer)))
+ creator_account_id,
+ queries,
+ shared_model::permissions::can_transfer)))
// For both cases, dest_account must have can_receive
- and checkAccountRolePermission(
- command.destAccountId(), queries, model::can_receive);
+ and checkAccountRolePermission(command.destAccountId(),
+ queries,
+ shared_model::permissions::can_receive);
}
bool CommandValidator::isValid(
@@ -745,12 +793,12 @@ namespace iroha {
const shared_model::interface::CreateRole &command,
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
- return std::all_of(
- command.rolePermissions().begin(),
- command.rolePermissions().end(),
- [&queries, &creator_account_id](auto perm) {
- return checkAccountRolePermission(creator_account_id, queries, perm);
- });
+ return std::all_of(command.rolePermissions().begin(),
+ command.rolePermissions().end(),
+ [&queries, &creator_account_id](auto perm) {
+ return checkAccountRolePermission(
+ creator_account_id, queries, perm);
+ });
}
bool CommandValidator::isValid(
@@ -772,8 +820,7 @@ namespace iroha {
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
auto account = queries.getAccount(command.accountId());
- auto signatories =
- queries.getSignatories(command.accountId());
+ auto signatories = queries.getSignatories(command.accountId());
if (not(account and signatories)) {
// No account or signatories found
@@ -803,8 +850,7 @@ namespace iroha {
const shared_model::interface::SetQuorum &command,
ametsuchi::WsvQuery &queries,
const shared_model::interface::types::AccountIdType &creator_account_id) {
- auto signatories =
- queries.getSignatories(command.accountId());
+ auto signatories = queries.getSignatories(command.accountId());
if (not(signatories)) {
// No signatories of an account found
@@ -831,7 +877,7 @@ namespace iroha {
return false;
}
// Amount is formed wrong
- if (command.amount().precision() != asset.value()->precision()) {
+ if (command.amount().precision() > asset.value()->precision()) {
return false;
}
auto account_asset =
diff --git a/irohad/model/impl/query_execution.cpp b/irohad/execution/impl/query_execution.cpp
similarity index 94%
rename from irohad/model/impl/query_execution.cpp
rename to irohad/execution/impl/query_execution.cpp
index 6feec625d0..40d5866f88 100644
--- a/irohad/model/impl/query_execution.cpp
+++ b/irohad/execution/impl/query_execution.cpp
@@ -15,14 +15,15 @@
* limitations under the License.
*/
-#include "model/query_execution.hpp"
+#include "execution/query_execution.hpp"
#include
#include "execution/common_executor.hpp"
#include "validators/permissions.hpp"
-using namespace iroha::model;
+using namespace shared_model::permissions;
+using namespace iroha;
using namespace iroha::ametsuchi;
// TODO: 28/03/2018 x3medima17 remove poly wrapper, IR-1011
@@ -270,7 +271,7 @@ QueryProcessingFactory::executeGetAccountAssets(
}
QueryProcessingFactory::QueryResponseBuilderDone
-iroha::model::QueryProcessingFactory::executeGetAccountDetail(
+QueryProcessingFactory::executeGetAccountDetail(
const shared_model::interface::GetAccountDetail &query) {
auto acct_detail = _wsvQuery->getAccountDetail(query.accountId());
if (not acct_detail) {
@@ -281,7 +282,7 @@ iroha::model::QueryProcessingFactory::executeGetAccountDetail(
}
QueryProcessingFactory::QueryResponseBuilderDone
-iroha::model::QueryProcessingFactory::executeGetAccountAssetTransactions(
+QueryProcessingFactory::executeGetAccountAssetTransactions(
const shared_model::interface::GetAccountAssetTransactions &query) {
auto acc_asset_tx = _blockQuery->getAccountAssetTransactions(
query.accountId(), query.assetId());
@@ -312,18 +313,22 @@ QueryProcessingFactory::executeGetAccountTransactions(
}
QueryProcessingFactory::QueryResponseBuilderDone
-iroha::model::QueryProcessingFactory::executeGetTransactions(
- const shared_model::interface::GetTransactions &query) {
- const std::vector &hashes =
- query.transactionHashes();
+QueryProcessingFactory::executeGetTransactions(
+ const shared_model::interface::GetTransactions &q,
+ const shared_model::interface::types::AccountIdType &accountId) {
+ const std::vector &hashes = q.transactionHashes();
auto transactions = _blockQuery->getTransactions(hashes);
std::vector txs;
+ bool can_get_all =
+ checkAccountRolePermission(accountId, *_wsvQuery, can_get_all_txs);
transactions.subscribe([&](const auto &tx) {
if (tx) {
- txs.push_back(
- *std::static_pointer_cast(*tx));
+ auto proto_tx =
+ *std::static_pointer_cast(*tx);
+ if (can_get_all or proto_tx.creatorAccountId() == accountId)
+ txs.push_back(proto_tx);
}
});
@@ -343,7 +348,8 @@ QueryProcessingFactory::executeGetSignatories(
}
std::shared_ptr
-QueryProcessingFactory::execute(const shared_model::interface::Query &query) {
+QueryProcessingFactory::validateAndExecute(
+ const shared_model::interface::Query &query) {
const auto &query_hash = query.hash();
QueryResponseBuilderDone builder;
// TODO: 29/04/2018 x3medima18, Add visitor class, IR-1185
@@ -377,7 +383,7 @@ QueryProcessingFactory::execute(const shared_model::interface::Query &query) {
if (not validate(query, *q)) {
builder = statefulFailed();
} else {
- builder = executeGetTransactions(*q);
+ builder = executeGetTransactions(*q, query.creatorAccountId());
}
return clone(builder.queryHash(query_hash).build());
},
diff --git a/irohad/execution/query_execution.hpp b/irohad/execution/query_execution.hpp
new file mode 100644
index 0000000000..e8bf548e23
--- /dev/null
+++ b/irohad/execution/query_execution.hpp
@@ -0,0 +1,135 @@
+/**
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
+ * http://soramitsu.co.jp
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef IROHA_QUERY_EXECUTION_HPP
+#define IROHA_QUERY_EXECUTION_HPP
+
+#include "ametsuchi/block_query.hpp"
+#include "ametsuchi/wsv_query.hpp"
+#include "builders/protobuf/builder_templates/query_response_template.hpp"
+
+namespace shared_model {
+ namespace interface {
+ class QueryResponse;
+ class Query;
+ } // namespace interface
+} // namespace shared_model
+
+namespace iroha {
+
+ /**
+ * Converting model objects to protobuf and vice versa
+ */
+ class QueryProcessingFactory {
+ using QueryResponseBuilder =
+ shared_model::proto::TemplateQueryResponseBuilder<0>;
+
+ using QueryResponseBuilderDone =
+ shared_model::proto::TemplateQueryResponseBuilder<1>;
+
+ public:
+ /**
+ * Execute and validate query.
+ *
+ * @param query
+ * @return shared pointer to query response
+ */
+ std::shared_ptr validateAndExecute(
+ const shared_model::interface::Query &query);
+ /**
+ *
+ * @param wsvQuery
+ * @param blockQuery
+ */
+ QueryProcessingFactory(std::shared_ptr wsvQuery,
+ std::shared_ptr blockQuery);
+
+ private:
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetAssetInfo &get_asset_info);
+
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetRoles &get_roles);
+
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetRolePermissions
+ &get_role_permissions);
+
+ bool validate(
+ const shared_model::interface::Query &query,
+ const shared_model::interface::GetAccountAssets &get_account_assets);
+
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetAccount &get_account);
+
+ bool validate(
+ const shared_model::interface::Query &query,
+ const shared_model::interface::GetSignatories &get_signatories);
+
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetAccountTransactions
+ &get_account_transactions);
+
+ bool validate(const shared_model::interface::Query &query,
+ const shared_model::interface::GetAccountAssetTransactions
+ &get_account_asset_transactions);
+
+ bool validate(
+ const shared_model::interface::Query &query,
+ const shared_model::interface::GetAccountDetail &get_account_detail);
+
+ bool validate(
+ const shared_model::interface::Query &query,
+ const shared_model::interface::GetTransactions &get_transactions);
+
+ QueryResponseBuilderDone executeGetAssetInfo(
+ const shared_model::interface::GetAssetInfo &get_asset_info);
+
+ QueryResponseBuilderDone executeGetRoles(
+ const shared_model::interface::GetRoles &query);
+
+ QueryResponseBuilderDone executeGetRolePermissions(
+ const shared_model::interface::GetRolePermissions &query);
+
+ QueryResponseBuilderDone executeGetAccountAssets(
+ const shared_model::interface::GetAccountAssets &query);
+
+ QueryResponseBuilderDone executeGetAccountDetail(
+ const shared_model::interface::GetAccountDetail &query);
+
+ QueryResponseBuilderDone executeGetAccount(
+ const shared_model::interface::GetAccount &query);
+
+ QueryResponseBuilderDone executeGetSignatories(
+ const shared_model::interface::GetSignatories &query);
+
+ QueryResponseBuilderDone executeGetAccountAssetTransactions(
+ const shared_model::interface::GetAccountAssetTransactions &query);
+
+ QueryResponseBuilderDone executeGetAccountTransactions(
+ const shared_model::interface::GetAccountTransactions &query);
+
+ QueryResponseBuilderDone executeGetTransactions(
+ const shared_model::interface::GetTransactions &query,
+ const shared_model::interface::types::AccountIdType &accountId);
+
+ std::shared_ptr _wsvQuery;
+ std::shared_ptr _blockQuery;
+ };
+
+} // namespace iroha
+
+#endif // IROHA_QUERY_EXECUTION_HPP
diff --git a/irohad/main/CMakeLists.txt b/irohad/main/CMakeLists.txt
index 39d1427740..cd351ee33e 100644
--- a/irohad/main/CMakeLists.txt
+++ b/irohad/main/CMakeLists.txt
@@ -24,9 +24,9 @@ target_link_libraries(server_runner
boost # iroha::expected::Result
)
-add_library(raw_block_insertion impl/raw_block_loader.cpp)
-target_link_libraries(raw_block_insertion
- json_model_converters
+add_library(raw_block_loader impl/raw_block_loader.cpp)
+target_link_libraries(raw_block_loader
+ shared_model_interfaces
)
add_library(application
@@ -39,7 +39,6 @@ target_link_libraries(application
logger
yac
server_runner
- model
ametsuchi
networking
ordering_service
@@ -57,7 +56,7 @@ target_link_libraries(application
add_executable(irohad irohad.cpp)
target_link_libraries(irohad
application
- raw_block_insertion
+ raw_block_loader
gflags
rapidjson
keys_manager
diff --git a/irohad/main/application.cpp b/irohad/main/application.cpp
index 3af241645c..b383b48233 100644
--- a/irohad/main/application.cpp
+++ b/irohad/main/application.cpp
@@ -42,7 +42,7 @@ Irohad::Irohad(const std::string &block_store_dir,
std::chrono::milliseconds proposal_delay,
std::chrono::milliseconds vote_delay,
std::chrono::milliseconds load_delay,
- const keypair_t &keypair)
+ const shared_model::crypto::Keypair &keypair)
: block_store_dir_(block_store_dir),
pg_conn_(pg_conn),
torii_port_(torii_port),
@@ -126,21 +126,16 @@ bool Irohad::restoreWsv() {
/**
* Initializing peer query interface
*/
-void Irohad::initPeerQuery() {
- wsv = std::make_shared(storage->getWsvQuery());
-
- log_->info("[Init] => peer query");
+std::unique_ptr Irohad::initPeerQuery() {
+ return std::make_unique(storage->getWsvQuery());
}
/**
* Initializing crypto provider
*/
void Irohad::initCryptoProvider() {
- shared_model::crypto::Keypair keypair_(
- shared_model::crypto::PublicKey(keypair.pubkey.to_string()),
- shared_model::crypto::PrivateKey(keypair.privkey.to_string()));
crypto_signer_ =
- std::make_shared>(keypair_);
+ std::make_shared>(keypair);
log_->info("[Init] => crypto provider");
}
@@ -160,8 +155,11 @@ void Irohad::initValidators() {
* Initializing ordering gate
*/
void Irohad::initOrderingGate() {
- ordering_gate = ordering_init.initOrderingGate(
- wsv, max_proposal_size_, proposal_delay_, ordering_service_storage_);
+ ordering_gate = ordering_init.initOrderingGate(initPeerQuery(),
+ max_proposal_size_,
+ proposal_delay_,
+ ordering_service_storage_,
+ storage->getBlockQuery());
log_->info("[Init] => init ordering gate - [{}]",
logger::logBool(ordering_gate));
}
@@ -183,8 +181,8 @@ void Irohad::initSimulator() {
* Initializing block loader
*/
void Irohad::initBlockLoader() {
- block_loader = loader_init.initBlockLoader(
- wsv, storage->getBlockQuery());
+ block_loader =
+ loader_init.initBlockLoader(initPeerQuery(), storage->getBlockQuery());
log_->info("[Init] => block loader");
}
@@ -193,8 +191,12 @@ void Irohad::initBlockLoader() {
* Initializing consensus gate
*/
void Irohad::initConsensusGate() {
- consensus_gate = yac_init.initConsensusGate(
- wsv, simulator, block_loader, keypair, vote_delay_, load_delay_);
+ consensus_gate = yac_init.initConsensusGate(initPeerQuery(),
+ simulator,
+ block_loader,
+ keypair,
+ vote_delay_,
+ load_delay_);
log_->info("[Init] => consensus gate");
}
diff --git a/irohad/main/application.hpp b/irohad/main/application.hpp
index 0fa00f558f..77ca494c09 100644
--- a/irohad/main/application.hpp
+++ b/irohad/main/application.hpp
@@ -22,6 +22,7 @@
#include "ametsuchi/impl/storage_impl.hpp"
#include "ametsuchi/ordering_service_persistent_state.hpp"
#include "cryptography/crypto_provider/crypto_model_signer.hpp"
+#include "cryptography/keypair.hpp"
#include "logger/logger.hpp"
#include "main/impl/block_loader_init.hpp"
#include "main/impl/consensus_init.hpp"
@@ -76,7 +77,7 @@ class Irohad {
std::chrono::milliseconds proposal_delay,
std::chrono::milliseconds vote_delay,
std::chrono::milliseconds load_delay,
- const iroha::keypair_t &keypair);
+ const shared_model::crypto::Keypair &keypair);
/**
* Initialization of whole objects in system
@@ -111,7 +112,7 @@ class Irohad {
virtual void initStorage();
- virtual void initPeerQuery();
+ virtual std::unique_ptr initPeerQuery();
virtual void initCryptoProvider();
@@ -157,9 +158,6 @@ class Irohad {
std::shared_ptr stateful_validator;
std::shared_ptr chain_validator;
- // peer query
- std::shared_ptr wsv;
-
// WSV restorer
std::shared_ptr wsv_restorer_;
@@ -204,7 +202,7 @@ class Irohad {
public:
std::shared_ptr storage;
- iroha::keypair_t keypair;
+ shared_model::crypto::Keypair keypair;
grpc::ServerBuilder builder;
};
diff --git a/irohad/main/impl/consensus_init.cpp b/irohad/main/impl/consensus_init.cpp
index d219c66a09..40984c9f10 100644
--- a/irohad/main/impl/consensus_init.cpp
+++ b/irohad/main/impl/consensus_init.cpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -38,14 +38,41 @@ namespace iroha {
return consensus_network;
}
- auto YacInit::createCryptoProvider(const keypair_t &keypair) {
+ auto YacInit::createCryptoProvider(
+ const shared_model::crypto::Keypair &keypair) {
auto crypto = std::make_shared(keypair);
return crypto;
}
- auto YacInit::createTimer() {
- return std::make_shared();
+ auto YacInit::createTimer(std::chrono::milliseconds delay_milliseconds) {
+ return std::make_shared([delay_milliseconds] {
+ // static factory with a single thread
+ //
+ // observe_on_new_thread -- coordination which creates new thread with
+ // observe_on strategy -- all subsequent operations will be performed
+ // on this thread.
+ //
+ // scheduler owns a timeline that is exposed by the now() method.
+ // scheduler is also a factory for workers in that timeline.
+ //
+ // coordination is a factory for coordinators and has a scheduler.
+ //
+ // coordinator has a worker, and is a factory for coordinated
+ // observables, subscribers and schedulable functions.
+ //
+ // A new thread scheduler is created
+ // by calling .create_coordinator().get_scheduler()
+ //
+ // static allows to reuse the same thread in subsequent calls to this
+ // lambda
+ static rxcpp::observe_on_one_worker coordination(
+ rxcpp::observe_on_new_thread()
+ .create_coordinator()
+ .get_scheduler());
+ return rxcpp::observable<>::timer(
+ std::chrono::milliseconds(delay_milliseconds), coordination);
+ });
}
auto YacInit::createHashProvider() {
@@ -54,21 +81,20 @@ namespace iroha {
std::shared_ptr YacInit::createYac(
ClusterOrdering initial_order,
- const keypair_t &keypair,
+ const shared_model::crypto::Keypair &keypair,
std::chrono::milliseconds delay_milliseconds) {
return Yac::create(YacVoteStorage(),
createNetwork(),
createCryptoProvider(keypair),
- createTimer(),
- initial_order,
- delay_milliseconds.count());
+ createTimer(delay_milliseconds),
+ initial_order);
}
std::shared_ptr YacInit::initConsensusGate(
std::shared_ptr wsv,
std::shared_ptr block_creator,
std::shared_ptr block_loader,
- const keypair_t &keypair,
+ const shared_model::crypto::Keypair &keypair,
std::chrono::milliseconds vote_delay_milliseconds,
std::chrono::milliseconds load_delay_milliseconds) {
auto peer_orderer = createPeerOrderer(wsv);
diff --git a/irohad/main/impl/consensus_init.hpp b/irohad/main/impl/consensus_init.hpp
index 97406e8009..dd2a82700e 100644
--- a/irohad/main/impl/consensus_init.hpp
+++ b/irohad/main/impl/consensus_init.hpp
@@ -1,5 +1,5 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
+ * Copyright Soramitsu Co., Ltd. 2018 All Rights Reserved.
* http://soramitsu.co.jp
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -30,6 +30,7 @@
#include "consensus/yac/yac_gate.hpp"
#include "consensus/yac/yac_hash_provider.hpp"
#include "consensus/yac/yac_peer_orderer.hpp"
+#include "cryptography/keypair.hpp"
#include "network/block_loader.hpp"
#include "simulator/block_creator.hpp"
@@ -45,15 +46,15 @@ namespace iroha {
auto createNetwork();
- auto createCryptoProvider(const keypair_t &keypair);
+ auto createCryptoProvider(const shared_model::crypto::Keypair &keypair);
- auto createTimer();
+ auto createTimer(std::chrono::milliseconds delay_milliseconds);
auto createHashProvider();
std::shared_ptr createYac(
ClusterOrdering initial_order,
- const keypair_t &keypair,
+ const shared_model::crypto::Keypair &keypair,
std::chrono::milliseconds delay_milliseconds);
public:
@@ -61,7 +62,7 @@ namespace iroha {
std::shared_ptr wsv,
std::shared_ptr block_creator,
std::shared_ptr block_loader,
- const keypair_t &keypair,
+ const shared_model::crypto::Keypair &keypair,
std::chrono::milliseconds vote_delay_milliseconds,
std::chrono::milliseconds load_delay_milliseconds);
diff --git a/irohad/main/impl/ordering_init.cpp b/irohad/main/impl/ordering_init.cpp
index a6b8137ffc..8026947707 100644
--- a/irohad/main/impl/ordering_init.cpp
+++ b/irohad/main/impl/ordering_init.cpp
@@ -1,18 +1,6 @@
/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
- * http://soramitsu.co.jp
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright Soramitsu Co., Ltd. All Rights Reserved.
+ * SPDX-License-Identifier: Apache-2.0
*/
#include "main/impl/ordering_init.hpp"
@@ -22,8 +10,12 @@
namespace iroha {
namespace network {
auto OrderingInit::createGate(
- std::shared_ptr transport) {
- auto gate = std::make_shared(transport);
+ std::shared_ptr transport,
+ std::shared_ptr block_query) {
+ auto height = block_query->getTopBlocks(1).as_blocking().last()->height();
+ auto gate =
+ std::make_shared(transport, height);
+ log_->info("Creating Ordering Gate with initial height {}", height);
transport->subscribe(gate);
return gate;
}
@@ -38,7 +30,8 @@ namespace iroha {
return std::make_shared(
wsv,
max_size,
- delay_milliseconds.count(),
+ rxcpp::observable<>::interval(delay_milliseconds,
+ rxcpp::observe_on_new_thread()),
transport,
persistent_state);
}
@@ -48,13 +41,15 @@ namespace iroha {
size_t max_size,
std::chrono::milliseconds delay_milliseconds,
std::shared_ptr
- persistent_state) {
+ persistent_state,
+ std::shared_ptr block_query) {
auto ledger_peers = wsv->getLedgerPeers();
if (not ledger_peers or ledger_peers.value().empty()) {
log_->error(
"Ledger don't have peers. Do you set correct genesis block?");
}
auto network_address = ledger_peers->front()->address();
+ log_->info("Ordering gate is at {}", network_address);
ordering_gate_transport =
std::make_shared(
network_address);
@@ -67,7 +62,7 @@ namespace iroha {
ordering_service_transport,
persistent_state);
ordering_service_transport->subscribe(ordering_service);
- ordering_gate = createGate(ordering_gate_transport);
+ ordering_gate = createGate(ordering_gate_transport, block_query);
return ordering_gate;
}
} // namespace network
diff --git a/irohad/main/impl/ordering_init.hpp b/irohad/main/impl/ordering_init.hpp
index bd8828be1d..8149866bbd 100644
--- a/irohad/main/impl/ordering_init.hpp
+++ b/irohad/main/impl/ordering_init.hpp
@@ -18,6 +18,7 @@
#ifndef IROHA_ORDERING_INIT_HPP
#define IROHA_ORDERING_INIT_HPP
+#include "ametsuchi/block_query.hpp"
#include "ametsuchi/peer_query.hpp"
#include "logger/logger.hpp"
#include "ordering/impl/ordering_gate_impl.hpp"
@@ -41,9 +42,12 @@ namespace iroha {
/**
* Init effective realisation of ordering gate (client of ordering
* service)
- * @param network_address - address of ordering service
+ * @param transport - object which will be notified
+ * about incoming proposals and send transactions
+ * @param block_query - block store to get last block height
*/
- auto createGate(std::shared_ptr);
+ auto createGate(std::shared_ptr transport,
+ std::shared_ptr block_query);
/**
* Init ordering service
@@ -67,14 +71,16 @@ namespace iroha {
* @param loop - handler of async events
* @param max_size - limitation of proposal size
* @param delay_milliseconds - delay before emitting proposal
- * @return effective realisation of OrderingGate
+ * @param block_query - block store to get last block height
+ * @return efficient implementation of OrderingGate
*/
std::shared_ptr initOrderingGate(
std::shared_ptr wsv,
size_t max_size,
std::chrono::milliseconds delay_milliseconds,
std::shared_ptr
- persistent_state);
+ persistent_state,
+ std::shared_ptr block_query);
std::shared_ptr ordering_service;
std::shared_ptr ordering_gate;
diff --git a/irohad/main/impl/raw_block_loader.cpp b/irohad/main/impl/raw_block_loader.cpp
index 38f1600e98..a4c601c243 100644
--- a/irohad/main/impl/raw_block_loader.cpp
+++ b/irohad/main/impl/raw_block_loader.cpp
@@ -16,26 +16,30 @@
*/
#include "main/raw_block_loader.hpp"
+
#include
-#include
-#include "common/types.hpp"
-#include "model/converters/json_common.hpp"
+
+#include "converters/protobuf/json_proto_converter.hpp"
+#include "backend/protobuf/block.hpp"
namespace iroha {
namespace main {
+ using shared_model::converters::protobuf::jsonToProto;
+ using shared_model::interface::Block;
+
BlockLoader::BlockLoader() : log_(logger::log("BlockLoader")) {}
- boost::optional BlockLoader::parseBlock(std::string data) {
- auto document = model::converters::stringToJson(data);
- if (not document) {
- log_->error("Blob parsing failed");
- return boost::none;
- }
- return block_factory_.deserialize(document.value());
+ boost::optional> BlockLoader::parseBlock(
+ const std::string &data) {
+ return jsonToProto(data) | [](auto &&block) {
+ return boost::optional>(
+ std::make_shared(std::move(block)));
+ };
}
- boost::optional BlockLoader::loadFile(std::string path) {
+ boost::optional BlockLoader::loadFile(
+ const std::string &path) {
std::ifstream file(path);
if (not file) {
log_->error("Cannot read '" + path + "'");
diff --git a/irohad/main/iroha_conf_loader.hpp b/irohad/main/iroha_conf_loader.hpp
index 63982aa871..099137df3f 100644
--- a/irohad/main/iroha_conf_loader.hpp
+++ b/irohad/main/iroha_conf_loader.hpp
@@ -18,6 +18,7 @@
#ifndef IROHA_CONF_LOADER_HPP
#define IROHA_CONF_LOADER_HPP
+#include
#include
#include
#include
diff --git a/irohad/main/irohad.cpp b/irohad/main/irohad.cpp
index 06f6936630..71fd35cb1f 100644
--- a/irohad/main/irohad.cpp
+++ b/irohad/main/irohad.cpp
@@ -20,7 +20,6 @@
#include
#include
#include
-#include "backend/protobuf/from_old_model.hpp"
#include "common/result.hpp"
#include "crypto/keys_manager_impl.hpp"
#include "main/application.hpp"
@@ -100,11 +99,9 @@ int main(int argc, char *argv[]) {
// Reading public and private key files
iroha::KeysManagerImpl keysManager(FLAGS_keypair_name);
- iroha::keypair_t keypair{};
+ auto keypair = keysManager.loadKeys();
// Check if both keys are read properly
- if (auto loadedKeypair = keysManager.loadKeys()) {
- keypair = *loadedKeypair;
- } else {
+ if (not keypair) {
// Abort execution if not
log->error("Failed to load keypair");
return EXIT_FAILURE;
@@ -119,7 +116,7 @@ int main(int argc, char *argv[]) {
std::chrono::milliseconds(config[mbr::ProposalDelay].GetUint()),
std::chrono::milliseconds(config[mbr::VoteDelay].GetUint()),
std::chrono::milliseconds(config[mbr::LoadDelay].GetUint()),
- keypair);
+ *keypair);
// Check if iroha daemon storage was successfully initialized
if (not irohad.storage) {
@@ -151,9 +148,20 @@ int main(int argc, char *argv[]) {
log->info("Block is parsed");
// Applying transactions from genesis block to iroha storage
- irohad.storage->insertBlock(shared_model::proto::from_old(block.value()));
+ irohad.storage->insertBlock(*block.value());
log->info("Genesis block inserted, number of transactions: {}",
- block.value().transactions.size());
+ block.value()->transactions().size());
+ }
+
+ // check if at least one block is available in the ledger
+ auto blocks_exist = false;
+ irohad.storage->getBlockQuery()->getTopBlocks(1).subscribe(
+ [&blocks_exist](auto block) { blocks_exist = true; });
+
+ if (not blocks_exist) {
+ log->error(
+ "There are no blocks in the ledger. Use --genesis_block parameter.");
+ return EXIT_FAILURE;
}
// init pipeline components
diff --git a/irohad/main/raw_block_loader.hpp b/irohad/main/raw_block_loader.hpp
index e6500c4b04..c6fc3b0c66 100644
--- a/irohad/main/raw_block_loader.hpp
+++ b/irohad/main/raw_block_loader.hpp
@@ -19,13 +19,17 @@
#define IROHA_RAW_BLOCK_INSERTION_HPP
#include
-#include
#include
-#include
-#include "ametsuchi/storage.hpp"
+
+#include
+
#include "logger/logger.hpp"
-#include "model/block.hpp"
-#include "model/converters/json_block_factory.hpp"
+
+namespace shared_model {
+ namespace interface {
+ class Block;
+ }
+}
namespace iroha {
namespace main {
@@ -44,19 +48,17 @@ namespace iroha {
* @param data - raw presenetation of block
* @return object if operation done successfully, nullopt otherwise
*/
- boost::optional parseBlock(std::string data);
+ boost::optional>
+ parseBlock(const std::string &data);
/**
- * Additional method
* Loading file from target path
* @param path - target file
- * @return string with content or nullopt
+ * @return string with file content or nullopt
*/
- boost::optional loadFile(std::string path);
+ boost::optional loadFile(const std::string &path);
private:
- model::converters::JsonBlockFactory block_factory_;
-
logger::Logger log_;
};
diff --git a/irohad/main/server_runner.cpp b/irohad/main/server_runner.cpp
index 0b40f3a8a7..c7b2a1db1e 100644
--- a/irohad/main/server_runner.cpp
+++ b/irohad/main/server_runner.cpp
@@ -44,6 +44,10 @@ iroha::expected::Result ServerRunner::run() {
builder.RegisterService(service.get());
}
+ // in order to bypass built-in limitation of gRPC message size
+ builder.SetMaxReceiveMessageSize(INT_MAX);
+ builder.SetMaxSendMessageSize(INT_MAX);
+
serverInstance_ = builder.BuildAndStart();
serverInstanceCV_.notify_one();
@@ -61,3 +65,9 @@ void ServerRunner::waitForServersReady() {
serverInstanceCV_.wait(lock);
}
}
+
+void ServerRunner::shutdown() {
+ if (serverInstance_) {
+ serverInstance_->Shutdown();
+ }
+}
diff --git a/irohad/main/server_runner.hpp b/irohad/main/server_runner.hpp
index 04de6ad1d5..d633d89f03 100644
--- a/irohad/main/server_runner.hpp
+++ b/irohad/main/server_runner.hpp
@@ -53,6 +53,11 @@ class ServerRunner {
*/
void waitForServersReady();
+ /**
+ * Ask grpc server to terminate.
+ */
+ void shutdown();
+
private:
std::unique_ptr serverInstance_;
std::mutex waitForServer_;
diff --git a/irohad/model/CMakeLists.txt b/irohad/model/CMakeLists.txt
index a35be78ca2..77c02c7715 100644
--- a/irohad/model/CMakeLists.txt
+++ b/irohad/model/CMakeLists.txt
@@ -29,8 +29,8 @@ target_link_libraries(sha3_hash
add_library(model
model_crypto_provider_impl.cpp
impl/model_operators.cpp
- impl/query_execution.cpp
)
+
target_link_libraries(model
hash
sha3_hash
diff --git a/irohad/model/converters/impl/json_transaction_factory.cpp b/irohad/model/converters/impl/json_transaction_factory.cpp
index 75e1a285c1..7ab22929b9 100644
--- a/irohad/model/converters/impl/json_transaction_factory.cpp
+++ b/irohad/model/converters/impl/json_transaction_factory.cpp
@@ -43,7 +43,6 @@ namespace iroha {
document.AddMember("created_ts", transaction.created_ts, allocator);
document.AddMember(
"creator_account_id", transaction.creator_account_id, allocator);
- document.AddMember("tx_counter", transaction.tx_counter, allocator);
Value commands;
commands.SetArray();
@@ -82,7 +81,6 @@ namespace iroha {
return boost::make_optional(Transaction())
| des.Uint64(&Transaction::created_ts, "created_ts")
| des.String(&Transaction::creator_account_id, "creator_account_id")
- | des.Uint64(&Transaction::tx_counter, "tx_counter")
| des.Array(&Transaction::signatures, "signatures")
| des.Array(&Transaction::commands, "commands", des_commands);
}
diff --git a/irohad/model/converters/impl/pb_command_factory.cpp b/irohad/model/converters/impl/pb_command_factory.cpp
index 3a68bb484c..e8f7bcf363 100644
--- a/irohad/model/converters/impl/pb_command_factory.cpp
+++ b/irohad/model/converters/impl/pb_command_factory.cpp
@@ -21,6 +21,8 @@
#include "model/converters/pb_common.hpp"
+using namespace shared_model::permissions;
+
namespace iroha {
namespace model {
namespace converters {
@@ -85,19 +87,24 @@ namespace iroha {
// Can get all account assets
(protocol::RolePermission::can_get_all_acc_ast, can_get_all_acc_ast)
// Can get domain account assets
- (protocol::RolePermission::can_get_domain_acc_ast, can_get_domain_acc_ast)
+ (protocol::RolePermission::can_get_domain_acc_ast,
+ can_get_domain_acc_ast)
// Can get my account detail
- (protocol::RolePermission::can_get_my_acc_detail, can_get_my_acc_detail)
+ (protocol::RolePermission::can_get_my_acc_detail,
+ can_get_my_acc_detail)
// Can get all account detail
- (protocol::RolePermission::can_get_all_acc_detail, can_get_all_acc_detail)
+ (protocol::RolePermission::can_get_all_acc_detail,
+ can_get_all_acc_detail)
// Can get domain account detail
- (protocol::RolePermission::can_get_domain_acc_detail, can_get_domain_acc_detail)
+ (protocol::RolePermission::can_get_domain_acc_detail,
+ can_get_domain_acc_detail)
// Can get my account transactions
(protocol::RolePermission::can_get_my_acc_txs, can_get_my_acc_txs)
// Can get all account transactions
(protocol::RolePermission::can_get_all_acc_txs, can_get_all_acc_txs)
// Can get domain account transactions
- (protocol::RolePermission::can_get_domain_acc_txs, can_get_domain_acc_txs)
+ (protocol::RolePermission::can_get_domain_acc_txs,
+ can_get_domain_acc_txs)
// Can get my account assets transactions
(protocol::RolePermission::can_get_my_acc_ast_txs,
can_get_my_acc_ast_txs)
@@ -113,20 +120,20 @@ namespace iroha {
(protocol::RolePermission::can_get_all_txs, can_get_all_txs)
// Can grant set quorum
- (protocol::RolePermission::can_grant_can_set_quorum,
- can_grant + can_set_quorum)
+ (protocol::RolePermission::can_grant_can_set_my_quorum,
+ can_grant + can_set_my_quorum)
// Can grant add signatory
- (protocol::RolePermission::can_grant_can_add_signatory,
- can_grant + can_add_signatory)
+ (protocol::RolePermission::can_grant_can_add_my_signatory,
+ can_grant + can_add_my_signatory)
// Can grant remove signatory
- (protocol::RolePermission::can_grant_can_remove_signatory,
- can_grant + can_remove_signatory)
+ (protocol::RolePermission::can_grant_can_remove_my_signatory,
+ can_grant + can_remove_my_signatory)
// Can grant can_transfer
- (protocol::RolePermission::can_grant_can_transfer,
- can_grant + can_transfer)
+ (protocol::RolePermission::can_grant_can_transfer_my_assets,
+ can_grant + can_transfer_my_assets)
// Can write details to other accounts
- (protocol::RolePermission::can_grant_can_set_detail,
- can_grant + can_set_detail);
+ (protocol::RolePermission::can_grant_can_set_my_account_detail,
+ can_grant + can_set_my_account_detail);
boost::assign::insert(pb_grant_map_)
// Can add my signatory
@@ -142,7 +149,7 @@ namespace iroha {
can_set_detail)
// Can transfer my assets
(protocol::GrantablePermission::can_transfer_my_assets,
- can_transfer);
+ can_transfer);
}
// asset quantity
diff --git a/irohad/model/converters/impl/pb_query_response_factory.cpp b/irohad/model/converters/impl/pb_query_response_factory.cpp
index 02ae58224c..80f288e432 100644
--- a/irohad/model/converters/impl/pb_query_response_factory.cpp
+++ b/irohad/model/converters/impl/pb_query_response_factory.cpp
@@ -84,7 +84,7 @@ namespace iroha {
}
if (response) {
- response->set_query_hash(query_response->query_hash.to_string());
+ response->set_query_hash(query_response->query_hash.to_hexstring());
}
return response;
}
diff --git a/irohad/model/converters/impl/pb_transaction_factory.cpp b/irohad/model/converters/impl/pb_transaction_factory.cpp
index c4fc9ec257..c7c34a3d15 100644
--- a/irohad/model/converters/impl/pb_transaction_factory.cpp
+++ b/irohad/model/converters/impl/pb_transaction_factory.cpp
@@ -34,7 +34,6 @@ namespace iroha {
auto pl = pbtx.mutable_payload();
pl->set_created_time(tx.created_ts);
pl->set_creator_account_id(tx.creator_account_id);
- pl->set_tx_counter(tx.tx_counter);
for (const auto &command : tx.commands) {
auto cmd = pl->add_commands();
@@ -43,7 +42,7 @@ namespace iroha {
}
for (const auto &sig_obj : tx.signatures) {
- auto proto_signature = pbtx.add_signature();
+ auto proto_signature = pbtx.add_signatures();
proto_signature->set_pubkey(sig_obj.pubkey.to_string());
proto_signature->set_signature(sig_obj.signature.to_string());
}
@@ -56,11 +55,10 @@ namespace iroha {
model::Transaction tx;
const auto &pl = pb_tx.payload();
- tx.tx_counter = pl.tx_counter();
tx.creator_account_id = pl.creator_account_id();
tx.created_ts = pl.created_time();
- for (const auto &pb_sig : pb_tx.signature()) {
+ for (const auto &pb_sig : pb_tx.signatures()) {
model::Signature sig{};
sig.pubkey = pubkey_t::from_string(pb_sig.pubkey());
sig.signature = sig_t::from_string(pb_sig.signature());
diff --git a/irohad/model/generators/CMakeLists.txt b/irohad/model/generators/CMakeLists.txt
index 90df2ed84f..7c05584618 100644
--- a/irohad/model/generators/CMakeLists.txt
+++ b/irohad/model/generators/CMakeLists.txt
@@ -16,7 +16,6 @@
#
add_library(model_generators
- impl/signature_generator.cpp
impl/block_generator.cpp
impl/transaction_generator.cpp
impl/command_generator.cpp
diff --git a/irohad/model/generators/impl/command_generator.cpp b/irohad/model/generators/impl/command_generator.cpp
index 5b79890bef..6c1fcd4220 100644
--- a/irohad/model/generators/impl/command_generator.cpp
+++ b/irohad/model/generators/impl/command_generator.cpp
@@ -32,6 +32,7 @@
#include "validators/permissions.hpp"
using namespace generator;
+using namespace shared_model::permissions;
namespace iroha {
namespace model {
diff --git a/irohad/model/generators/impl/transaction_generator.cpp b/irohad/model/generators/impl/transaction_generator.cpp
index 0632dc2f98..fe44cde0e1 100644
--- a/irohad/model/generators/impl/transaction_generator.cpp
+++ b/irohad/model/generators/impl/transaction_generator.cpp
@@ -25,18 +25,28 @@
namespace iroha {
namespace model {
namespace generators {
+
+ iroha::keypair_t *makeOldModel(
+ const shared_model::crypto::Keypair &keypair) {
+ return new iroha::keypair_t{
+ shared_model::crypto::PublicKey::OldPublicKeyType::from_string(
+ toBinaryString(keypair.publicKey())),
+ shared_model::crypto::PrivateKey::OldPrivateKeyType::from_string(
+ toBinaryString(keypair.privateKey()))};
+ }
+
Transaction TransactionGenerator::generateGenesisTransaction(
ts64_t timestamp, std::vector peers_address) {
Transaction tx;
tx.created_ts = timestamp;
tx.creator_account_id = "";
- tx.tx_counter = 0;
CommandGenerator command_generator;
// Add peers
for (size_t i = 0; i < peers_address.size(); ++i) {
KeysManagerImpl manager("node" + std::to_string(i));
manager.createKeys();
- auto keypair = *manager.loadKeys();
+ auto keypair = *std::unique_ptr(
+ makeOldModel(*manager.loadKeys()));
tx.commands.push_back(command_generator.generateAddPeer(
Peer(peers_address[i], keypair.pubkey)));
}
@@ -57,12 +67,14 @@ namespace iroha {
// Create accounts
KeysManagerImpl manager("admin@test");
manager.createKeys();
- auto keypair = *manager.loadKeys();
+ auto keypair = *std::unique_ptr(
+ makeOldModel(*manager.loadKeys()));
tx.commands.push_back(command_generator.generateCreateAccount(
"admin", "test", keypair.pubkey));
manager = KeysManagerImpl("test@test");
manager.createKeys();
- keypair = *manager.loadKeys();
+ keypair = *std::unique_ptr(
+ makeOldModel(*manager.loadKeys()));
tx.commands.push_back(command_generator.generateCreateAccount(
"test", "test", keypair.pubkey));
@@ -76,22 +88,19 @@ namespace iroha {
Transaction TransactionGenerator::generateTransaction(
ts64_t timestamp,
std::string creator_account_id,
- uint64_t tx_counter,
std::vector> commands) {
Transaction tx;
tx.created_ts = timestamp;
tx.creator_account_id = creator_account_id;
- tx.tx_counter = tx_counter;
tx.commands = commands;
return tx;
}
Transaction TransactionGenerator::generateTransaction(
std::string creator_account_id,
- uint64_t tx_counter,
std::vector> commands) {
return generateTransaction(
- iroha::time::now(), creator_account_id, tx_counter, commands);
+ iroha::time::now(), creator_account_id, commands);
}
} // namespace generators
diff --git a/irohad/model/generators/transaction_generator.hpp b/irohad/model/generators/transaction_generator.hpp
index f72d335e76..fe72ed4361 100644
--- a/irohad/model/generators/transaction_generator.hpp
+++ b/irohad/model/generators/transaction_generator.hpp
@@ -41,27 +41,23 @@ namespace iroha {
* Generate transaction from give meta data and commands list
* @param timestamp
* @param creator_account_id
- * @param tx_counter
* @param commands
* @return
*/
Transaction generateTransaction(
ts64_t timestamp,
std::string creator_account_id,
- uint64_t tx_counter,
std::vector> commands);
/**
* Generate transaction from give meta data and commands list
* @param timestamp
* @param creator_account_id
- * @param tx_counter
* @param commands
* @return
*/
Transaction generateTransaction(
std::string creator_account_id,
- uint64_t tx_counter,
std::vector> commands);
};
} // namespace generators
diff --git a/irohad/model/impl/model_operators.cpp b/irohad/model/impl/model_operators.cpp
index 1aaf25a310..89eb627738 100644
--- a/irohad/model/impl/model_operators.cpp
+++ b/irohad/model/impl/model_operators.cpp
@@ -201,7 +201,7 @@ namespace iroha {
/* Signature */
bool Signature::operator==(const Signature &rhs) const {
- return rhs.pubkey == pubkey && rhs.signature == signature;
+ return rhs.pubkey == pubkey;
}
/* Transaction */
@@ -211,7 +211,7 @@ namespace iroha {
rhs.commands.begin(),
rhs.commands.end(),
[](const auto &i, const auto &j) { return *i == *j; })
- && rhs.tx_counter == tx_counter && rhs.signatures == signatures
+ && rhs.signatures == signatures
&& rhs.created_ts == created_ts;
}
diff --git a/irohad/model/query_execution.hpp b/irohad/model/query_execution.hpp
index 7953d4be59..e69de29bb2 100644
--- a/irohad/model/query_execution.hpp
+++ b/irohad/model/query_execution.hpp
@@ -1,137 +0,0 @@
-/**
- * Copyright Soramitsu Co., Ltd. 2017 All Rights Reserved.
- * http://soramitsu.co.jp
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef IROHA_QUERY_EXECUTION_HPP
-#define IROHA_QUERY_EXECUTION_HPP
-
-#include "ametsuchi/block_query.hpp"
-#include "ametsuchi/wsv_query.hpp"
-#include "builders/protobuf/builder_templates/query_response_template.hpp"
-
-namespace shared_model {
- namespace interface {
- class QueryResponse;
- class Query;
- } // namespace interface
-} // namespace shared_model
-
-namespace iroha {
- namespace model {
-
- /**
- * Converting business objects to protobuf and vice versa
- */
- class QueryProcessingFactory {
- using QueryResponseBuilder =
- shared_model::proto::TemplateQueryResponseBuilder<0>;
-
- using QueryResponseBuilderDone =
- shared_model::proto::TemplateQueryResponseBuilder<1>;
-
- public:
- /**
- * Execute and validate query.
- *
- * @param query
- * @return
- */
- std::shared_ptr execute(
- const shared_model::interface::Query &query);
- /**
- *
- * @param wsvQuery
- * @param blockQuery
- */
- QueryProcessingFactory(std::shared_ptr wsvQuery,
- std::shared_ptr blockQuery);
-
- private:
- bool validate(
- const shared_model::interface::Query &query,
- const shared_model::interface::GetAssetInfo &get_asset_info);
-
- bool validate(const shared_model::interface::Query &query,
- const shared_model::interface::GetRoles &get_roles);
-
- bool validate(const shared_model::interface::Query &query,
- const shared_model::interface::GetRolePermissions
- &get_role_permissions);
-
- bool validate(
- const shared_model::interface::Query &query,
- const shared_model::interface::GetAccountAssets &get_account_assets);
-
- bool validate(const shared_model::interface::Query &query,
- const shared_model::interface::GetAccount &get_account);
-
- bool validate(
- const shared_model::interface::Query &query,
- const shared_model::interface::GetSignatories &get_signatories);
-
- bool validate(const shared_model::interface::Query &query,
- const shared_model::interface::GetAccountTransactions
- &get_account_transactions);
-
- bool validate(const shared_model::interface::Query &query,
- const shared_model::interface::GetAccountAssetTransactions
- &get_account_asset_transactions);
-
- bool validate(
- const shared_model::interface::Query &query,
- const shared_model::interface::GetAccountDetail &get_account_detail);
-
- bool validate(
- const shared_model::interface::Query &query,
- const shared_model::interface::GetTransactions &get_transactions);
-
- QueryResponseBuilderDone executeGetAssetInfo(
- const shared_model::interface::GetAssetInfo &get_asset_info);
-
- QueryResponseBuilderDone executeGetRoles(
- const shared_model::interface::GetRoles &query);
-
- QueryResponseBuilderDone executeGetRolePermissions(
- const shared_model::interface::GetRolePermissions &query);
-
- QueryResponseBuilderDone executeGetAccountAssets(
- const shared_model::interface::GetAccountAssets &query);
-
- QueryResponseBuilderDone executeGetAccountDetail(
- const shared_model::interface::GetAccountDetail &query);
-
- QueryResponseBuilderDone executeGetAccount(
- const shared_model::interface::GetAccount &query);
-
- QueryResponseBuilderDone executeGetSignatories(
- const shared_model::interface::GetSignatories &query);
-
- QueryResponseBuilderDone executeGetAccountAssetTransactions(
- const shared_model::interface::GetAccountAssetTransactions &query);
-
- QueryResponseBuilderDone executeGetAccountTransactions(
- const shared_model::interface::GetAccountTransactions &query);
-
- QueryResponseBuilderDone executeGetTransactions(
- const shared_model::interface::GetTransactions &query);
-
- std::shared_ptr _wsvQuery;
- std::shared_ptr _blockQuery;
- };
-
- } // namespace model
-} // namespace iroha
-
-#endif // IROHA_QUERY_EXECUTION_HPP
diff --git a/irohad/model/transaction.hpp b/irohad/model/transaction.hpp
index 7b0df51ec3..16272c4765 100644
--- a/irohad/model/transaction.hpp
+++ b/irohad/model/transaction.hpp
@@ -54,17 +54,6 @@ namespace iroha {
*/
std::string creator_account_id{};
- /**
- * Number for protecting against replay attack.
- * Number that is stored inside of each account.
- * Used to prevent replay attacks.
- * During a stateful validation look at account and compare numbers
- * if number inside a transaction is less than in account,
- * this transaction is replayed.
- * META field
- */
- uint64_t tx_counter{};
-
/**
* Bunch of commands attached to transaction
* shared_ptr is used since Proposal has to be copied
diff --git a/irohad/network/CMakeLists.txt b/irohad/network/CMakeLists.txt
index 9c57e2e9d5..0e0bfadd68 100644
--- a/irohad/network/CMakeLists.txt
+++ b/irohad/network/CMakeLists.txt
@@ -4,7 +4,7 @@ add_library(networking
target_link_libraries(networking
rxcpp
- model
+ shared_model_interfaces
ordering_service
synchronizer
logger
@@ -15,10 +15,11 @@ add_library(block_loader
)
target_link_libraries(block_loader
- pb_model_converters
loader_grpc
rxcpp
- model
+ shared_model_interfaces
+ shared_model_proto_backend
+ logger
)
add_library(block_loader_service
diff --git a/irohad/network/impl/async_grpc_client.hpp b/irohad/network/impl/async_grpc_client.hpp
index 9db412b9aa..b82a732a04 100644
--- a/irohad/network/impl/async_grpc_client.hpp
+++ b/irohad/network/impl/async_grpc_client.hpp
@@ -32,7 +32,9 @@ namespace iroha {
template
class AsyncGrpcClient {
public:
- AsyncGrpcClient() : thread_(&AsyncGrpcClient::asyncCompleteRpc, this) {}
+ explicit AsyncGrpcClient(logger::Logger &&log)
+ : thread_(&AsyncGrpcClient::asyncCompleteRpc, this),
+ log_(std::move(log)) {}
/**
* Listen to gRPC server responses
@@ -42,7 +44,9 @@ namespace iroha {
auto ok = false;
while (cq_.Next(&got_tag, &ok)) {
auto call = static_cast(got_tag);
-
+ if (not call->status.ok()) {
+ log_->warn("RPC failed: {}", call->status.error_message());
+ }
delete call;
}
}
@@ -56,6 +60,7 @@ namespace iroha {
grpc::CompletionQueue cq_;
std::thread thread_;
+ logger::Logger log_;
/**
* State and data information of gRPC call
diff --git a/irohad/network/impl/block_loader_impl.cpp b/irohad/network/impl/block_loader_impl.cpp
index 01670afb0c..90203580f4 100644
--- a/irohad/network/impl/block_loader_impl.cpp
+++ b/irohad/network/impl/block_loader_impl.cpp
@@ -17,13 +17,12 @@
#include "network/impl/block_loader_impl.hpp"
-#include
-
#include
#include "backend/protobuf/block.hpp"
#include "builders/protobuf/transport_builder.hpp"
#include "interfaces/common_objects/peer.hpp"
+#include "network/impl/grpc_channel_builder.hpp"
using namespace iroha::ametsuchi;
using namespace iroha::network;
@@ -43,7 +42,6 @@ BlockLoaderImpl::BlockLoaderImpl(
const char *kPeerNotFound = "Cannot find peer";
const char *kTopBlockRetrieveFail = "Failed to retrieve top block";
-const char *kInvalidBlockSignatures = "Block signatures are invalid";
const char *kPeerRetrieveFail = "Failed to retrieve peers";
const char *kPeerFindFail = "Failed to find requested peer";
@@ -51,14 +49,11 @@ rxcpp::observable> BlockLoaderImpl::retrieveBlocks(
const PublicKey &peer_pubkey) {
return rxcpp::observable<>::create>(
[this, peer_pubkey](auto subscriber) {
- boost::optional top_block;
+ boost::optional> top_block;
block_query_->getTopBlocks(1)
.subscribe_on(rxcpp::observe_on_new_thread())
.as_blocking()
- .subscribe([&top_block](auto block) {
- top_block =
- *std::unique_ptr(block->makeOldModel());
- });
+ .subscribe([&top_block](auto block) { top_block = block; });
if (not top_block) {
log_->error(kTopBlockRetrieveFail);
subscriber.on_completed();
@@ -77,10 +72,10 @@ rxcpp::observable> BlockLoaderImpl::retrieveBlocks(
protocol::Block block;
// request next block to our top
- request.set_height(top_block->height + 1);
+ request.set_height((*top_block)->height() + 1);
auto reader =
- this->getPeerStub(peer.value()).retrieveBlocks(&context, request);
+ this->getPeerStub(**peer).retrieveBlocks(&context, request);
while (reader->Read(&block)) {
shared_model::proto::TransportBuilder<
shared_model::proto::Block,
@@ -122,8 +117,7 @@ boost::optional> BlockLoaderImpl::retrieveBlock(
// request block with specified hash
request.set_hash(toBinaryString(block_hash));
- auto status =
- getPeerStub(peer.value()).retrieveBlock(&context, request, &block);
+ auto status = getPeerStub(**peer).retrieveBlock(&context, request, &block);
if (not status.ok()) {
log_->warn(status.error_message());
return boost::none;
@@ -140,8 +134,8 @@ boost::optional> BlockLoaderImpl::retrieveBlock(
return boost::optional>(std::move(result));
}
-boost::optional BlockLoaderImpl::findPeer(
- const shared_model::crypto::PublicKey &pubkey) {
+boost::optional>
+BlockLoaderImpl::findPeer(const shared_model::crypto::PublicKey &pubkey) {
auto peers = peer_query_->getLedgerPeers();
if (not peers) {
log_->error(kPeerRetrieveFail);
@@ -157,19 +151,17 @@ boost::optional BlockLoaderImpl::findPeer(
log_->error(kPeerFindFail);
return boost::none;
}
-
- return *std::unique_ptr((*it)->makeOldModel());
+ return *it;
}
proto::Loader::Stub &BlockLoaderImpl::getPeerStub(
- const iroha::model::Peer &peer) {
- auto it = peer_connections_.find(peer);
+ const shared_model::interface::Peer &peer) {
+ auto it = peer_connections_.find(peer.address());
if (it == peer_connections_.end()) {
it = peer_connections_
.insert(std::make_pair(
- peer,
- proto::Loader::NewStub(grpc::CreateChannel(
- peer.address, grpc::InsecureChannelCredentials()))))
+ peer.address(),
+ network::createClient