From ccee941ac98290d3d3a6c9b06118368e97dd2746 Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Tue, 24 Jan 2023 22:41:31 +0000 Subject: [PATCH 1/8] Creating Transaction with Metadata --- checkPayment.js | 45 ++++++++ metadata.json | 7 ++ node_modules/.package-lock.json | 15 +++ node_modules/node-cmd/README.md | 158 +++++++++++++++++++++++++++++ node_modules/node-cmd/cmd.js | 42 ++++++++ node_modules/node-cmd/licence | 21 ++++ node_modules/node-cmd/package.json | 35 +++++++ package-lock.json | 27 +++++ package.json | 6 ++ tx.draft | 5 + 10 files changed, 361 insertions(+) create mode 100644 checkPayment.js create mode 100644 metadata.json create mode 100644 node_modules/.package-lock.json create mode 100644 node_modules/node-cmd/README.md create mode 100644 node_modules/node-cmd/cmd.js create mode 100644 node_modules/node-cmd/licence create mode 100644 node_modules/node-cmd/package.json create mode 100644 package-lock.json create mode 100644 package.json create mode 100644 tx.draft diff --git a/checkPayment.js b/checkPayment.js new file mode 100644 index 0000000..17658b9 --- /dev/null +++ b/checkPayment.js @@ -0,0 +1,45 @@ +import * as fs from 'fs'; +// Please add this dependency using npm install node-cmd +import cmd from 'node-cmd'; + +// Path to the cardano-cli binary or use the global one +const CARDANO_CLI_PATH = "cardano-cli"; +// The `testnet` identifier number +const CARDANO_NETWORK_MAGIC = 2; //1097911063; +// The directory where we store our payment keys +// assuming our current directory context is $HOME/receive-ada-sample +const CARDANO_KEYS_DIR = "keys"; +// The total payment we expect in lovelace unit +const TOTAL_EXPECTED_LOVELACE = 1000000; + +// Read wallet address value from payment.addr file +const walletAddress = fs.readFileSync(`${CARDANO_KEYS_DIR}/my_address.addr`).toString(); + +// We use the node-cmd npm library to execute shell commands and read the output data +const rawUtxoTable = cmd.runSync([ + CARDANO_CLI_PATH, + "query", "utxo", + 
"--testnet-magic", CARDANO_NETWORK_MAGIC, + "--address", walletAddress +].join(" ")); + +// Calculate total lovelace of the UTXO(s) inside the wallet address +const utxoTableRows = rawUtxoTable.data.trim().split('\n'); +console.log(utxoTableRows); + +let totalLovelaceRecv = 0; +let isPaymentComplete = false; + +for (let x = 2; x < utxoTableRows.length; x++) { + const cells = utxoTableRows[x].split(" ").filter(i => i); + totalLovelaceRecv += parseInt(cells[2]); + console.log(cells); +} + +// Determine if the total lovelace received is more than or equal to +// the total expected lovelace and displaying the results. +isPaymentComplete = totalLovelaceRecv >= TOTAL_EXPECTED_LOVELACE; + +console.log(`Total Received: ${totalLovelaceRecv} LOVELACE`); +console.log(`Expected Payment: ${TOTAL_EXPECTED_LOVELACE} LOVELACE`); +console.log(`Payment Complete: ${(isPaymentComplete ? "✅" : "❌")}`); \ No newline at end of file diff --git a/metadata.json b/metadata.json new file mode 100644 index 0000000..1efa68d --- /dev/null +++ b/metadata.json @@ -0,0 +1,7 @@ +{ + "1337": { + "name": "Receta Electronica Citaldoc", + "Rp": "Albendazol (Zentel) 200mg/5ml. Susp. 4% Fco 1 (uno)", + "Indicaciones": "1 cdita (5ml) una vez x dia por dos x 2 semanas a la misma hora." 
+ } +} diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json new file mode 100644 index 0000000..884f4ba --- /dev/null +++ b/node_modules/.package-lock.json @@ -0,0 +1,15 @@ +{ + "name": "repo", + "lockfileVersion": 2, + "requires": true, + "packages": { + "node_modules/node-cmd": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/node-cmd/-/node-cmd-5.0.0.tgz", + "integrity": "sha512-4sQTJmsS5uZKAPz/Df9fnIbmvOySfGdW+UreH4X5NcAOOpKjaE+K5wf4ehNBbZVPo0vQ36RkRnhhsXXJAT+Syw==", + "engines": { + "node": ">=6.4.0" + } + } + } +} diff --git a/node_modules/node-cmd/README.md b/node_modules/node-cmd/README.md new file mode 100644 index 0000000..b07d850 --- /dev/null +++ b/node_modules/node-cmd/README.md @@ -0,0 +1,158 @@ +# node-cmd + +*Node.js commandline/terminal interface.* + +Simple commandline, terminal, or shell interface to allow you to run cli or bash style commands as if you were in the terminal. + +Run commands asynchronously, and if needed can get the output as a string. 
+ +#### NPM Stats + +npm info : +[![NPM](https://nodei.co/npm/node-cmd.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/node-cmd/) +[See npm trends and stats for node-cmd](http://npm-stat.com/charts.html?package=node-cmd&author=&from=&to=) +![node-cmd npm version](https://img.shields.io/npm/v/node-cmd.svg) ![supported node version for node-cmd](https://img.shields.io/node/v/node-cmd.svg) ![total npm downloads for node-cmd](https://img.shields.io/npm/dt/node-cmd.svg) ![monthly npm downloads for node-cmd](https://img.shields.io/npm/dm/node-cmd.svg) ![npm licence for node-cmd](https://img.shields.io/npm/l/node-cmd.svg) + +[![RIAEvangelist](https://avatars3.githubusercontent.com/u/369041?v=3&s=100)](https://github.com/RIAEvangelist) + +GitHub info : +![node-cmd GitHub Release](https://img.shields.io/github/release/RIAEvangelist/node-cmd.svg) ![GitHub license node-cmd license](https://img.shields.io/github/license/RIAEvangelist/node-cmd.svg) ![open issues for node-cmd on GitHub](https://img.shields.io/github/issues/RIAEvangelist/node-cmd.svg) + +Package details websites : +* [GitHub.io site](http://riaevangelist.github.io/node-cmd/ "node-cmd documentation"). A prettier version of this site. +* [NPM Module](https://www.npmjs.org/package/node-cmd "node-cmd npm module"). The npm page for the node-cmd module. + +This work is licenced via the MIT Licence. 
+ + +# Methods + +|method | arguments | functionality | returns | +|-------|-----------|---------------|---------| +|run | command, callback | runs a command asynchronously| args for callback `err`,`data`,`stderr` | +|runSync| command | runs a command ***synchronously*** | obj {`err`,`data`,`stderr`} | + + +## Examples + +```javascript + +//*nix + + var cmd=require('node-cmd'); + +//*nix supports multiline commands + + cmd.runSync('touch ./example/example.created.file'); + + cmd.run( + `cd ./example +ls`, + function(err, data, stderr){ + console.log('examples dir now contains the example file along with : ',data) + } + ); + +``` + +```javascript + +//Windows + + var cmd=require('node-cmd'); + +//Windows multiline commands are not guaranteed to work try condensing to a single line. + + const syncDir=cmd.runSync('cd ./example & dir'); + + console.log(` + + Sync Err ${syncDir.err} + + Sync stderr: ${syncDir.stderr} + + Sync Data ${syncDir.data} + + `); + + cmd.run(`dir`, + function(err, data, stderr){ + console.log('the node-cmd dir contains : ',data) + } + ); + +``` + +```javascript + +//clone this repo! 
+ + var cmd=require('node-cmd'); + + const syncClone=cmd.runSync('git clone https://github.com/RIAEvangelist/node-cmd.git'); + + console.log(syncClone); + +``` + + +### Getting the CMD Process ID + +```javascript + + var cmd=require('node-cmd'); + + var process=cmd.run('node'); + console.log(process.pid); + +``` + +### Running a python shell from node + +```javascript +const cmd=require('node-cmd'); + +const processRef=cmd.run('python -i'); +let data_line = ''; + +//listen to the python terminal output +processRef.stdout.on( + 'data', + function(data) { + data_line += data; + if (data_line[data_line.length-1] == '\n') { + console.log(data_line); + } + } +); + +const pythonTerminalInput=`primes = [2, 3, 5, 7] +for prime in primes: + print(prime) + +`; + +//show what we are doing +console.log(`>>>${pythonTerminalInput}`); + +//send it to the open python terminal +processRef.stdin.write(pythonTerminalInput); + +``` + +Output : + +```python + +>>>primes = [2, 3, 5, 7] +for prime in primes: + print(prime) + + +2 +3 +5 +7 + + +``` diff --git a/node_modules/node-cmd/cmd.js b/node_modules/node-cmd/cmd.js new file mode 100644 index 0000000..fab82e6 --- /dev/null +++ b/node_modules/node-cmd/cmd.js @@ -0,0 +1,42 @@ +const { exec, execSync } = require('child_process'); + +const commandline={ + run:runCommand, + runSync:runSync, +}; + +function runCommand(command,callback){ + + return exec( + command, + ( + function(){ + return function(err,data,stderr){ + if(!callback) + return; + + callback(err, data, stderr); + } + } + )(callback) + ); +} + +function runSync(command){ + try { + return { + data: execSync(command).toString(), + err: null, + stderr: null + } + } + catch (error) { + return { + data: null, + err: error.stderr.toString(), + stderr: error.stderr.toString() + } + } +} + +module.exports=commandline; diff --git a/node_modules/node-cmd/licence b/node_modules/node-cmd/licence new file mode 100644 index 0000000..6c7d5e2 --- /dev/null +++ b/node_modules/node-cmd/licence 
@@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Brandon Nozaki Miller + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/node-cmd/package.json b/node_modules/node-cmd/package.json new file mode 100644 index 0000000..b41f15f --- /dev/null +++ b/node_modules/node-cmd/package.json @@ -0,0 +1,35 @@ +{ + "name": "node-cmd", + "version": "5.0.0", + "description": "Simple commandline/terminal/shell interface to allow you to run cli or bash style commands as if you were in the terminal.", + "main": "cmd.js", + "directories": { + "example": "example" + }, + "engines" : { + "node" : ">=6.4.0" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "https://github.com/RIAEvangelist/node-cmd.git" + }, + "keywords": [ + "commandline", + "terminal", + "shell", + "cmd", + "cli", + "bash", + "script", + "node" + ], + "author": "Brandon Nozaki Miller", + "license": "MIT", + "bugs": { + "url": "https://github.com/RIAEvangelist/node-cmd/issues" + }, + "homepage": "https://github.com/RIAEvangelist/node-cmd" +} diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..b5e27b4 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,27 @@ +{ + "name": "repo", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "node-cmd": "^5.0.0" + } + }, + "node_modules/node-cmd": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/node-cmd/-/node-cmd-5.0.0.tgz", + "integrity": "sha512-4sQTJmsS5uZKAPz/Df9fnIbmvOySfGdW+UreH4X5NcAOOpKjaE+K5wf4ehNBbZVPo0vQ36RkRnhhsXXJAT+Syw==", + "engines": { + "node": ">=6.4.0" + } + } + }, + "dependencies": { + "node-cmd": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/node-cmd/-/node-cmd-5.0.0.tgz", + "integrity": "sha512-4sQTJmsS5uZKAPz/Df9fnIbmvOySfGdW+UreH4X5NcAOOpKjaE+K5wf4ehNBbZVPo0vQ36RkRnhhsXXJAT+Syw==" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..ba9c008 --- /dev/null +++ b/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", 
+ "dependencies": { + "node-cmd": "^5.0.0" + } +} diff --git a/tx.draft b/tx.draft new file mode 100644 index 0000000..67f211e --- /dev/null +++ b/tx.draft @@ -0,0 +1,5 @@ +{ + "type": "Unwitnessed Tx BabbageEra", + "description": "Ledger Cddl Format", + "cborHex": "84a40081825820462c904495cf8cf33059daeebb81592085c6555cc342b845b65f1b1d60c6f7e2000181a200581d6055f79dfcafc5773120f64df9e1ca5932ac0c7477ca97b0c8b972ec2e011b000000025409283b021a0002bbc50758203408ce70d5afe51cb73c28de6fde092c60f92790b7d99e07641e5138499229a3a0f5d90103a100a1190539a36c496e6469636163696f6e65737840312063646974612028356d6c2920756e612076657a20782064696120706f7220646f73207820322073656d616e61732061206c61206d69736d6120686f72612e6252707833416c62656e64617a6f6c20285a656e74656c29203230306d672f356d6c2e20537573702e2034252046636f20312028756e6f29646e616d65781b52656365746120456c656374726f6e69636120436974616c646f63" +} From 2f7dd0e151912d74b247c8314097d76eede0db94 Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Tue, 24 Jan 2023 19:55:53 -0300 Subject: [PATCH 2/8] Addding keys for testing --- node_modules/.package-lock.json | 2 +- package-lock.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 884f4ba..b1da49e 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -1,5 +1,5 @@ { - "name": "repo", + "name": "cardano-cli-starter-kit", "lockfileVersion": 2, "requires": true, "packages": { diff --git a/package-lock.json b/package-lock.json index b5e27b4..58dc9b2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,5 +1,5 @@ { - "name": "repo", + "name": "cardano-cli-starter-kit", "lockfileVersion": 2, "requires": true, "packages": { From 5e117878b495879274f2ba36206c850b3085006a Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Tue, 24 Jan 2023 20:01:36 -0300 Subject: [PATCH 3/8] Revert "Addding keys for testing" - No need vkeys, skey, and address This reverts commit 
2f7dd0e151912d74b247c8314097d76eede0db94. --- node_modules/.package-lock.json | 2 +- package-lock.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index b1da49e..884f4ba 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -1,5 +1,5 @@ { - "name": "cardano-cli-starter-kit", + "name": "repo", "lockfileVersion": 2, "requires": true, "packages": { diff --git a/package-lock.json b/package-lock.json index 58dc9b2..b5e27b4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,5 +1,5 @@ { - "name": "cardano-cli-starter-kit", + "name": "repo", "lockfileVersion": 2, "requires": true, "packages": { From fda8bf974ff7b1251b52fdfd07915dbd93ff7860 Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Fri, 3 Mar 2023 21:15:28 +0000 Subject: [PATCH 4/8] bash Common, queyAddress y SendLovelaces. --- 00_common.sh | 1549 +++++++++++++++++ 01_queryAddress.sh | 239 +++ 01_sendLovelaces.sh | 641 +++++++ mithr/demo-wallet/commandos.txt | 135 ++ .../demo-wallet/policy/tokens/token_meta.json | 10 + receta.js | 51 + sendPrescription.sh | 27 + 7 files changed, 2652 insertions(+) create mode 100644 00_common.sh create mode 100755 01_queryAddress.sh create mode 100755 01_sendLovelaces.sh create mode 100644 mithr/demo-wallet/commandos.txt create mode 100644 mithr/demo-wallet/policy/tokens/token_meta.json create mode 100644 receta.js create mode 100755 sendPrescription.sh diff --git a/00_common.sh b/00_common.sh new file mode 100644 index 0000000..ecacdcd --- /dev/null +++ b/00_common.sh @@ -0,0 +1,1549 @@ +#!/bin/bash +unset magicparam network addrformat + +############################################################################################################################## +# +# MAIN CONFIG FILE: +# +# Please set the following variables to your needs, you can overwrite them dynamically +# by placing a file with name "common.inc" in the calling directory or 
in "$HOME/.common.inc". +# It will be sourced into this file automatically if present and can overwrite the values below dynamically :-) +# +############################################################################################################################## + + +#--------- Set the Path to your node socket file and to your genesis files here --------- +socket="/ipc/node.socket" #Path to your cardano-node socket for machines in online-mode. Another example would be "$HOME/cnode/sockets/node.socket" +genesisfile="./mainnet-shelley-genesis.json" #Shelley-Genesis path, you can also use the placeholder $HOME to specify your home directory +genesisfile_byron="mainnet-byron-genesis.json" #Byron-Genesis path, you can also use the placeholder $HOME to specify your home directory + + +#--------- Set the Path to your main binaries here --------- +cardanocli="/usr/local/bin/cardano-cli" #Path to your cardano-cli binary you wanna use. If your binary is present in the Path just set it to "cardano-cli" without the "./" infront +cardanonode="/usr/local/bin/cardano-node" #Path to your cardano-node binary you wanna use. If your binary is present in the Path just set it to "cardano-node" without the "./" infront +bech32_bin="/usr/local/bin/bech32" #Path to your bech32 binary you wanna use. 
If your binary is present in the Path just set it to "bech32" without the "./" infront + + +#--------- You can work in offline mode too, please read the instructions on the github repo README :-) +offlineMode="no" #change this to "yes" if you run these scripts on a cold machine, it need a counterpart with set to "no" on a hot machine +offlineFile="./offlineTransfer.json" #path to the filename (JSON) that will be used to transfer the data between a hot and a cold machine + + +#------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + + +#--------- Only needed if you wanna do catalyst voting or include your itn witness for your pool-ticker +cardanosigner="./cardano-signer" #Path to your cardano-signer binary you wanna use. If your binary is present in the Path just set it to "cardano-signer" without the "./" infront + + +#--------- Only needed if you wanna do catalyst voting +catalyst_toolbox_bin="./catalyst-toolbox" #Path to your catalyst-toolbox binary you wanna use. If your binary is present in the Path just set it to "catalyst-toolbox" without the "./" infront +cardanoaddress="./cardano-address" #Path to your cardano-address binary you wanna use. If your binary is present in the Path just set it to "cardano-address" without the "./" infront + + +#--------- Only needed if you wanna use a hardware key (Ledger/Trezor) too, please read the instructions on the github repo README :-) +cardanohwcli="cardano-hw-cli" #Path to your cardano-hw-cli binary you wanna use. If your binary is present in the Path just set it to "cardano-hw-cli" without the "./" infront + + +#--------- Only needed if you wanna generate the right format for the NativeAsset Metadata Registry +cardanometa="./token-metadata-creator" #Path to your token-metadata-creator binary you wanna use. 
If present in the Path just set it to "token-metadata-creator" without the "./" infront + +#--------- Only needed if you wanna change the BlockChain from the Mainnet to a Testnet Chain Setup, uncomment the network you wanna use by removing the leading # +# Using a preconfigured network name automatically loads and sets the magicparam, addrformat and byronToShelleyEpochs parameters, also API-URLs, etc. + +#network="Mainnet" #Mainnet (Default) +#network="PreProd" #PreProd Testnet (new default Testnet) +network="Preview" #Preview Testnet (new fast Testnet) +#network="Legacy" #Legacy TestChain (formally known as Public-Testnet) +#network="GuildNet" #GuildNet Testnet + +#--------- You can of course specify your own values by setting a new network=, magicparam=, addrformat= and byronToShelleyEpochs= parameter :-) +#network="new-devchain"; magicparam="--testnet-magic 11111"; addrformat="--testnet-magic 11111"; byronToShelleyEpochs=6 #Custom Chain settings + + + +#--------- some other stuff ----- +showVersionInfo="yes" #yes/no to show the version info and script mode on every script call +queryTokenRegistry="yes" #yes/no to query each native asset/token on the token registry server live +cropTxOutput="yes" #yes/no to crop the unsigned/signed txfile outputs on transactions to a max. of 4000chars + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +############################################################################################################################## +# +# 'DONT EDIT BELOW THIS LINE !!!' +# +############################################################################################################################## + + +#------------------------------------------------------- +#DisplayMajorErrorMessage +majorError() { +echo -e "\e[97m\n" > $(tty) +echo -e " _ ._ _ , _ ._\n (_ ' ( \` )_ .__)\n ( ( ( ) \`) ) _)\n (__ (_ (_ . _) _) ,__)\n \`~~\`\\ ' . 
/\`~~\`\n ; ;\n / \\ \n_____________/_ __ \\___________________________________________\n" > $(tty) +echo -e "\e[35m${1}\n\nIf you think all is right at your side, please check the GitHub repo if there\nis a newer version/bugfix available, thx: https://github.com/gitmachtl/scripts\e[0m\n" > $(tty); exit 1; +} +#------------------------------------------------------- + +#API Endpoints and Network-Settings for the various chains + +network=${network:-mainnet} #sets the default network to mainnet, if not set otherwise +unset _magicparam _addrformat _byronToShelleyEpochs _tokenMetaServer _transactionExplorer _koiosAPI _adahandlePolicyID + +#Load and overwrite variables via env files if present +scriptDir=$(dirname "$0" 2> /dev/null) +if [[ -f "${scriptDir}/common.inc" ]]; then source "${scriptDir}/common.inc"; fi +if [[ -f "$HOME/.common.inc" ]]; then source "$HOME/.common.inc"; fi +if [[ -f "common.inc" ]]; then source "common.inc"; fi + +#Set the list of preconfigured networknames +networknames="mainnet, preprod, preview, legacy, vasildev" + +#Check if there are testnet parameters set but network is still "mainnet" +if [[ "${magicparam}${addrformat}" == *"testnet"* && "${network,,}" == "mainnet" ]]; then majorError "Mainnet selected, but magicparam(${magicparam})/addrformat(${addrformat}) have testnet settings!\n\nPlease select the right chain in the '00_common.sh', '${scriptDir}/common.inc', '$HOME/.common.inc' or './common.inc' file by setting the value for the parameter network to one of the preconfiged networknames:\n${networknames}\n\nThere is no need anymore, to set the parameters magicparam/addrformat/byronToShelleyEpochs for the preconfigured networks. 
Its enough to specify it for example with: network=\"preprod\"\nOf course you can still set them and also set a custom networkname like: network=\"vasil-dev\""; exit 1; fi + + +#Preload the variables, based on the "network" name +case "${network,,}" in + + "mainnet" ) + network="Mainnet" #nicer name for info-display + _magicparam="--mainnet" #MagicParameter Extension --mainnet / --testnet-magic xxx + _addrformat="--mainnet" #Addressformat for the address generation, normally the same as magicparam + _byronToShelleyEpochs=208 #The number of Byron Epochs before the Chain forks to Shelley-Era + _tokenMetaServer="https://tokens.cardano.org/metadata/" #Token Metadata API URLs -> autoresolve into ${tokenMetaServer}/ + _transactionExplorer="https://cardanoscan.io/transaction/" #URLS for the Transaction-Explorers -> autoresolve into ${transactionExplorer}/ + _koiosAPI="https://api.koios.rest/api/v0" #Koios-API URLs -> autoresolve into ${koiosAPI} + _adahandlePolicyID="f0ff48bbb7bbe9d59a40f1ce90e9e9d0ff5002ec48f232b49ca0fb9a" #PolicyIDs for the adaHandles -> autoresolve into ${adahandlePolicyID} + ;; + + + "legacy"|"testnet" ) + network="Legacy" + _magicparam="--testnet-magic 1097911063" + _addrformat="--testnet-magic 1097911063" + _byronToShelleyEpochs=74 + _tokenMetaServer="https://metadata.cardano-testnet.iohkdev.io/metadata" + _transactionExplorer="https://testnet.cexplorer.io/tx" + _koiosAPI= + _adahandlePolicyID="8d18d786e92776c824607fd8e193ec535c79dc61ea2405ddf3b09fe3" + ;; + + + "preprod"|"pre-prod" ) + network="PreProd" + _magicparam="--testnet-magic 1" + _addrformat="--testnet-magic 1" + _byronToShelleyEpochs=4 + _tokenMetaServer="https://metadata.cardano-testnet.iohkdev.io/metadata" + _transactionExplorer="https://preprod.cardanoscan.io/transaction" + _koiosAPI="https://preprod.koios.rest/api/v0" + _adahandlePolicyID="f0ff48bbb7bbe9d59a40f1ce90e9e9d0ff5002ec48f232b49ca0fb9a" #PolicyIDs for the adaHandles -> autoresolve into ${adahandlePolicyID} + ;; + + + 
"preview"|"pre-view" ) + network="Preview" + _magicparam="--testnet-magic 2" + _addrformat="--testnet-magic 2" + _byronToShelleyEpochs=0 + _tokenMetaServer="https://metadata.cardano-testnet.iohkdev.io/metadata" + _transactionExplorer="https://preview.cardanoscan.io/transaction" + _koiosAPI="https://preview.koios.rest/api/v0" + _adahandlePolicyID="f0ff48bbb7bbe9d59a40f1ce90e9e9d0ff5002ec48f232b49ca0fb9a" #PolicyIDs for the adaHandles -> autoresolve into ${adahandlePolicyID} + ;; + + + "guildnet"|"guild-net" ) + network="GuildNet" + _magicparam="--testnet-magic 141" + _addrformat="--testnet-magic 141" + _byronToShelleyEpochs=2 + _tokenMetaServer="https://metadata.cardano-testnet.iohkdev.io/metadata" + _transactionExplorer= + _koiosAPI="https://guild.koios.rest/api/v0" + _adahandlePolicyID="f0ff48bbb7bbe9d59a40f1ce90e9e9d0ff5002ec48f232b49ca0fb9a" #PolicyIDs for the adaHandles -> autoresolve into ${adahandlePolicyID} + ;; + +esac + + +#Assign the values to the used variables if not defined before with an other value +magicparam=${magicparam:-"${_magicparam}"} +addrformat=${addrformat:-"${_addrformat}"} +byronToShelleyEpochs=${byronToShelleyEpochs:-"${_byronToShelleyEpochs}"} +tokenMetaServer=${tokenMetaServer:-"${_tokenMetaServer}"} +transactionExplorer=${transactionExplorer:-"${_transactionExplorer}"} +koiosAPI=${koiosAPI:-"${_koiosAPI}"} +adahandlePolicyID=${adahandlePolicyID:-"${_adahandlePolicyID}"} + + +#Check about the / at the end of the URLs +if [[ "${tokenMetaServer: -1}" == "/" ]]; then tokenMetaServer=${tokenMetaServer%?}; fi #make sure the last char is not a / +if [[ "${koiosAPI: -1}" == "/" ]]; then koiosAPI=${koiosAPI%?}; fi #make sure the last char is not a / +if [[ "${transactionExplorer: -1}" == "/" ]]; then transactionExplorer=${transactionExplorer%?}; fi #make sure the last char is not a / + + +#Check about the needed chain params +if [[ "${magicparam}" == "" || ${addrformat} == "" || ${byronToShelleyEpochs} == "" ]]; then majorError "The 
'magicparam', 'addrformat' or 'byronToShelleyEpochs' is not set!\nOr maybe you have set the wrong parameter network=\"${network}\" ?\nList of preconfigured network-names: ${networknames}"; exit 1; fi + +#Don't allow to overwrite the needed Versions, so we set it after the overwrite part +minNodeVersion="1.35.4" #minimum allowed node version for this script-collection version +maxNodeVersion="9.99.9" #maximum allowed node version, 9.99.9 = no limit so far +minLedgerCardanoAppVersion="5.0.0" #minimum version for the cardano-app on the Ledger HW-Wallet +minTrezorCardanoAppVersion="2.5.3" #minimum version for the firmware on the Trezor HW-Wallet +minHardwareCliVersion="1.12.0" #minimum version for the cardano-hw-cli +minCardanoAddressVersion="3.11.0" #minimum version for the cardano-address binary +minCardanoSignerVersion="1.11.0" #minimum version for the cardano-signer binary +minCatalystToolboxVersion="0.5.0" #minimum version for the catalyst-toolbox binary + + +#Set the CARDANO_NODE_SOCKET_PATH for all cardano-cli operations +export CARDANO_NODE_SOCKET_PATH=${socket} + +#Set the bc linebreak to a big number so we can work with really biiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiig numbers +export BC_LINE_LENGTH=1000 + +#Setting online/offline variables and offlineFile default value, versionInfo, tokenRegistryquery, tx output cropping to boolean values +if [[ "${offlineMode^^}" == "YES" ]]; then offlineMode=true; onlineMode=false; else offlineMode=false; onlineMode=true; fi +if [[ "${offlineFile}" == "" ]]; then offlineFile="./offlineTransfer.json"; fi +if [[ "${showVersionInfo^^}" == "NO" ]]; then showVersionInfo=false; else showVersionInfo=true; fi +if [[ "${queryTokenRegistry^^}" == "NO" ]]; then queryTokenRegistry=false; else queryTokenRegistry=true; fi +if [[ "${cropTxOutput^^}" == "NO" ]]; then cropTxOutput=false; else cropTxOutput=true; fi + + +#------------------------------------------------------------- +#Do a cli and node version check +versionCheck() { printf 
'%s\n%s' "${1}" "${2}" | sort -C -V; } #$1=minimal_needed_version, $2=current_node_version + +exists() { + command -v "$1" >/dev/null 2>&1 +} + +#Check cardano-cli +if ! exists "${cardanocli}"; then majorError "Path ERROR - Path to cardano-cli is not correct or cardano-cli binaryfile is missing!\nYour current set path is: ${cardanocli}"; exit 1; fi +versionCLI=$(${cardanocli} version 2> /dev/null |& head -n 1 |& awk {'print $2'}) +versionCheck "${minNodeVersion}" "${versionCLI}" +if [[ $? -ne 0 ]]; then majorError "Version ${versionCLI} ERROR - Please use a cardano-cli version ${minNodeVersion} or higher !\nOld versions are not supported for security reasons, please upgrade - thx."; exit 1; fi +versionCheck "${versionCLI}" "${maxNodeVersion}" +if [[ $? -ne 0 ]]; then majorError "Version ${versionCLI} ERROR - Please use a cardano-cli version between ${minNodeVersion} and ${maxNodeVersion} !\nOther versions are not supported for compatibility issues, please check if newer scripts are available - thx."; exit 1; fi +if ${showVersionInfo}; then echo -ne "\n\e[0mVersion-Info: \e[32mcli ${versionCLI}\e[0m"; fi + +#Check cardano-node only in online mode +if ${onlineMode}; then + if ! exists "${cardanonode}"; then majorError "Path ERROR - Path to cardano-node is not correct or cardano-node binaryfile is missing!\nYour current set path is: ${cardanocli}"; exit 1; fi + versionNODE=$(${cardanonode} version 2> /dev/null |& head -n 1 |& awk {'print $2'}) + versionCheck "${minNodeVersion}" "${versionNODE}" + if [[ $? -ne 0 ]]; then majorError "Version ${versionNODE} ERROR - Please use a cardano-node version ${minNodeVersion} or higher !\nOld versions are not supported for security reasons, please upgrade - thx."; exit 1; fi + versionCheck "${versionNODE}" "${maxNodeVersion}" + if [[ $? 
-ne 0 ]]; then majorError "Version ${versionNODE} ERROR - Please use a cardano-node version between ${minNodeVersion} and ${maxNodeVersion} !\nOther versions are not supported for compatibility issues, please check if newer scripts are available - thx."; exit 1; fi + if ${showVersionInfo}; then echo -ne " / \e[32mnode ${versionNODE}\e[0m"; fi +fi + +#Check bech32 tool if given path is ok, if not try to use the one in the scripts folder +if ! exists "${bech32_bin}"; then + #Try the one in the scripts folder + if [[ -f "${scriptDir}/bech32" ]]; then bech32_bin="${scriptDir}/bech32"; + else majorError "Path ERROR - Path to the 'bech32' binary is not correct or 'bech32' binaryfile is missing!\nYou can find it here: https://github.com/input-output-hk/bech32/releases/latest\nThis is needed to calculate the correct Bech32-Assetformat like 'asset1ee0u29k4xwauf0r7w8g30klgraxw0y4rz2t7xs'."; exit 1; fi +fi + +#Display current Mode (online or offline) +if ${showVersionInfo}; then + if ${offlineMode}; then + echo -ne "\t\tScripts-Mode: \e[32moffline\e[0m"; + else + echo -ne "\t\tScripts-Mode: \e[36monline\e[0m"; + if [ ! -e "${socket}" ]; then echo -ne "\n\n\e[35mWarning: Node-Socket does not exist !\e[0m"; fi + fi + + if [[ "${magicparam}" == *"mainnet"* ]]; then + echo -ne "\t\t\e[32mMainnet\e[0m"; + else + echo -ne "\t\t\e[91mTestnet: ${network} (magic $(echo ${magicparam} | awk {'print $2'}))\e[0m"; + fi + +echo +echo +fi + +#------------------------------------------------------------- +#Check path to genesis files +# if [[ ! -f "${genesisfile}" ]]; then majorError "Path ERROR - Path to the shelley genesis file '${genesisfile}' is wrong or the file is missing!"; exit 1; fi +# if [[ ! -f "${genesisfile_byron}" ]]; then majorError "Path ERROR - Path to the byron genesis file '${genesisfile_byron}' is wrong or the file is missing!"; exit 1; fi + + + +#------------------------------------------------------------- +#Check if curl, jq, bc and xxd is installed +if ! 
exists curl; then echo -e "\e[33mYou need the little tool 'curl', its needed to fetch online data !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install curl\n\n\e[33mThx! :-)\e[0m\n"; exit 2; fi +if ! exists jq; then echo -e "\e[33mYou need the little tool 'jq', its needed to do the json processing !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install jq\n\n\e[33mThx! :-)\e[0m\n"; exit 2; fi +if ! exists bc; then echo -e "\e[33mYou need the little tool 'bc', its needed to do larger number calculations !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install bc\n\n\e[33mThx! :-)\e[0m\n"; exit 2; fi +if ! exists xxd; then echo -e "\e[33mYou need the little tool 'xxd', its needed to convert hex strings !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install xxd\n\n\e[33mThx! :-)\e[0m\n"; exit 2; fi + + +#------------------------------------------------------------- +#Searching for the temp directory (used for transactions files) +tempDir=$(dirname $(mktemp -ut tmp.XXXX)) + + + + +#------------------------------------------------------- +#AddressType check +check_address() { +tmp=$(${cardanocli} address info --address $1 2> /dev/null) +if [[ $? 
-ne 0 ]]; then echo -e "\e[35mERROR - Unknown address format for address: $1 !\e[0m"; exit 1; fi +era=$(jq -r .era <<< ${tmp} 2> /dev/null) +if [[ "${era^^}" == "BYRON" ]]; then echo -e "\e[33mINFO - Byron addresses are only supported as a destination address!\e[0m\n"; fi +} + +get_addressType() { +${cardanocli} address info --address $1 2> /dev/null | jq -r .type +} + +get_addressEra() { +${cardanocli} address info --address $1 2> /dev/null | jq -r .era +} + +addrTypePayment="payment" +addrTypeStake="stake" + +#------------------------------------------------------- +#AdaHandle Format check (exits with true or false) +checkAdaHandleFormat() { + #AdaHandles with optional SubHandles + if [[ "${1,,}" =~ ^\$[a-z0-9_.-]{1,15}(@[a-z0-9_.-]{1,15})?$ ]]; then true; else false; fi +} + + +#------------------------------------------------------------- +#Subroutine for user interaction +ask() { + local prompt default reply + + if [ "${2:-}" = "Y" ]; then + prompt="Y/n" + default=Y + elif [ "${2:-}" = "N" ]; then + prompt="y/N" + default=N + else + prompt="y/n" + default= + fi + + while true; do + + # Ask the question (not using "read -p" as it uses stderr not stdout) + echo -ne "$1 [$prompt] " + + # Read the answer (use /dev/tty in case stdin is redirected from somewhere else) + read reply $(tty) #redirect to the tty output + IFS= read -s pass #read in the password but don't show it + local hidden=$(sed 's/./*/g' <<< ${pass}) + echo -ne "${hidden}" > $(tty) #show stars for the chars + echo -n "${pass}" #pass the password to the calling instance + unset pass #unset the variable +} +#------------------------------------------------------- + + + + +#------------------------------------------------------- +#Subroutines to set read/write flags for important files +file_lock() +{ +if [ -f "$1" ]; then chmod 400 "$1"; fi +} + +file_unlock() +{ +if [ -f "$1" ]; then chmod 600 "$1"; fi +} +#------------------------------------------------------- + 
+#------------------------------------------------------- +#Subroutines to calculate current epoch from genesis.json offline +get_currentEpoch() +{ +local startTimeGenesis=$(cat ${genesisfile} | jq -r .systemStart) +local startTimeSec=$(date --date=${startTimeGenesis} +%s) #in seconds (UTC) +local currentTimeSec=$(date -u +%s) #in seconds (UTC) +local epochLength=$(cat ${genesisfile} | jq -r .epochLength) +local currentEPOCH=$(( (${currentTimeSec}-${startTimeSec}) / ${epochLength} )) #returns a integer number, we like that +echo ${currentEPOCH} +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Subroutines to calculate time until next epoch from genesis.json offline +get_timeUntilNextEpoch() +{ +local startTimeGenesis=$(cat ${genesisfile} | jq -r .systemStart) +local startTimeSec=$(date --date=${startTimeGenesis} +%s) #in seconds (UTC) +local currentTimeSec=$(date -u +%s) #in seconds (UTC) +local epochLength=$(cat ${genesisfile} | jq -r .epochLength) +local currentEPOCH=$(( (${currentTimeSec}-${startTimeSec}) / ${epochLength} )) #returns a integer number, we like that +local timeUntilNextEpoch=$(( ${epochLength} - (${currentTimeSec}-${startTimeSec}) + (${currentEPOCH}*${epochLength}) )) +echo ${timeUntilNextEpoch} +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#Subroutines to calculate current slotHeight(tip) depending on online/offline mode +get_currentTip() +{ +if ${onlineMode}; then + local currentTip=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .slot 2> /dev/null); #only "slot" instead of "slotNo" since 1.26.0 + + #if the return is blank (bug in the cli), then retry 2 times. 
if failing again, exit with a majorError + if [[ "${currentTip}" == "" ]]; then local currentTip=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .slot 2> /dev/null); + if [[ "${currentTip}" == "" ]]; then local currentTip=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .slot 2> /dev/null); + if [[ "${currentTip}" == "" ]]; then majorError "query tip return from cardano-cli failed"; exit 1; fi + fi + fi + else + #Static + local slotLength=$(cat ${genesisfile} | jq -r .slotLength) #In Secs + local epochLength=$(cat ${genesisfile} | jq -r .epochLength) #In Secs + local slotsPerKESPeriod=$(cat ${genesisfile} | jq -r .slotsPerKESPeriod) #Number + local startTimeByron=$(cat ${genesisfile_byron} | jq -r .startTime) #In Secs(abs) + local startTimeGenesis=$(cat ${genesisfile} | jq -r .systemStart) #In Text + local startTimeSec=$(date --date=${startTimeGenesis} +%s) #In Secs(abs) + local transTimeEnd=$(( ${startTimeSec}+(${byronToShelleyEpochs}*${epochLength}) )) #In Secs(abs) End of the TransitionPhase + local byronSlots=$(( (${startTimeSec}-${startTimeByron}) / 20 )) #NumSlots between ByronChainStart and ShelleyGenesisStart(TransitionStart) + local transSlots=$(( (${byronToShelleyEpochs}*${epochLength}) / 20 )) #NumSlots in the TransitionPhase + + #Dynamic + local currentTimeSec=$(date -u +%s) + + #Calculate current slot + if [[ "${currentTimeSec}" -lt "${transTimeEnd}" ]]; + then #In Transistion Phase between ShelleyGenesisStart and TransitionEnd + local currentTip=$(( ${byronSlots} + (${currentTimeSec}-${startTimeSec}) / 20 )) + else #After Transition Phase + local currentTip=$(( ${byronSlots} + ${transSlots} + ((${currentTimeSec}-${transTimeEnd}) / ${slotLength}) )) + fi + + fi +echo ${currentTip} +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#Subroutines to calculate current TTL +get_currentTTL() +{ +echo $(( $(get_currentTip) + 100000 )) #100000 so a little over a 
day to have time to collect witnesses and transmit the transaction +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Subroutines to check the syncState of the node +get_currentSync() +{ +if ${onlineMode}; then + local currentSync=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .syncProgress 2> /dev/null); + + #if the return is blank (bug in the cli), then retry 2 times. if failing again, exit with a majorError + if [[ "${currentSync}" == "" ]]; then local currentSyncp=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .syncProgress 2> /dev/null); + if [[ "${currentSync}" == "" ]]; then local currentTip=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r .syncProgress 2> /dev/null); + if [[ "${currentSync}" == "" ]]; then majorError "query tip return from cardano-cli failed"; exit 1; fi + fi + fi + + if [[ ${currentSync} == "100.00" ]]; then echo "synced"; else echo "unsynced"; fi + + else + echo "offline" +fi +} +#------------------------------------------------------- + + + +#------------------------------------------------------- +#Displays an Errormessage if parameter is not 0 +checkError() +{ +if [[ $1 -ne 0 ]]; then echo -e "\n\n\e[35mERROR (Code $1) !\e[0m\n"; exit $1; fi +} +#------------------------------------------------------- + +#------------------------------------------------------- +#TrimString +function trimString +{ + echo "$1" | sed -n '1h;1!H;${;g;s/^[ \t]*//g;s/[ \t]*$//g;p;}' +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Return the era the online node is in +get_NodeEra() { +local tmpEra=$(${cardanocli} query tip ${magicparam} 2> /dev/null | jq -r ".era | select (.!=null)" 2> /dev/null) +if [[ ! 
"${tmpEra}" == "" ]]; then tmpEra=${tmpEra,,}; else tmpEra="auto"; fi +echo "${tmpEra}"; return 0; #return era in lowercase +} +##Set nodeEra parameter ( --byron-era, --shelley-era, --allegra-era, --mary-era, --alonzo-era, --babbage-era or empty) +if ${onlineMode}; then tmpEra=$(get_NodeEra); else tmpEra=$(jq -r ".protocol.era" 2> /dev/null < ${offlineFile}); fi +if [[ ! "${tmpEra}" == "auto" ]]; then nodeEraParam="--${tmpEra}-era"; else nodeEraParam=""; fi + +#Temporary fix to lock the transaction build-raw to alonzo era for +#Hardware-Wallet operations. Babbage-Era is not yet supported, so we will lock this for now +#if [[ "${nodeEraParam}" == "" ]] || [[ "${nodeEraParam}" == "--babbage-era" ]]; then nodeEraParam="--alonzo-era"; fi + + +#------------------------------------------------------- + + + +#------------------------------------------------------- +#Converts a raw UTXO query output into the new UTXO JSON style since 1.26.0, but with stringnumbers +#Building the JSON structure from scratch, way faster than using jq for it +generate_UTXO() #Parameter1=RawUTXO, Parameter2=Address +{ + + #Convert given bech32 address into a base16(hex) address, not needed in theses scripts, but to make a true 1:1 copy of the normal UTXO JSON output + #local utxoAddress=$(${cardanocli} address info --address ${2} 2> /dev/null | jq -r .base16); if [[ $? 
-ne 0 ]]; then local utxoAddress=${2}; fi + local utxoAddress=${2} + local utxoJSON="{" #start with a blank JSON skeleton and an open { + + while IFS= read -r line; do + IFS=' ' read -ra utxo_entry <<< "${line}" # utxo_entry array holds entire utxo string + + local utxoHashIndex="${utxo_entry[0]}#${utxo_entry[1]}" + + #There are lovelaces on the UTXO -> check if the name is "lovelace" or if there are just 3 arguments + if [[ "${utxo_entry[3]}" == "lovelace" ]] || [[ ${#utxo_entry[@]} -eq 3 ]]; then + local idx=5; #normal indexstart for the next checks + local utxoAmountLovelaces=${utxo_entry[2]}; + else + local idx=2; #earlier indexstart, because no lovelaces present + local utxoAmountLovelaces=0; + fi + + #Build the entry for each UtxoHashIndex, start with the hash and the entry for the address and the lovelaces + local utxoJSON+="\"${utxoHashIndex}\": { \"address\": \"${utxoAddress}\", \"value\": { \"lovelace\": \"${utxoAmountLovelaces}\"" + + #value part is open + local value_open=true + + local idxCompare=$(( ${idx} - 1 )) + local old_asset_policy="" + local policy_open=false + + #Add the Token entries if tokens available, also check for data (script) entries + if [[ ${#utxo_entry[@]} -gt ${idxCompare} ]]; then # contains tokens + + while [[ ${#utxo_entry[@]} -gt ${idx} ]]; do #check if there are more entries, and the amount is a number + local next_entry=${utxo_entry[${idx}]} + + #if the next entry is a number -> process asset/tokendata + if [[ "${next_entry}" =~ ^[0-9]+$ ]]; then + local asset_amount=${next_entry} + local asset_hash_name="${utxo_entry[$((idx+1))]}" + IFS='.' 
read -ra asset <<< "${asset_hash_name}" + local asset_policy=${asset[0]} + + #Open up a policy if it is a different one + if [[ "${asset_policy}" != "${old_asset_policy}" ]]; then #open up a new policy + if ${policy_open}; then local utxoJSON="${utxoJSON%?}}"; fi #close the previous policy first and remove the last , from the last assetname entry of the previous policy + local utxoJSON+=", \"${asset_policy}\": {" + local policy_open=true + local old_asset_policy=${asset_policy} + fi + + local asset_name=${asset[1]} + #Add the Entry of the Token + local utxoJSON+="\"${asset_name}\": \"${asset_amount}\"," # the , will be deleted when the policy part closes + local idx=$(( ${idx} + 3 )) + + #if its a data entry, add the datumhash key-field to the json output + elif [[ "${next_entry}" == "TxOutDatumHash" ]] && [[ "${utxo_entry[$((idx+1))]}" == *"Data"* ]]; then + if ${policy_open}; then local utxoJSON="${utxoJSON%?}}"; local policy_open=false; fi #close the previous policy first and remove the last , from the last assetname entry of the previous policy + if ${value_open}; then local utxoJSON+="}"; local value_open=false; fi #close the open value part + local data_entry_hash=${utxo_entry[$((idx+2))]} + #Add the Entry for the data(datumhash) + local utxoJSON+=",\"datumhash\": \"${data_entry_hash//\"/}\"" + local idx=$(( ${idx} + 4 )) + + #stop the decoding if an entry related to a "Datum" is found that is not the "TxOutDatumHash" from above, can be extended in the future if needed + elif [[ "${next_entry^^}" == *"DATUM"* ]]; then break + + else + local idx=$(( ${idx} + 1 )) #go to the next entry of the array + fi + done + fi + + #close policy if still open + if ${policy_open}; then local utxoJSON="${utxoJSON%?}}"; fi #close the previous policy first and remove the last char "," from the last assetname entry of the previous policy + + #close value part if still open + if ${value_open}; then local utxoJSON+="}"; fi #close the open value part + + #close the utxo part + 
local utxoJSON+="}," #the last char "," will be deleted at the end + +done < <(printf "${1}\n" | tail -n +3) #read in from parameter 1 (raw utxo) but cut first two lines + + #close the whole json but delete the last char "," before that. do it only if there are entries present (length>1), else return an empty json + if [[ ${#utxoJSON} -gt 1 ]]; then echo "${utxoJSON%?}}"; else echo "{}"; fi; + +} +#------------------------------------------------------- + + + +#------------------------------------------------------- +#Cuts out all UTXOs in a mary style UTXO JSON that are not the given UTXO hash ($2) +#The given UTXO hash can be multiple UTXO hashes with the or separator | for egrep +filterFor_UTXO_old() +{ +local inJSON=${1} +local searchUTXO=${2} +local outJSON=${inJSON} +local utxoEntryCnt=$(jq length <<< ${inJSON}) +local tmpCnt=0 +for (( tmpCnt=0; tmpCnt<${utxoEntryCnt}; tmpCnt++ )) +do +local utxoHashIndex=$(jq -r "keys[${tmpCnt}]" <<< ${inJSON}) +if [[ $(echo "${utxoHashIndex}" | egrep "${searchUTXO}" | wc -l) -eq 0 ]]; then local outJSON=$( jq "del (.\"${utxoHashIndex}\")" <<< ${outJSON}); fi +done +echo "${outJSON}" +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Cuts out all UTXOs in a mary style UTXO JSON that are not the given UTXO hash ($2) +#The given UTXO hash can be multiple UTXO hashes with the separator | +filterFor_UTXO() +{ +local inJSON=${1} +local searchUTXO=${2} +local outJSON="{}" + +IFS='|' read -ra searchUTXOs <<< "${searchUTXO}" #split the given utxos on the | separator +local noOfSearchUTXOs=${#searchUTXOs[@]} +for (( tmpCnt=0; tmpCnt<${noOfSearchUTXOs}; tmpCnt++ )) +do + local utxoHashIndex=${searchUTXOs[${tmpCnt}]} #the current hashindex + local sourceUTXO=$(jq -r .\"${utxoHashIndex}\" <<< ${inJSON}) #the hashindex of the source json + if [[ "${sourceUTXO}" != "null" ]]; then local outJSON=$( jq ". 
+= { \"${utxoHashIndex}\": ${sourceUTXO} }" <<< ${outJSON}); fi +done +echo "${outJSON}" +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Convert PolicyID|assetName TokenName into Bech32 format "token1....." +convert_tokenName2BECH() { + #${1} = policyID | assetName as a HEX String + #${2} = assetName in ASCII or empty +local tmp_policyID=$(trimString "${1}") #make sure there are not spaces before and after +local tmp_assetName=$(trimString "${2}") +if [[ ! "${tmp_assetName}" == "" ]]; then local tmp_assetName=$(echo -n "${tmp_assetName}" | xxd -b -ps -c 80 | tr -d '\n'); fi + +#echo -n "${tmp_policyID}${tmp_assetName}" | xxd -r -ps | b2sum -l 160 -b | cut -d' ' -f 1 | ${bech32_bin} asset +echo -n "${tmp_policyID}${tmp_assetName}" | xxd -r -ps | b2sum -l 160 -b | awk {'print $1'} | ${bech32_bin} asset +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Convert ASCII assetName into HEX assetName +convert_assetNameASCII2HEX() { +echo -n "${1}" | xxd -b -ps -c 80 | tr -d '\n' +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Convert HEX assetName into ASCII assetName +convert_assetNameHEX2ASCII() { +echo -n "${1}" | xxd -r -ps +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Convert HEX assetName into ASCII assetName. If possible return ".assetName" else return just the HEX assetName without a leading point'.' 
+convert_assetNameHEX2ASCII_ifpossible() { +if [[ "${1}" =~ ^(..){0,}00(.+)?$ ]]; then echo -n "${1}"; #if the given hexstring contains a nullbyte -> return the hexstring +else + local tmpAssetName=$(echo -n "${1}" | xxd -r -ps) + if [[ "${tmpAssetName}" == "${tmpAssetName//[^[:alnum:]]/}" ]]; then echo -n ".${tmpAssetName}"; else echo -n "${1}"; fi +fi +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#Calculate the minimum UTXO value that has to be sent depending on the assets and the minUTXO protocol-parameters +calc_minOutUTXOcli() { + #${1} = protocol-parameters(json format) content + #${2} = tx-out string + +local protocolParam=${1} +###local multiAsset=$(echo ${2} | cut -d'+' -f 3-) #split at the + marks and only keep assets +tmp=$(${cardanocli} transaction calculate-min-required-utxo ${nodeEraParam} --protocol-params-file <(echo "${protocolParam}") --tx-out "${2}" 2> /dev/null) + +if [[ $? -ne 0 ]]; then echo -e "\e[35mERROR - Can't calculate minValue for the given tx-out string: ${2} !\e[0m"; exit 1; fi +echo ${tmp} | cut -d' ' -f 2 #Output is "Lovelace xxxxxx", so return the second part +} + + +#------------------------------------------------------- +#Calculate the minimum UTXO value that has to be sent depending on the assets and the protocol-parameters +calc_minOutUTXO() { + + #${1} = protocol-parameters(json format) content + #${2} = tx-out string + +local protocolParam=${1} +IFS='+' read -ra asset_entry <<< "${2}" #split the tx-out string into address, lovelaces, assets (read it into asset_entry array) + +#protocol version major +#7=babbage, 5+6=alonzo, 4=mary, 3=allegra, 2=shelley, 0+1=byron +local protocolVersionMajor=$(jq -r ".protocolVersion.major | select (.!=null)" <<< ${protocolParam}) + + +### switch the method of the minOutUTXO calculation depending on the current era, starting with protocolVersionMajor>=7 (babbage) +if [[ ${protocolVersionMajor} -ge 7 ]]; then 
#7=Babbage and above, new since babbage: CIP-0055 -> minOutUTXO depends on the cbor bytes length + + #chain constants for babbage + local constantOverhead=160 #constantOverhead=160 bytes set for babbage-era, 158 for mary/alonzo transactions in babbage era + + #Get the destination address in hex format as well as the amount of lovelaces + #local toAddrHex=$(echo -n "${asset_entry[0]}" | ${bech32_bin} | tr -d '\n') #this would only work for bech32-shelley addresses + local toAddrHex=$(${cardanocli} address info --address ${asset_entry[0]} 2> /dev/null | jq -r .base16 | tr -d '\n') #this works for bech32-shelley and even base58-byron addresses + local toLovelaces=${asset_entry[1]} + + if [[ ${#asset_entry[@]} -eq 2 ]]; then #only lovelaces, no assets + + + case ${nodeEraParam,,} in + + *"babbage"* ) #Build the tx-out cbor in babbage-tx format with maps + local cborStr="" #setup a clear new cbor string variable, will hold the tx-out cbor part + local cborStr+=$(to_cbor "map" 2) #map 2 + local cborStr+=$(to_cbor "unsigned" 0) #unsigned 0 + local cborStr+=$(to_cbor "bytes" "${toAddrHex}") #toAddr in hex + local cborStr+=$(to_cbor "unsigned" 1) #unsigned 1 + local cborStr+=$(to_cbor "unsigned" ${toLovelaces}) #amount of lovelaces + ;; + + * ) #Build the tx-out cbor in alonzo/shelley format with array + local cborStr="" #setup a clear new cbor string variable, will hold the tx-out cbor part + local cborStr+=$(to_cbor "array" 2) #array 2 + local cborStr+=$(to_cbor "bytes" "${toAddrHex}") #toAddr in hex + local cborStr+=$(to_cbor "unsigned" ${toLovelaces}) #amount of lovelaces + ;; + + esac + + + else #assets involved + + local idx=2 + local pidCollector="" #holds the list of individual policyIDs + local assetsCollector="" #holds the list of individual assetHases (policyID+assetName) + + while [[ ${#asset_entry[@]} -gt ${idx} ]]; do #step thru all given assets + + #separate assetamount from asset_hash(policyID.assetName) + IFS=' ' read -ra asset <<< "${asset_entry[${idx}]}" 
+ local asset_amount=${asset[0]} + local asset_hash=${asset[1]} + + #split asset_hash_name into policyID and assetName(hex) + local asset_hash_policy=${asset_hash:0:56} + local asset_hash_hexname=${asset_hash:57} + + #collect the entries in individual lists to sort them later + local pidCollector+="${asset_hash_policy}\n" + local assetsCollector+="amount=${asset_amount} pid=${asset_hash_policy} name=${asset_hash_hexname}\n" + + local idx=$(( ${idx} + 1 )) + + done + + #only keep unique pids and get the number of each individual pid, also get the number of total individual pids + local pidCollector=$(echo -ne "${pidCollector}" | sort | uniq -c) + local numPIDs=$(wc -l <<< "${pidCollector}") + + + case ${nodeEraParam,,} in + + *"babbage"* ) #Build the tx-out cbor in babbage-tx format with maps + + local cborStr="" #setup a clear new cbor string variable, will hold the tx-out cbor part + local cborStr+=$(to_cbor "map" 2) #map 2 + local cborStr+=$(to_cbor "unsigned" 0) #unsigned 0 + local cborStr+=$(to_cbor "bytes" "${toAddrHex}") #toAddr in hex + local cborStr+=$(to_cbor "unsigned" 1) #unsigned 1 + ;; + + * ) #Build the tx-out cbor in alonzo/shelley format with array + local cborStr="" #setup a clear new cbor string variable, will hold the tx-out cbor part + local cborStr+=$(to_cbor "array" 2) #array 2 + local cborStr+=$(to_cbor "bytes" "${toAddrHex}") #toAddr in hex + ;; + + esac + + local cborStr+=$(to_cbor "array" 2) #array 2 -> first entry value of lovelaces, second is maps of assets + local cborStr+=$(to_cbor "unsigned" ${toLovelaces}) #amount of lovelaces + + local cborStr+=$(to_cbor "map" ${numPIDs}) #map x -> number of individual PIDs + + #process each individual pid + while read pidLine ; do + local numOfAssets=$(awk {'print $1'} <<< ${pidLine}) + local pidHash=$(awk {'print $2'} <<< ${pidLine}) + + local cborStr+=$(to_cbor "bytes" "${pidHash}") #asset pid as byteArray + local cborStr+=$(to_cbor "map" "${numOfAssets}") #map for number of asset with that pid + 
+ #process each individual asset + while read assetLine ; do + local tmpAssetAmount=$(awk {'print $1'} <<< ${assetLine}); local tmpAssetAmount=${tmpAssetAmount:7} + local tmpAssetHexName=$(awk {'print $3'} <<< ${assetLine}); local tmpAssetHexName=${tmpAssetHexName:5} + + local cborStr+=$(to_cbor "bytes" "${tmpAssetHexName}") #asset name as byteArray + local cborStr+=$(to_cbor "unsigned" ${tmpAssetAmount}) #amount of this asset + + done < <(echo -e "${assetsCollector}" | grep "pid=${pidHash}") + + done <<< "${pidCollector}" + + fi #only lovelaces or lovelaces + assets + + #We need to get the CostPerByte. This is reported via the protocol-parameters in the utxoCostPerByte or utxoCostPerWord parameter + local utxoCostPerByte=$(jq -r ".utxoCostPerByte | select (.!=null)" <<< ${protocolParam}); #babbage + if [[ "${utxoCostPerByte}" == "" ]]; then #if the parameter is not present, use the utxoCostPerWord one. a word is 8 bytes + local utxoCostPerWord=$(jq -r ".utxoCostPerWord | select (.!=null)" <<< ${protocolParam}); + local utxoCostPerByte=$(( ${utxoCostPerWord} / 8 )) + fi + + #cborLength is length of cborStr / 2 because of the hexchars (2 chars -> 1 byte) + minOutUTXO=$(( ( (${#cborStr} / 2) + ${constantOverhead} ) * ${utxoCostPerByte} )) + echo ${minOutUTXO} + exit #calculation for babbage is done, leave the function +fi + +### if we are here, it was not a babbage style calculation, so lets do it for the other eras +### do the calculation for shelley, allegra, mary, alonzo + +#chain constants, based on the specifications: https://hydra.iohk.io/build/5949624/download/1/shelley-ma.pdf +local k0=0 #coinSize=0 in mary-era, 2 in alonzo-era +local k1=6 +local k2=12 #assetSize=12 +local k3=28 #pidSize=28 +local k4=8 #word=8 bytes +local utxoEntrySizeWithoutVal=27 #6+txOutLenNoVal(14)+txInLen(7) +local adaOnlyUTxOSize=$((${utxoEntrySizeWithoutVal} + ${k0})) + +local minUTXOValue=$(jq -r ".minUTxOValue | select (.!=null)" <<< ${protocolParam}); #shelley, allegra, mary +local 
utxoCostPerWord=$(jq -r ".utxoCostPerWord | select (.!=null)" <<< ${protocolParam}); #alonzo + +### switch the method of the minOutUTXO calculation depending on the current era +if [[ ${protocolVersionMajor} -ge 5 ]]; then #5+6=Alonzo, new since alonzo: the k0 parameter increases by 2 compared to the mary one + adaOnlyUTxOSize=$(( adaOnlyUTxOSize + 2 )); #2 more starting with the alonzo era + minUTXOValue=$(( ${utxoCostPerWord} * ${adaOnlyUTxOSize} )); +fi + +### from here on, the calculation is the same for shelley, allegra, mary, alonzo + +#preload it with the minUTXOValue from the parameters, will be overwritten at the end if costs are higher +local minOutUTXO=${minUTXOValue} + +if [[ ${#asset_entry[@]} -gt 2 ]]; then #contains assets, do calculations. otherwise leave it at the default value + local idx=2 + local pidCollector="" #holds the list of individual policyIDs + local assetsCollector="" #holds the list of individual assetHases (policyID+assetName) + local nameCollector="" #holds the list of individual assetNames(hex format) + + while [[ ${#asset_entry[@]} -gt ${idx} ]]; do + + #separate assetamount from asset_hash(policyID.assetName) + IFS=' ' read -ra asset <<< "${asset_entry[${idx}]}" + local asset_hash=${asset[1]} + + #split asset_hash_name into policyID and assetName(hex) + local asset_hash_policy=${asset_hash:0:56} + local asset_hash_hexname=${asset_hash:57} + + #collect the entries in individual lists to sort them later + local pidCollector+="${asset_hash_policy}\n" + local assetsCollector+="${asset_hash_policy}${asset_hash_hexname}\n" + if [[ ! 
"${asset_hash_hexname}" == "" ]]; then local nameCollector+="${asset_hash_hexname}\n"; fi + + local idx=$(( ${idx} + 1 )) + + done + + #get uniq entries + local numPIDs=$(echo -ne "${pidCollector}" | sort | uniq | wc -l) + local numAssets=$(echo -ne "${assetsCollector}" | sort | uniq | wc -l) + + #get sumAssetNameLengths + local sumAssetNameLengths=$(( $(echo -ne "${nameCollector}" | sort | uniq | tr -d '\n' | wc -c) / 2 )) #divide consolidated hexstringlength by 2 because 2 hex chars -> 1 byte + + #calc the utxoWords + local roundupBytesToWords=$(bc <<< "scale=0; ( ${numAssets}*${k2} + ${sumAssetNameLengths} + ${numPIDs}*${k3} + (${k4}-1) ) / ${k4}") + local tokenBundleSize=$(( ${k1} + ${roundupBytesToWords} )) + + #calc minAda needed with assets + local minAda=$(( $(bc <<< "scale=0; ${minUTXOValue} / ${adaOnlyUTxOSize}") * ( ${utxoEntrySizeWithoutVal} + ${tokenBundleSize} ) )) + + #if minAda is higher than the bottom minUTXOValue, set the output to the higher value (max function) + if [[ ${minAda} -gt ${minUTXOValue} ]]; then minOutUTXO=${minAda}; fi + + minOutUTXO=${minAda}; + +fi + +echo ${minOutUTXO} #return the minOutUTXO value for the txOut-String with or without assets +} +#------------------------------------------------------- + + + +#------------------------------------------------------- +#to_cbor function +# +# converts different majortypes and there values into a cborHexString +# +to_cbor() { + + # ${1} type: unsigned, negative, bytes, string, array, map, tag + # ${2} value: unsigned int value or hexstring for bytes + + local type=${1} + local value="${2}" + + # majortypes + # unsigned 000x|xxxx majortype 0 not limited, but above 18446744073709551615 (2^64), the numbers are represented via tag2 + bytearray + # bytes 010x|xxxx majortype 2 limited to max. 65535 here + # array 100x|xxxx majortype 4 limited to max. 65535 here + # map 101x|xxxx majortype 5 limited to max. 
65535 here + + # extras - not used yet but implemented for the future + # negative 001x|xxxx majortype 1 not limited, but below -18446744073709551616 (-2^64 -1), the numbers are represented via tag3 + bytearray + # string 011x|xxxx majortype 3 limited to max. 65535 chars + # tag 110x|xxxx majortype 6 limited to max. 65535 here + + +case ${type} in + + #unsigned - input is an unsigned integer, range is selected via a bc query because bash can't handle big numbers + unsigned ) if [[ $(bc <<< "${value} < 24") -eq 1 ]]; then printf -v cbor "%02x" $((10#${value})) #1byte total value below 24 + elif [[ $(bc <<< "${value} < 256") -eq 1 ]]; then printf -v cbor "%04x" $((0x1800 + 10#${value})) #2bytes total: first 0x1800 + 1 lower byte value + elif [[ $(bc <<< "${value} < 65536") -eq 1 ]]; then printf -v cbor "%06x" $((0x190000 + 10#${value})) #3bytes total: first 0x190000 + 2 lowerbytes value + elif [[ $(bc <<< "${value} < 4294967296") -eq 1 ]]; then printf -v cbor "%10x" $((0x1A00000000 + 10#${value})) #5bytes total: 0x1A00000000 + 4 lower bytes value + elif [[ $(bc <<< "${value} < 18446744073709551616") -eq 1 ]]; then local tmp="00$(bc <<< "obase=16;ibase=10;${value}+498062089990157893632")"; cbor="${tmp: -18}" #9bytes total: first 0x1B0000000000000000 + 8 lower bytes value + #if value does not fit into an 8byte unsigned integer, the cbor representation is tag2(pos.bignum)+bytearray of the value + else local cbor=$(to_cbor "tag" 2); local tmp="00$(bc <<< "obase=16;ibase=10;${value}")"; tmp=${tmp: -$(( (${#tmp}-1)/2*2 ))}; local cbor+=$(to_cbor "bytes" ${tmp}) #fancy calc to get a leading zero in the hex array if needed + fi + ;; + + #bytestring - input is a hexstring + bytes ) local bytesLength=$(( ${#value} / 2 )) #bytesLength is length of value /2 because of hex encoding (2chars -> 1byte) + if [[ ${bytesLength} -lt 24 ]]; then printf -v cbor "%02x${value}" $((0x40 + 10#${bytesLength})) #1byte total 0x40 + lower part value & bytearrayitself + elif [[ ${bytesLength} -lt 
256 ]]; then printf -v cbor "%04x${value}" $((0x5800 + 10#${bytesLength})) #2bytes total: first 0x4000 + 0x1800 + 1 lower byte value & bytearrayitself + elif [[ ${bytesLength} -lt 65536 ]]; then printf -v cbor "%06x${value}" $((0x590000 + 10#${bytesLength})) #3bytes total: first 0x400000 + 0x190000 + 2 lower bytes value & bytearrayitself + fi + ;; + + #array - input is an unsigned integer + array ) if [[ ${value} -lt 24 ]]; then printf -v cbor "%02x" $((0x80 + 10#${value})) #1byte total 0x80 + lower part value + elif [[ ${value} -lt 256 ]]; then printf -v cbor "%04x" $((0x9800 + 10#${value})) #2bytes total: first 0x8000 + 0x1800 & 1 lower byte value + elif [[ ${value} -lt 65536 ]]; then printf -v cbor "%06x" $((0x990000 + 10#${value})) #3bytes total: first 0x800000 + 0x190000 & 2 lower bytes value + fi + ;; + + #map - input is an unsigned integer + map ) if [[ ${value} -lt 24 ]]; then printf -v cbor "%02x" $((0xA0 + 10#${value})) #1byte total 0xA0 + lower part value + elif [[ ${value} -lt 256 ]]; then printf -v cbor "%04x" $((0xB800 + 10#${value})) #2bytes total: first 0xA000 + 0x1800 & 1 lower byte value + elif [[ ${value} -lt 65536 ]]; then printf -v cbor "%06x" $((0xB90000 + 10#${value})) #3bytes total: first 0xA00000 + 0x190000 & 2 lower bytes value + fi + ;; + + ### + ### the following types are not used in these scripts yet, but added to have a more complete function for the future + ### + + #negative - input is a negative unsigned integer, range is selected via a bc query because bash can't handle big numbers + negative ) local value=$(bc <<< "${value//-/} -1") #negative representation in cbor is the neg. number as a pos. 
number minus 1, so a -500 will be represented as a 499 + if [[ $(bc <<< "${value} < 24") -eq 1 ]]; then printf -v cbor "%02x" $((0x20 + 10#${value})) #1byte total 0x20 value below 24 + elif [[ $(bc <<< "${value} < 256") -eq 1 ]]; then printf -v cbor "%04x" $((0x3800 + 10#${value})) #2bytes total: first 0x2000 + 0x1800 + 1 lower byte value + elif [[ $(bc <<< "${value} < 65536") -eq 1 ]]; then printf -v cbor "%06x" $((0x390000 + 10#${value})) #3bytes total: first 0x200000 + 0x190000 + 2 lowerbytes value + elif [[ $(bc <<< "${value} < 4294967296") -eq 1 ]]; then printf -v cbor "%10x" $((0x3A00000000 + 10#${value})) #5bytes total: 0x2000000000 + 0x1A00000000 + 4 lower bytes value + elif [[ $(bc <<< "${value} < 18446744073709551616") -eq 1 ]]; then local tmp="00$(bc <<< "obase=16;ibase=10;${value}+1088357900348863545344")"; cbor="${tmp: -18}" #9bytes total: first 0x3B0000000000000000 + 8 lower bytes value + #if value does not fit into an 8byte unsigned integer, the cbor representation is tag3(neg.bignum)+bytearray of the value + else local cbor=$(to_cbor "tag" 3); local tmp="00$(bc <<< "obase=16;ibase=10;${value}")"; tmp=${tmp: -$(( (${#tmp}-1)/2*2 ))}; local cbor+=$(to_cbor "bytes" ${tmp}) #fancy calc to get a leading zero in the hex array if needed + fi + ;; + + #tag - input is an unsigned integer + tag ) if [[ ${value} -lt 24 ]]; then printf -v cbor "%02x" $((0xC0 + 10#${value})) #1byte total 0xC0 + lower part value + elif [[ ${value} -lt 256 ]]; then printf -v cbor "%04x" $((0xD800 + 10#${value})) #2bytes total: first 0xC000 + 0x1800 & 1 lower byte value + elif [[ ${value} -lt 65536 ]]; then printf -v cbor "%06x" $((0xD90000 + 10#${value})) #3bytes total: first 0xC00000 + 0x190000 & 2 lower bytes value + fi + ;; + + #textstring - input is a utf8-string + string ) local value=$(echo -ne "${value}" | xxd -p -c 65536 | tr -d '\n') #convert the given string into a hexstring and process it further like a bytearray + local bytesLength=$(( ${#value} / 2 )) #bytesLength is 
length of value /2 because of hex encoding (2chars -> 1byte) + if [[ ${bytesLength} -lt 24 ]]; then printf -v cbor "%02x${value}" $((0x60 + 10#${bytesLength})) #1byte total 0x60 + lower part value & bytearrayitself + elif [[ ${bytesLength} -lt 256 ]]; then printf -v cbor "%04x${value}" $((0x7800 + 10#${bytesLength})) #2bytes total: first 0x6000 + 0x1800 + 1 lower byte value & bytearrayitself + elif [[ ${bytesLength} -lt 65536 ]]; then printf -v cbor "%06x${value}" $((0x790000 + 10#${bytesLength})) #3bytes total: first 0x600000 + 0x190000 + 2 lower bytes value & bytearrayitself + fi + ;; + + +esac + +echo -n "${cbor^^}" #return the cbor in uppercase +} +#------------------------------------------------------- + + + + + + +#------------------------------------------------------- +#Show Informations about the content in the offlineJSON +showOfflineFileInfo() { +#Displays infos about the content in the offlineJSON +echo -e "\e[0mChecking Content of the offlineFile: \e[32m$(basename ${offlineFile})\e[0m" +echo + +if [[ $(jq ".protocol.parameters | length" <<< ${offlineJSON}) -gt 0 ]]; then echo -ne "\e[0mProtocol-Parameters:\e[32m present\e[0m\t"; else echo -ne "\e[0mProtocol-Parameters:\e[35m missing\e[0m\t"; fi + +if [[ ! 
"$(jq -r ".protocol.era" <<< ${offlineJSON})" == null ]]; then echo -e "\e[0m Protocol-Era:\e[32m $(jq -r ".protocol.era" <<< ${offlineJSON})\e[0m"; else echo -e "\e[0m Protocol-Era:\e[35m missing\e[0m"; fi + +local historyCnt=$(jq -r ".history | length" <<< ${offlineJSON}) +echo -e "\e[0m History-Entries:\e[32m ${historyCnt}\e[0m"; + +if [[ ${historyCnt} -gt 0 ]]; then echo -e "\e[0m Last-Action:\e[32m $(jq -r ".history[-1].action" <<< ${offlineJSON}) \e[90m($(jq -r ".history[-1].date" <<< ${offlineJSON}))\e[0m"; fi + +local tokenMetaCnt=$(jq -r ".tokenMetaServer | length" <<< ${offlineJSON}) +if [[ ${tokenMetaCnt} -gt 0 ]]; then echo -e "\e[0m TokenMeta-Entries:\e[32m ${tokenMetaCnt}\e[0m"; fi + +if ${offlineMode}; then + echo -ne "\e[0m Online Versions:" + local versionTmp=$(jq -r ".general.onlineCLI" <<< ${offlineJSON}); if [[ "${versionTmp}" == null ]]; then versionTmp="-.--.-"; fi; echo -ne "\e[32m cli ${versionTmp}\e[0m" + local versionTmp=$(jq -r ".general.onlineNODE" <<< ${offlineJSON}); if [[ "${versionTmp}" == null ]]; then versionTmp="-.--.-"; fi; echo -e " /\e[32m node ${versionTmp}\e[0m" + else + echo -ne "\e[0m Offline Version:" + local versionTmp=$(jq -r ".general.offlineCLI" <<< ${offlineJSON}); if [[ "${versionTmp}" == null ]]; then versionTmp="-.--.-"; fi; echo -e "\e[32m cli ${versionTmp}\e[0m" +fi +echo +local addressCnt=$(jq -r ".address | length" <<< ${offlineJSON}) +echo -e "\e[0m Address-Entries:\e[32m ${addressCnt}\e[0m\t"; + +for (( tmpCnt=0; tmpCnt<${addressCnt}; tmpCnt++ )) +do + local addressKey=$(jq -r ".address | keys[${tmpCnt}]" <<< ${offlineJSON}) + local addressName=$(jq -r ".address.\"${addressKey}\".name" <<< ${offlineJSON}) + local addressAmount=$(jq -r ".address.\"${addressKey}\".totalamount" <<< ${offlineJSON}) lovelaces + addressAmount="$(convertToADA ${addressAmount}) ADA"; + local addressAssetsCnt=$(jq -r ".address.\"${addressKey}\".totalassetscnt" <<< ${offlineJSON}) + if [[ ${addressAssetsCnt} -gt 0 ]]; then 
addressAmount="${addressAmount} + ${addressAssetsCnt} Asset-Types"; fi + local addressDate=$(jq -r ".address.\"${addressKey}\".date" <<< ${offlineJSON}) + local addressUsedAsPayment=$(jq -r ".address.\"${addressKey}\".used" <<< ${offlineJSON}) + local addressType=$(jq -r ".address.\"${addressKey}\".type" <<< ${offlineJSON}) + if [[ ${addressUsedAsPayment} == "yes" ]]; then + addressUsed="used"; if [[ ${addressType} == ${addrTypePayment} ]]; then addressUsed="${addressUsed}, but can receive"; fi; + else + addressUsed=""; + fi + echo -e "\n\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${addressName} \e[90m(${addressAmount}, ${addressDate}) \e[35m${addressUsed}\e[0m\n\t \t\e[90m${addressKey}\e[0m" +done +local filesCnt=$(jq -r ".files | length" <<< ${offlineJSON}); +echo +echo -e "\e[0m Files-Attached:\e[32m ${filesCnt}\e[0m"; if [[ ${filesCnt} -gt 0 ]]; then echo; fi +for (( tmpCnt=0; tmpCnt<${filesCnt}; tmpCnt++ )) +do + local filePath=$(jq -r ".files | keys[${tmpCnt}]" <<< ${offlineJSON}) + local fileDate=$(jq -r ".files.\"${filePath}\".date" <<< ${offlineJSON}) + local fileSize=$(jq -r ".files.\"${filePath}\".size" <<< ${offlineJSON}) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${filePath} \e[90m(${fileSize} bytes, ${fileDate})\e[0m" +done +echo +local transactionsCnt=$(jq -r ".transactions | length" <<< ${offlineJSON}) +echo -e "\e[0mTransactions in Cue:\e[32m ${transactionsCnt}\e[0m\n"; +for (( tmpCnt=0; tmpCnt<${transactionsCnt}; tmpCnt++ )) +do + local transactionType=$(jq -r ".transactions[${tmpCnt}].type" <<< ${offlineJSON}) + local transactionEra=$(jq -r ".transactions[${tmpCnt}].era" <<< ${offlineJSON}) + local transactionDate=$(jq -r ".transactions[${tmpCnt}].date" <<< ${offlineJSON}) + local transactionFromName=$(jq -r ".transactions[${tmpCnt}].fromAddr" <<< ${offlineJSON}) + local transactionFromAddr=$(jq -r ".transactions[${tmpCnt}].sendFromAddr" <<< ${offlineJSON}) + local transactionToName=$(jq -r ".transactions[${tmpCnt}].toAddr" <<< ${offlineJSON}) + local 
transactionToAddr=$(jq -r ".transactions[${tmpCnt}].sendToAddr" <<< ${offlineJSON}) + + case ${transactionType} in + Transaction|Asset-Minting|Asset-Burning ) + #Normal UTXO Transaction (lovelaces and/or tokens) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${transactionType}[${transactionEra}] from '${transactionFromName}' to '${transactionToName}' \e[90m(${transactionDate})" + echo -e "\t \t\e[90mfrom ${transactionFromAddr}\n\t \t\e[90mto ${transactionToAddr}\e[0m" + ;; + + Withdrawal ) + #Rewards Withdrawal Transaction + local transactionStakeName=$(jq -r ".transactions[${tmpCnt}].stakeAddr" <<< ${offlineJSON}) + local transactionStakeAddr=$(jq -r ".transactions[${tmpCnt}].stakingAddr" <<< ${offlineJSON}) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0mRewards-Withdrawal[${transactionEra}] from '${transactionStakeName}' to '${transactionToName}', payment via '${transactionFromName}' \e[90m(${transactionDate})" + echo -e "\t \t\e[90mfrom ${transactionStakeAddr}\n\t \t\e[90mto ${transactionToAddr}\n\t \t\e[90mpayment via ${transactionFromAddr}\e[0m" + ;; + + StakeKeyRegistration|StakeKeyDeRegistration ) + #StakeKeyRegistration or Deregistration + local transactionStakeName=$(jq -r ".transactions[${tmpCnt}].stakeAddr" <<< ${offlineJSON}) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${transactionType}[${transactionEra}] for '${transactionStakeName}', payment via '${transactionFromName}' \e[90m(${transactionDate})" + echo -e "\t \t\e[90mpayment via ${transactionFromAddr}\e[0m" + ;; + + DelegationCertRegistration ) + #Delegation Certificate Registration + local transactionDelegName=$(jq -r ".transactions[${tmpCnt}].delegName" <<< ${offlineJSON}) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${transactionType}[${transactionEra}] for '${transactionDelegName}', payment via '${transactionFromName}' \e[90m(${transactionDate})" + echo -e "\t \t\e[90mpayment via ${transactionFromAddr}\e[0m" + ;; + + PoolRegistration|PoolReRegistration|PoolRetirement ) + #Delegation Certificate 
Registration + local poolMetaTicker=$(jq -r ".transactions[${tmpCnt}].poolMetaTicker" <<< ${offlineJSON}) + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[0m${transactionType}[${transactionEra}] for Pool '${poolMetaTicker}', payment via '${transactionFromName}' \e[90m(${transactionDate})" + echo -e "\t \t\e[90mpayment via ${transactionFromAddr}\e[0m" + ;; + + + * ) #Unknown Transaction Type !? + echo -e "\e[90m\t[$((${tmpCnt}+1))]\t\e[35mUnknown transaction type\e[0m" + ;; + esac + +echo +done + +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Read the current offlineFile into the offlineJSON variable +readOfflineFile() { +if [ -f "${offlineFile}" ]; then + offlineJSON=$(jq . ${offlineFile} 2> /dev/null) + if [[ $? -ne 0 ]]; then echo -e "\e[35mERROR - '$(basename ${offlineFile})' is not a valid JSON file, please generate a valid offlineJSON first in onlinemode.\e[0m\n"; exit 1; fi + if [[ $(trimString "${offlineJSON}") == "" ]]; then echo -e "\e[35mERROR - '$(basename ${offlineFile})' is not a valid JSON file, please generate a valid offlineJSON first in onlinemode.\e[0m\n"; exit 1; fi #nothing in the file + if [[ ! $(jq ".protocol.parameters | length" <<< ${offlineJSON}) -gt 0 ]]; then echo -e "\e[35mERROR - '$(basename ${offlineFile})' contains no protocol parameters. 
Please generate a valid offlineJSON first in onlinemode.\e[0m\n"; exit 1; fi + else + offileJSON=null + echo -e "\e[35mERROR - '$(basename ${offlineFile})' is not present, please generate a valid offlineJSON first in onlinemode.\e[0m\n"; exit 1; + +fi +} +#------------------------------------------------------- + +#------------------------------------------------------- +#Get the hardware-wallet ready, check the cardano-app version +start_HwWallet() { + +local onlyForManu=${1^^} + +if [[ "$(which ${cardanohwcli})" == "" ]]; then echo -e "\n\e[35mError - cardano-hw-cli binary not found, please install it first and set the path to it correct in the 00_common.sh, common.inc or $HOME/.common.inc !\e[0m\n"; exit 1; fi + +versionHWCLI=$(${cardanohwcli} version 2> /dev/null |& head -n 1 |& awk {'print $6'}) +versionCheck "${minHardwareCliVersion}" "${versionHWCLI}" +if [[ $? -ne 0 ]]; then majorError "Version ERROR - Please use a cardano-hw-cli version ${minHardwareCliVersion} or higher !\nYour version ${versionHWCLI} is no longer supported for security reasons or features, please upgrade - thx."; exit 1; fi + +echo -ne "\e[33mPlease connect & unlock your Hardware-Wallet, open the Cardano-App on Ledger-Devices (abort with CTRL+C)\e[0m\n\n\033[2A\n" +local tmp=$(${cardanohwcli} device version 2> /dev/stdout) +local pointStr="....." +until [[ "${tmp}" == *"app version"* && ! "${tmp}" == *"undefined"* ]]; do + + if [[ "${tmp}" == *"General error"* ]]; then tmp="Cardano App not opened?"; fi + + local tmpCnt=6 + while [[ ${tmpCnt} > 0 ]]; do + tmpCnt=$(( ${tmpCnt} - 1 )) + echo -ne "\r\e[35m${tmp:0:64} ...\e[0m - retry in ${tmpCnt} secs ${pointStr:${tmpCnt}}\033[K" + sleep 1 + done +tmp=$(${cardanohwcli} device version 2> /dev/stdout) +done + +local walletManu=$(echo "${tmp}" |& head -n 1 |& awk {'print $1'}) +local versionApp=$(echo "${tmp}" |& head -n 1 |& awk {'print $4'}) + +#Check if the function was set to be only available on a specified manufacturer hw wallet +if [ ! 
"${onlyForManu}" == "" ] && [ ! "${onlyForManu}" == "${walletManu^^}" ]; then echo -e "\n\e[35mError - This function is NOT available on this type of Hardware-Wallet, only available on a ${onlyForManu} device at the moment!\e[0m\n"; exit 1; fi + +case ${walletManu^^} in + + LEDGER ) #For Ledger Hardware-Wallets + versionCheck "${minLedgerCardanoAppVersion}" "${versionApp}" + if [[ $? -ne 0 ]]; then majorError "Version ERROR - Please use a Cardano App version ${minLedgerCardanoAppVersion} or higher on your LEDGER Hardware-Wallet!\nOlder versions like your current ${versionApp} are not supported, please upgrade - thx."; exit 1; fi + echo -ne "\r\033[1A\e[0mCardano App Version \e[32m${versionApp}\e[0m (HW-Cli Version \e[32m${versionHWCLI}\e[0m) found on your \e[32m${walletManu}\e[0m device!\033[K\n\e[32mPlease approve the action on your Hardware-Wallet (abort with CTRL+C) \e[0m... \033[K" + ;; + + TREZOR ) #For Trezor Hardware-Wallets + versionCheck "${minTrezorCardanoAppVersion}" "${versionApp}" + if [[ $? -ne 0 ]]; then majorError "Version ERROR - Please use Firmware version ${minTrezorCardanoAppVersion} or higher on your TREZOR Hardware-Wallet!\nOlder versions like your current ${versionApp} are not supported, please upgrade - thx."; exit 1; fi + echo -ne "\r\033[1A\e[0mFirmware-Version \e[32m${versionApp}\e[0m (HW-Cli Version \e[32m${versionHWCLI}\e[0m) found on your \e[32m${walletManu}\e[0m device!\033[K\n\e[32mPlease approve the action on your Hardware-Wallet (abort with CTRL+C) \e[0m... \033[K" + ;; + + * ) #For any other Manuf. 
+ majorError "Only Ledger and Trezor Hardware-Wallets are supported at the moment!"; exit 1; + ;; +esac + +} + +#------------------------------------------------------- + +#------------------------------------------------------- +#Convert the given lovelaces $1 into ada (divide by 1M) +convertToADA() { +#echo $(bc <<< "scale=6; ${1} / 1000000" | sed -e 's/^\./0./') #divide by 1M and add a leading zero if below 1 ada +printf "%'.6f" "${1}e-6" #return in ADA format (with 6 commas) +} + + +#------------------------------------------------------- +#Get the real bytelength of a given string (for UTF-8 byte check) +byteLength() { + echo -n "${1}" | wc --bytes +} + + +#------------------------------------------------------- +#Autocorrection of the TxBody to be in canonical order for HW-Wallet transactions +autocorrect_TxBodyFile() { + +local txBodyFile="${1}" +local txBodyTmpFile="${1}-corrected" + +#check cardanohwcli presence and version +if [[ "$(which ${cardanohwcli})" == "" ]]; then echo -e "\n\e[35mError - cardano-hw-cli binary not found, please install it first and set the path to it correct in the 00_common.sh, common.inc or $HOME/.common.inc !\e[0m\n"; exit 1; fi +versionHWCLI=$(${cardanohwcli} version 2> /dev/null |& head -n 1 |& awk {'print $6'}) +versionCheck "${minHardwareCliVersion}" "${versionHWCLI}" +if [[ $? -ne 0 ]]; then majorError "Version ERROR - Please use a cardano-hw-cli version ${minHardwareCliVersion} or higher !\nYour version ${versionHWCLI} is no longer supported for security reasons or features, please upgrade - thx."; exit 1; fi + +#do the correction +#tmp=$(${cardanohwcli} transaction transform-raw --tx-body-file ${txBodyFile} --out-file ${txBodyTmpFile} 2> /dev/stdout) #old default format +tmp=$(${cardanohwcli} transaction transform --tx-file ${txBodyFile} --out-file ${txBodyTmpFile} 2> /dev/stdout) #new cddl format + +if [[ $? 
-ne 0 ]]; then echo -e "\n${tmp}"; exit 1; fi +tmp_lastline=$(echo "${tmp}" | tail -n 1) +if [[ "${tmp_lastline^^}" =~ (ERROR) ]]; then echo -e "\n${tmp}"; exit 1; fi + +#ok, no error occured to this point. copy the generated new TxBody file over the original one +mv ${txBodyTmpFile} ${txBodyFile}; if [[ $? -ne 0 ]]; then echo -e "\n\e[35mError: Could not write new TxBody File!"; exit 1; fi + +#all went well, now return the lastline output +echo "${tmp_lastline}"; exit 0 +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#Autocorrection of the TxBody to be in canonical order for HW-Wallet transactions +#Also repairs a maybe broken AuxDataHash! +autocorrect_TxBodyFile_withAuxDataHashCorrection() { + +local txBodyFile="${1}" +local txBodyTmpFile="${1}-corrected" +local auxHashStatus="" + +#check cardanohwcli presence and version +if [[ "$(which ${cardanohwcli})" == "" ]]; then echo -e "\n\e[35mError - cardano-hw-cli binary not found, please install it first and set the path to it correct in the 00_common.sh, common.inc or $HOME/.common.inc !\e[0m\n"; exit 1; fi +versionHWCLI=$(${cardanohwcli} version 2> /dev/null |& head -n 1 |& awk {'print $6'}) +versionCheck "${minHardwareCliVersion}" "${versionHWCLI}" +if [[ $? -ne 0 ]]; then majorError "Version ERROR - Please use a cardano-hw-cli version ${minHardwareCliVersion} or higher !\nYour version ${versionHWCLI} is no longer supported for security reasons or features, please upgrade - thx."; exit 1; fi + +#search for the auxmetadata and generate the current aux hash from it as a verification. 
+#this is a fast simple solution by searching for the hexbytes f5d90103 as the mark of the auxdata beginning, there is no deep cbor analysis behind it +local currentAuxHash=$(cat ${txBodyFile} | sed -n "s/.*f5\(d90103.*\)\"/\1/p" | xxd -r -ps | b2sum -l 256 -b | awk {'print $1'}) #holds the expected auxhash +local currentAuxHash=$(cat ${txBodyFile} | sed -n "s/.*\($currentAuxHash\).*/\1/p") #holds the auxhash if it was found in the txcbor as a proof that the auxdata was found correctly + +#do the correction +tmp=$(${cardanohwcli} transaction transform --tx-file ${txBodyFile} --out-file ${txBodyTmpFile} 2> /dev/stdout) #new cddl format +if [[ $? -ne 0 ]]; then echo -e "\n${tmp}"; exit 1; fi +tmp_lastline=$(echo "${tmp}" | tail -n 1) +if [[ "${tmp_lastline^^}" =~ (ERROR) ]]; then echo -e "\n${tmp}"; exit 1; fi + +#generate the newAuxHash after the canonical order transformation +local newAuxHash=$(cat ${txBodyTmpFile} | sed -n 's/.*f5\(d90103.*\)\"/\1/p' | xxd -r -ps | b2sum -l 256 -b | awk {'print $1'}) +if [[ "${currentAuxHash}" != "" && "${currentAuxHash}" != "${newAuxHash}" ]]; then #only do it when the currentAuxHash holds a hash (detection worked) and if the new one is different to the old one + sed -i "s/${currentAuxHash}/${newAuxHash}/g" ${txBodyTmpFile}; if [ $? -ne 0 ]; then echo -e "\nCouldn't write temporary ${txBodyTmpFile} with a corrected AuxHash!"; exit 1; fi + local auxHashStatus="\e[91m\nCorrected the AuxHash from '${currentAuxHash}' to '${newAuxHash}' too!" +fi + +#ok, no error occured to this point. copy the generated new TxBody file over the original one +mv ${txBodyTmpFile} ${txBodyFile}; if [[ $? 
-ne 0 ]]; then echo -e "\n\e[35mError: Could not write new TxBody File!"; exit 1; fi + +#all went well, now return the lastline output +echo "${tmp_lastline}${auxHashStatus}"; exit 0 +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#Show a rotating bar in asynchron mode during processing like utxo query +#Stop animation by sending a SIGINT to this child process +# +# ${1} = preText +function showProcessAnimation() { + +local stopAnimation="false"; +local idx=0; +#local animChar=("-" "\\" "|" "/"); +#local animChar=("⎺" "\\" "⎽" "/"); +local animChar=("> " ">> " ">>> " " >>> " " >>>" " >>" " >" " "); +#local animChar=("> " " > " " > " " > " " >" " < " " < " " < "); + +trap terminate SIGINT +terminate(){ stopAnimation="true"; } + +until [[ ${stopAnimation} == "true" ]]; do + idx=$(( (${idx}+1)%8 )) + echo -ne "\r\e[0m${1}${animChar[$idx]} " + sleep 0.2 +done +} +#------------------------------------------------------- +stopProcessAnimation() { +pkill -SIGINT -P $$ && echo -ne "\r\033[K" #stop childprocess and delete the outputline +} +#------------------------------------------------------- + + + +#------------------------------------------------------- +#checks if the given password $1 is a strong one +#min. 10 chars long, includes at least one uppercase, one lowercase, one special char +is_strong_password() { + [[ "$1" =~ ^(.*[a-z]) ]] && [[ "$1" =~ ^(.*[A-Z]) ]] && [[ "$1" =~ ^(.*[0-9]) ]] && [[ "$1" =~ ^(.*[^a-zA-Z0-9]) ]] && [[ "$1" =~ ^(.){10,} ]] && echo "true" +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#encrypt skey json data, will return a json with a +#modified 'description' field and encrypted 'encrHex' field +# +# ${1} = skeyJSON data +# ${2} = password +encrypt_skeyJSON() { + + local skeyJSON="${1}" + local password="${2}" + + #check that the encryption/decryption tool gpg exists + if ! 
exists gpg; then echo -e "\n\n\e[33mYou need the little tool 'gnupg', its needed to encrypt/decrypt the data !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install gnupg\n\n\e[33mThx! :-)\e[0m\n" > $(tty); exit 1; fi + + #check if the skeyJSON is already encrypted + if [[ $(egrep "encrHex|Encrypted" <<< "${skeyJSON}" | wc -l) -ne 0 ]]; then echo "It is already encrypted!"; exit 1; fi + + #read data + local skeyType=$(jq -r .type <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .type field!"; exit 1; fi + if [[ "${skeyJSON}" != *"SigningKey"* ]]; then echo "Type field does not contain 'SigningKey' information!"; exit 1; fi + local skeyDescription=$(jq -r .description <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .description field!"; exit 1; fi + local skeyCBOR=$(jq -r .cborHex <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .cborHex field!"; exit 1; fi + unset skeyJSON #not used after this line + + #encrypt + local encrHex=$(gpg --symmetric --yes --batch --quiet --cipher-algo AES256 --passphrase "${password}" --log-file /dev/null <<< ${skeyCBOR} 2> /dev/null | xxd -ps -c 1000000) + unset skeyCBOR #not used after this line + unset password #not used after this line + if [[ "${encrHex}" == "" ]]; then echo "Couldn't encrypt the data via gpg!"; exit 1; fi + + #return data and format it via jq (monochrome) + echo -e "{ \"type\": \"${skeyType}\", \"description\": \"Encrypted ${skeyDescription}\", \"encrHex\": \"${encrHex}\" }" | jq -M . + +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#decrypt skey json data, will return a json with the +#original 'description' field and a decrypted 'cborHex' field +# +# ${1} = skeyJSON data +# ${2} = password +decrypt_skeyJSON() { + + local skeyJSON="${1}" + local password="${2}" + + #check that the encryption/decryption tool gpg exists + if ! 
exists gpg; then echo -e "\n\n\e[33mYou need the little tool 'gnupg', its needed to encrypt/decrypt the data !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install gnupg\n\n\e[33mThx! :-)\e[0m\n" > $(tty); exit 1; fi + + #check if the skeyJSON is already decrypted + if [[ $(egrep "encrHex|Encrypted" <<< "${skeyJSON}" | wc -l) -eq 0 ]]; then echo "It is already decrypted!"; exit 1; fi + + #read data + local skeyType=$(jq -r .type <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .type field!"; exit 1; fi + if [[ "${skeyJSON}" != *"SigningKey"* ]]; then echo "Type field does not contain 'SigningKey' information!"; exit 1; fi + local skeyDescription=$(jq -r .description <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .description field!"; exit 1; fi + local skeyEncrHex=$(jq -r .encrHex <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .encrHex field!"; exit 1; fi + unset skeyJSON #not used after this line + + #decrypt + local cborHex=$(xxd -ps -r <<< ${skeyEncrHex} | gpg --decrypt --yes --batch --quiet --passphrase "${password}" --log-file /dev/null 2> /dev/null) + unset skeyEncrHex #not used after this line + unset password #not used after this line + if [[ "${cborHex}" == "" ]]; then echo "Couldn't decrypt the data via gpg! Wrong password?"; exit 1; fi + + #return data and format it via jq (monochrome) + echo -e "{ \"type\": \"${skeyType}\", \"description\": \"${skeyDescription//Encrypted /}\", \"cborHex\": \"${cborHex}\" }" | jq -M . + unset cborHex + +} +#------------------------------------------------------- + + +#------------------------------------------------------- +#read skey file and decrypt it if needed +# +#this function returns the skey json which will be used for example to sign transactions directly and not via a file read +# +# ${1} = skeyFILE +# +read_skeyFILE() { + + local skeyFILE="${1}" + local cborHex="" + + local viaENV="" + + #check if the file exists + if [ ! 
-f "${skeyFILE}" ]; then echo -e "\e[35mGiven SKEY-File does not exist!\e[0m\n\n"; exit 1; fi + + #check if the skeyJSON is already decrypted, if so, just return the content + if [[ $(egrep "encrHex|Encrypted" < "${skeyFILE}" | wc -l) -eq 0 ]]; then echo -ne "\e[0mReading unencrypted file \e[32m${skeyFILE}\e[0m ... " > $(tty); cat "${skeyFILE}"; exit 0; fi + + #its encrypted, check that the encryption/decryption tool gpg exists + if ! exists gpg; then echo -e "\n\n\e[33mYou need the little tool 'gnupg', its needed to encrypt/decrypt the data !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install gnupg\n\n\e[33mThx! :-)\e[0m\n" > $(tty); exit 1; fi + + #main loop to repeat the decryption until we have a cborHex + while [[ "${cborHex}" == "" ]]; do + + #check if there is a passwort set in the ENV_DECRYPT_PASSWORD variable, if so, just do a short check and not prompt for a password + + if [[ "${ENV_DECRYPT_PASSWORD}" == "" ]]; then #prompt for a password + #prompt for the password + local password=$(ask_pass "\e[33mEnter the Password to decrypt '${skeyFILE}' (empty to abort)") + if [[ ${password} == "" ]]; then echo -e "\e[35mAborted\e[0m\n\n"; exit 1; fi + while [[ $(is_strong_password "${password}") != "true" ]]; do + echo -e "\n\e[35mThis is not a strong password, so it couldn't be the right one. Lets try it again...\e[0m\n" > $(tty) + local password=$(ask_pass "\e[33mEnter the Password to decrypt '${skeyFILE}' (empty to abort)") + if [[ ${password} == "" ]]; then echo -e "\e[35mAborted\e[0m\n\n"; exit 1; fi + done + + else #password is present in the ENV_DECRYPT_PASSWORD variable + + #exit with an error if the password in the ENV_DECRYPT_PASSWORD is not a strong one + if [[ $(is_strong_password "${ENV_DECRYPT_PASSWORD}") != "true" ]]; then echo -e "\n\e[35mThis is not a strong password via ENV_DECRYPT_PASSWORD... 
abort!\n\n"; exit 1; fi + local password=${ENV_DECRYPT_PASSWORD} + local viaENV="via ENV_DECRYPT_PASSWORD " #to extend the processing text + + fi + + #read data + local skeyJSON=$(cat "${skeyFILE}") + local skeyType=$(jq -r .type <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .type field!"; exit 1; fi + if [[ "${skeyJSON}" != *"SigningKey"* ]]; then echo "Type field does not contain 'SigningKey' information!"; exit 1; fi + local skeyDescription=$(jq -r .description <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .description field!"; exit 1; fi + local skeyEncrHex=$(jq -r .encrHex <<< ${skeyJSON}); if [[ $? -ne 0 ]]; then echo "Can't read the .encrHex field!"; exit 1; fi + unset skeyJSON #not used after this line + + #decrypt + echo -ne "\r\033[K\e[0mDecrypting the file '\e[32m${skeyFILE}\e[0m' ${viaENV}... " > $(tty) + local cborHex=$(xxd -ps -r <<< ${skeyEncrHex} 2> /dev/null | gpg --decrypt --yes --batch --quiet --passphrase "${password}" --log-file /dev/null 2> /dev/null) + unset skeyEncrHex #not used after this line + unset password #not used after this line + if [[ "${cborHex}" == "" ]]; then + if [[ "${ENV_DECRYPT_PASSWORD}" != "" ]]; then echo -e "\e[35mCouldn't decrypt the data via ENV_DECRYPT_PASSWORD! Wrong password?\e[0m"; exit 1; fi #if there was an error and password was from the ENV, exit with an error + echo -e "\e[35mCouldn't decrypt the data! 
Wrong password?\e[0m" > $(tty); + fi + + done + + #we have cborHex content now, so the decryption worked + + #return data in json format, remove the added "Encrypted " in the description field on the fly + printf "{ \"type\": \"${skeyType}\", \"description\": \"${skeyDescription//Encrypted /}\", \"cborHex\": \"${cborHex}\" }" + unset cborHex + +} +#------------------------------------------------------- + diff --git a/01_queryAddress.sh b/01_queryAddress.sh new file mode 100755 index 0000000..934171a --- /dev/null +++ b/01_queryAddress.sh @@ -0,0 +1,239 @@ +#!/bin/bash + +# Script is brought to you by ATADA Stakepool, Telegram @atada_stakepool + +#load variables and functions from common.sh +. "$(dirname "$0")"/00_common.sh + + +#Check the commandline parameter +if [[ $# -eq 1 && ! $1 == "" ]]; then addrName="$(dirname $1)/$(basename $1 .addr)"; addrName=${addrName/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi + + +#Check if addrName file does not exists, make a dummy one in the temp directory and fill in the given parameter as the hash address +if [ ! 
-f "${addrName}.addr" ]; then + addrName=$(trimString "${addrName}") #trim it if spaces present + + #check if its a regular cardano payment address + typeOfAddr=$(get_addressType "${addrName}"); + if [[ ${typeOfAddr} == ${addrTypePayment} || ${typeOfAddr} == ${addrTypeStake} ]]; then echo "$(basename ${addrName})" > ${tempDir}/tempAddr.addr; addrName="${tempDir}/tempAddr"; + + #check if its an adahandle + elif checkAdaHandleFormat "${addrName}"; then + if ${offlineMode}; then echo -e "\n\e[35mERROR - Adahandles are only supported in Online mode.\n\e[0m"; exit 1; fi + adahandleName=${addrName,,} + assetNameHex=$(convert_assetNameASCII2HEX ${adahandleName:1}) + #query adahandle asset holding address via koios + showProcessAnimation "Query Adahandle into holding address: " & + response=$(curl -s -m 10 -X GET "${koiosAPI}/asset_address_list?_asset_policy=${adahandlePolicyID}&_asset_name=${assetNameHex}" -H "Accept: application/json" 2> /dev/null) + stopProcessAnimation; + #check if the received json only contains one entry in the array (will also not be 1 if not a valid json) + if [[ $(jq ". | length" 2> /dev/null <<< ${response}) -ne 1 ]]; then echo -e "\n\e[35mCould not resolve Adahandle to an address.\n\e[0m"; exit 1; fi + addrName=$(jq -r ".[0].payment_address" <<< ${response} 2> /dev/null) + typeOfAddr=$(get_addressType "${addrName}"); + if [[ ${typeOfAddr} != ${addrTypePayment} ]]; then echo -e "\n\e[35mERROR - Resolved address '${addrName}' is not a valid payment address.\n\e[0m"; exit 1; fi; + showProcessAnimation "Verify Adahandle is on resolved address: " & + utxo=$(${cardanocli} query utxo --address ${addrName} ${magicparam} ); stopProcessAnimation; checkError "$?"; if [ $? 
-ne 0 ]; then exit $?; fi; + if [[ $(grep "${adahandlePolicyID}.${assetNameHex} " <<< ${utxo} | wc -l) -ne 1 ]]; then + echo -e "\n\e[35mERROR - Resolved address '${addrName}' does not hold the \$adahandle '${adahandleName}' !\n\e[0m"; exit 1; fi; + echo -e "\e[0mFound \$adahandle '${adahandleName}' on Address:\e[32m ${addrName}\e[0m\n" + echo "$(basename ${addrName})" > ${tempDir}/adahandle-resolve.addr; addrName="${tempDir}/adahandle-resolve"; + + #otherwise post an error message + else echo -e "\n\e[35mERROR - Destination Address can't be resolved. Maybe filename wrong, or not a payment-address.\n\e[0m"; exit 1; + + fi +fi + +showToAddr=${adahandleName:-"${addrName}.addr"} #shows the adahandle instead of the destination address file if available + +checkAddr=$(cat ${addrName}.addr) + +typeOfAddr=$(get_addressType "${checkAddr}"); checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi; + +#What type of Address is it? Base&Enterprise or Stake +if [[ ${typeOfAddr} == ${addrTypePayment} ]]; then #Enterprise and Base UTXO adresses + + echo -e "\e[0mChecking UTXOs of Payment-Address\e[32m ${showToAddr}\e[0m: ${checkAddr}" + echo + + echo -e "\e[0mAddress-Type / Era:\e[32m $(get_addressType "${checkAddr}")\e[0m / \e[32m$(get_addressEra "${checkAddr}")\e[0m" + echo + + #Get UTX0 Data for the address. When in online mode of course from the node and the chain, in offlinemode from the transferFile + #${nodeEraParam} not needed anymore + if ${onlineMode}; then + showProcessAnimation "Query-UTXO: " & + utxo=$(${cardanocli} query utxo --address ${checkAddr} ${magicparam} ); stopProcessAnimation; checkError "$?"; if [ $? 
-ne 0 ]; then exit $?; fi;
    showProcessAnimation "Convert-UTXO: " &
    utxoJSON=$(generate_UTXO "${utxo}" "${checkAddr}"); stopProcessAnimation;
  else
    readOfflineFile; #Reads the offlinefile into the offlineJSON variable
    utxoJSON=$(jq -r ".address.\"${checkAddr}\".utxoJSON" <<< ${offlineJSON})
    if [[ "${utxoJSON}" == null ]]; then echo -e "\e[35mAddress not included in the offline transferFile, please include it first online!\e[0m\n"; exit 1; fi
  fi

  utxoEntryCnt=$(jq length <<< ${utxoJSON})
  if [[ ${utxoEntryCnt} == 0 ]]; then echo -e "\e[35mNo funds on the Address!\e[0m\n"; exit 1; else echo -e "\e[32m${utxoEntryCnt} UTXOs\e[0m found on the Address!"; fi
  echo

  totalLovelaces=0; #Init for the Sum
  totalAssetsJSON="{}"; #Building a total JSON with the different assetstypes "policyIdHash.name", amount and name
  totalPolicyIDsLIST=""; #Buffer for the policyIDs, will be sorted/uniq/linecount at the end of the query

  #For each utxo entry, check the utxo#index and check if there are also any assets in that utxo#index
  #LEVEL 1 - different UTXOs
  #The readarrays below split the utxoJSON into parallel arrays indexed by UTXO entry.

  readarray -t utxoHashIndexArray <<< $(jq -r "keys_unsorted[]" <<< ${utxoJSON})
  readarray -t utxoLovelaceArray <<< $(jq -r "flatten | .[].value.lovelace" <<< ${utxoJSON})
  readarray -t assetsEntryCntArray <<< $(jq -r "flatten | .[].value | del (.lovelace) | length" <<< ${utxoJSON})
  readarray -t assetsEntryJsonArray <<< $(jq -c "flatten | .[].value | del (.lovelace)" <<< ${utxoJSON})
  readarray -t utxoDatumHashArray <<< $(jq -r "flatten | .[].datumhash" <<< ${utxoJSON})


  for (( tmpCnt=0; tmpCnt<${utxoEntryCnt}; tmpCnt++ ))
  do
    utxoHashIndex=${utxoHashIndexArray[${tmpCnt}]}
    utxoAmount=${utxoLovelaceArray[${tmpCnt}]} #Lovelaces
    totalLovelaces=$(bc <<< "${totalLovelaces} + ${utxoAmount}" ) #bc keeps arbitrary precision for the lovelace sum
#   echo -e "Hash#Index: ${utxoHashIndex}\tAmount: ${utxoAmount}";
    echo -e "Hash#Index: ${utxoHashIndex}\tADA: $(convertToADA ${utxoAmount}) \e[90m(${utxoAmount} lovelaces)\e[0m";
    if [[ !
"${utxoDatumHashArray[${tmpCnt}]}" == null ]]; then echo -e " DatumHash: ${utxoDatumHashArray[${tmpCnt}]}"; fi
    assetsEntryCnt=${assetsEntryCntArray[${tmpCnt}]}

    if [[ ${assetsEntryCnt} -gt 0 ]]; then

      assetsJSON=${assetsEntryJsonArray[${tmpCnt}]}
      assetHashIndexArray=(); readarray -t assetHashIndexArray <<< $(jq -r "keys_unsorted[]" <<< ${assetsJSON})
      assetNameCntArray=(); readarray -t assetNameCntArray <<< $(jq -r "flatten | .[] | length" <<< ${assetsJSON})

      #LEVEL 2 - different policyIDs
      for (( tmpCnt2=0; tmpCnt2<${assetsEntryCnt}; tmpCnt2++ ))
      do
        assetHash=${assetHashIndexArray[${tmpCnt2}]} #assetHash = policyID
        totalPolicyIDsLIST+="${assetHash}\n"

        assetsNameCnt=${assetNameCntArray[${tmpCnt2}]}
        assetNameArray=(); readarray -t assetNameArray <<< $(jq -r ".\"${assetHash}\" | keys_unsorted[]" <<< ${assetsJSON})
        assetAmountArray=(); readarray -t assetAmountArray <<< $(jq -r ".\"${assetHash}\" | flatten | .[]" <<< ${assetsJSON})

        #LEVEL 3 - different names under the same policyID
        for (( tmpCnt3=0; tmpCnt3<${assetsNameCnt}; tmpCnt3++ ))
        do
          assetName=${assetNameArray[${tmpCnt3}]}
          assetAmount=${assetAmountArray[${tmpCnt3}]}
          assetBech=$(convert_tokenName2BECH "${assetHash}${assetName}" "")
          #an empty assetName needs no "." separator in the "policyID.assetName" key
          if [[ "${assetName}" == "" ]]; then point=""; else point="."; fi
          oldValue=$(jq -r ".\"${assetHash}${point}${assetName}\".amount" <<< ${totalAssetsJSON})
          newValue=$(bc <<< "${oldValue}+${assetAmount}")
          assetTmpName=$(convert_assetNameHEX2ASCII_ifpossible "${assetName}") #if it starts with a . -> ASCII showable name, otherwise the HEX-String
          totalAssetsJSON=$( jq ". += {\"${assetHash}${point}${assetName}\":{amount: \"${newValue}\", name: \"${assetTmpName}\", bech: \"${assetBech}\"}}" <<< ${totalAssetsJSON})
          if [[ "${assetTmpName:0:1}" == "."
]]; then assetTmpName=${assetTmpName:1}; else assetTmpName="{${assetTmpName}}"; fi

          #adahandle assets get a special display line, everything else the default one
          case ${assetHash} in
            "${adahandlePolicyID}" ) #$adahandle
              echo -e "\e[90m Asset: ${assetBech} \e[33mADA Handle: \$$(convert_assetNameHEX2ASCII ${assetName}) ${assetTmpName}\e[0m"
              ;;
            * ) #default
              echo -e "\e[90m Asset: ${assetBech} Amount: ${assetAmount} ${assetTmpName}\e[0m"
              ;;
          esac

        done
      done
    fi
  done
  echo -e "\e[0m-----------------------------------------------------------------------------------------------------"
  echo -e "Total ADA on the Address:\e[32m $(convertToADA ${totalLovelaces}) ADA / ${totalLovelaces} lovelaces \e[0m\n"

  totalPolicyIDsCnt=$(echo -ne "${totalPolicyIDsLIST}" | sort | uniq | wc -l)

  totalAssetsCnt=$(jq length <<< ${totalAssetsJSON});
  if [[ ${totalAssetsCnt} -gt 0 ]]; then
    echo -e "\e[32m${totalAssetsCnt} Asset-Type(s) / ${totalPolicyIDsCnt} different PolicyIDs\e[0m found on the Address!\n"
    printf "\e[0m%-56s%11s %16s %-44s %7s %s\n" "PolicyID:" "Asset-Name:" "Total-Amount:" "Bech-Format:" "Ticker:" "Meta-Name:"

    totalAssetsJSON=$(jq --sort-keys . <<< ${totalAssetsJSON}) #sort the json by the hashname
    assetHashNameArray=(); readarray -t assetHashNameArray <<< $(jq -r "keys_unsorted[]" <<< ${totalAssetsJSON})
    assetAmountArray=(); readarray -t assetAmountArray <<< $(jq -r "flatten | .[].amount" <<< ${totalAssetsJSON})
    assetNameArray=(); readarray -t assetNameArray <<< $(jq -r "flatten | .[].name" <<< ${totalAssetsJSON})
    assetBechArray=(); readarray -t assetBechArray <<< $(jq -r "flatten | .[].bech" <<< ${totalAssetsJSON})

    for (( tmpCnt=0; tmpCnt<${totalAssetsCnt}; tmpCnt++ ))
    do
      assetHashName=${assetHashNameArray[${tmpCnt}]}
      assetAmount=${assetAmountArray[${tmpCnt}]}
      assetName=${assetNameArray[${tmpCnt}]}
      assetBech=${assetBechArray[${tmpCnt}]}
      assetHashHex="${assetHashName//./}" #remove a .
if present, we need a clean subject here for the registry request

      #optionally resolve ticker/name via the token registry (online: HTTP, offline: cached in offlineJSON)
      if $queryTokenRegistry; then if $onlineMode; then metaResponse=$(curl -sL -m 20 "${tokenMetaServer}/${assetHashHex}"); else metaResponse=$(jq -r ".tokenMetaServer.\"${assetHashHex}\"" <<< ${offlineJSON}); fi
        metaAssetName=$(jq -r ".name.value | select (.!=null)" 2> /dev/null <<< ${metaResponse}); if [[ ! "${metaAssetName}" == "" ]]; then metaAssetName="${metaAssetName} "; fi
        metaAssetTicker=$(jq -r ".ticker.value | select (.!=null)" 2> /dev/null <<< ${metaResponse})
      fi

      if [[ "${assetName}" == "." ]]; then assetName=""; fi

      printf "\e[90m%-70s \e[32m%16s %44s \e[90m%-7s \e[36m%s\e[0m\n" "${assetHashName:0:56}${assetName}" "${assetAmount}" "${assetBech}" "${metaAssetTicker}" "${metaAssetName}"
    done
  fi
  echo




elif [[ ${typeOfAddr} == ${addrTypeStake} ]]; then #Staking Address

  echo -e "\e[0mChecking Rewards on Stake-Address\e[32m ${showToAddr}\e[0m: ${checkAddr}"
  echo

  echo -e "\e[0mAddress-Type / Era:\e[32m $(get_addressType "${checkAddr}")\e[0m / \e[32m$(get_addressEra "${checkAddr}")\e[0m"
  echo

  #Get rewards state data for the address. When in online mode of course from the node and the chain, in offlinemode from the transferFile
  if ${onlineMode}; then
    rewardsJSON=$(${cardanocli} query stake-address-info --address ${checkAddr} ${magicparam} | jq -rc .)
  else
    rewardsJSON=$(cat ${offlineFile} | jq -r ".address.\"${checkAddr}\".rewardsJSON" 2> /dev/null)
    if [[ "${rewardsJSON}" == null ]]; then echo -e "\e[35mAddress not included in the offline transferFile, please include it first online!\e[0m\n"; exit; fi
  fi
  checkError "$?"; if [ $?
-ne 0 ]; then exit $?; fi

  rewardsEntryCnt=$(jq -r 'length' <<< ${rewardsJSON})

  if [[ ${rewardsEntryCnt} == 0 ]]; then echo -e "\e[35mStaking Address is not on the chain, register it first !\e[0m\n"; exit 1;
  else echo -e "\e[0mFound:\e[32m ${rewardsEntryCnt}\e[0m entries\n";
  fi

  rewardsSum=0

  #one entry per stake-address-info result; sum up the rewards over all entries
  for (( tmpCnt=0; tmpCnt<${rewardsEntryCnt}; tmpCnt++ ))
  do
    rewardsAmount=$(jq -r ".[${tmpCnt}].rewardAccountBalance" <<< ${rewardsJSON})
    rewardsAmountInADA=$(bc <<< "scale=6; ${rewardsAmount} / 1000000")

    delegationPoolID=$(jq -r ".[${tmpCnt}].delegation" <<< ${rewardsJSON})

    rewardsSum=$((${rewardsSum}+${rewardsAmount}))
    rewardsSumInADA=$(bc <<< "scale=6; ${rewardsSum} / 1000000")

    echo -ne "[$((${tmpCnt}+1))]\t"

    #Checking about rewards on the stake address
    if [[ ${rewardsAmount} == 0 ]]; then echo -e "\e[35mNo rewards found on the stake Addr !\e[0m";
    else echo -e "Entry Rewards: \e[33m${rewardsAmountInADA} ADA / ${rewardsAmount} lovelaces\e[0m"
    fi

    #If delegated to a pool, show the current pool ID
    if [[ ! ${delegationPoolID} == null ]]; then echo -e " \tAccount is delegated to a Pool with ID: \e[32m${delegationPoolID}\e[0m"; else echo -e " \tAccount is not delegated to a Pool !"; fi

    echo

  done

  if [[ ${rewardsEntryCnt} -gt 1 ]]; then echo -e " \t-----------------------------------------\n"; echo -e " \tTotal Rewards: \e[33m${rewardsSumInADA} ADA / ${rewardsSum} lovelaces\e[0m\n"; fi

else #unsupported address type

  echo -e "\e[35mAddress type unknown!\e[0m";
fi
\ No newline at end of file
diff --git a/01_sendLovelaces.sh b/01_sendLovelaces.sh
new file mode 100755
index 0000000..4248074
--- /dev/null
+++ b/01_sendLovelaces.sh
@@ -0,0 +1,641 @@
#!/bin/bash

# Script is brought to you by ATADA Stakepool, Telegram @atada_stakepool

#load variables and functions from common.sh
.
"$(dirname "$0")"/00_common.sh + + +if [ $# -ge 3 ]; then + fromAddr="$(dirname $1)/$(basename $1 .addr)"; fromAddr=${fromAddr/#.\//}; + toAddr="$(dirname $2)/$(basename $2 .addr)"; toAddr=${toAddr/#.\//}; + lovelacesToSend="${3^^}"; + else + cat >&2 < + [Opt: metadata.json/.cbor] + [Opt: list of UTXOs to use, | is the separator] + [Opt: Message comment, starting with "msg: ...", | is the separator] + [Opt: encrypted message mode "enc:basic". Currently only 'basic' mode is available.] + [Opt: passphrase for encrypted message mode "pass:", the default passphrase is 'cardano' is not provided] + [Opt: no of input UTXOs limitation, starting with "utxolimit: ..."] + [Opt: skip input UTXOs that contain assets (hex-format), starting with "skiputxowithasset: (assetName)", | is the separator] + [Opt: only input UTXOs that contain assets (hex-format), starting with "onlyutxowithasset: (assetName)", | is the separator] +Optional parameters: +- If you wanna attach a Transaction-Message like a short comment, invoice-number, etc with the transaction: + You can just add one or more Messages in quotes starting with "msg: ..." as a parameter. Max. 64chars / Message + "msg: This is a short comment for the transaction" ... that would be a one-liner comment + "msg: This is a the first comment line|and that is the second one" ... that would be a two-liner comment, | is the separator ! + If you also wanna encrypt it, set the encryption mode to basic by adding "enc: basic" to the parameters. + To change the default passphrase 'cardano' to you own, add the passphrase via "pass:" +- If you wanna attach a Metadata JSON: + You can add a Metadata.json (Auxilierydata) filename as a parameter to send it alone with the transaction. + There will be a simple basic check that the transaction-metadata.json file is valid. +- If you wanna attach a Metadata CBOR: + You can add a Metadata.cbor (Auxilierydata) filename as a parameter to send it along with the transaction. 
+ Catalyst-Voting for example is done via the voting_metadata.cbor file. +- In rare cases you wanna define the exact UTXOs that should be used for sending Funds out: + "UTXO1#Index" ... to specify one UTXO, must be in quotes "..." + "UTXO1#Index|UTXO2#Index" ... to specify more UTXOs provide them with the | as separator, must be in quotes "..." +- In rare cases you wanna define the maximum count of input UTXOs that will be used for building the Transaction: + "utxolimit: xxx" ... to specify xxx number of input UTXOs to be used as maximum + "utxolimit: 300" ... to specify a maximum of 300 input UTXOs that will be used for the transaction +- In rare cases you wanna skip input UTXOs that contains one or more defined Assets policyIDs(+assetName) in hex-format: + "skiputxowithasset: yyy" ... to skip all input UTXOs that contains assets with the policyID yyy + "skiputxowithasset: yyy|zzz" ... to skip all input UTXOs that contains assets with the policyID yyy or zzz +- In rare cases you wanna only use input UTXOs that contains one or more defined Assets policyIDs(+assetName) in hex-format: + "onlyutxowithasset: yyy" ... to skip all input UTXOs that contains assets with the policyID yyy + "onlyutxowithasset: yyy|zzz" ... to skip all input UTXOs that contains assets with the policyID yyy or zzz +EOF + exit 1; fi + +#Check if Parameter 3 is a number or the keywords ALL or ALLFUNDS +if [[ ! "${lovelacesToSend}" == "ALL" && ! "${lovelacesToSend}" == "ALLFUNDS" && ! 
"${lovelacesToSend}" == "MIN" && -z "${lovelacesToSend##*[!0-9]*}" ]]; then echo -e "\n\e[35mERROR - No amount of lovelaces (or keyword ALL/ALLFUNDS) specified.\n\e[0m"; exit 1; fi

#Check all optional parameters about their types and set the corresponding variables
#Starting with the 4th parameter (index3) up to the last parameter
#FIX(review): default was "enc=\"\"" but the variable read later is ${encryption}; initialize the one actually used
metafileParameter=""; metafile=""; filterForUTXO=""; transactionMessage="{}"; encryption=""; passphrase="cardano" #Setting defaults

paramCnt=$#;
#IFS=' ' read -ra allParameters <<< "${@}" #old, because the split on spaces is not working with messages
allParameters=( "$@" )
for (( tmpCnt=3; tmpCnt<${paramCnt}; tmpCnt++ ))
 do
  paramValue=${allParameters[$tmpCnt]}
  #echo -n "${tmpCnt}: ${paramValue} -> "

  #Check if an additional metadata.json/.cbor was set as parameter (not a Message, not a UTXO#IDX, not empty, not a number)
  if [[ ! "${paramValue,,}" =~ ^msg:(.*)$ ]] && [[ ! "${paramValue,,}" =~ ^enc:(.*)$ ]] && [[ ! "${paramValue,,}" =~ ^pass:(.*)$ ]] && [[ ! "${paramValue,,}" =~ ^utxolimit:(.*)$ ]] && [[ ! "${paramValue,,}" =~ ^onlyutxowithasset:(.*)$ ]] && [[ ! "${paramValue,,}" =~ ^skiputxowithasset:(.*)$ ]] && [[ ! "${paramValue}" =~ ^([[:xdigit:]]+#[[:digit:]]+(\|?)){1,}$ ]] && [[ ! ${paramValue} == "" ]] && [ -z "${paramValue##*[!0-9]*}" ]; then

    metafile=${paramValue}; metafileExt=${metafile##*.}
    if [[ -f "${metafile}" && "${metafileExt^^}" == "JSON" ]]; then #its a json file
      #Do a simple basic check if the metadatum is in the 0..65535 range
      metadatum=$(jq -r "keys_unsorted[0]" "${metafile}" 2> /dev/null)
      if [[ $?
-ne 0 ]]; then echo "ERROR - '${metafile}' is not a valid JSON file"; exit 1; fi
      #Check if it is null, a number, lower than zero, higher than 65535, otherwise exit with an error
      if [ "${metadatum}" == null ] || [ -z "${metadatum##*[!0-9]*}" ] || [ "${metadatum}" -lt 0 ] || [ "${metadatum}" -gt 65535 ]; then
        echo "ERROR - MetaDatum Value '${metadatum}' in '${metafile}' must be in the range of 0..65535!"; exit 1; fi
      metafileParameter="${metafileParameter}--metadata-json-file ${metafile} "; metafileList="${metafileList}'${metafile}' "
    elif [[ -f "${metafile}" && "${metafileExt^^}" == "CBOR" ]]; then #its a cbor file
      metafileParameter="${metafileParameter}--metadata-cbor-file ${metafile} "; metafileList="${metafileList}'${metafile}' "
    else echo -e "The specified Metadata JSON/CBOR-File '${metafile}' does not exist. Fileextension must be '.json' or '.cbor' Please try again."; exit 1;
    fi

  #Check if an additional UTXO#IDX filter was set as parameter "hex#num(|)" at least 1 time, but can be more often
  elif [[ "${paramValue}" =~ ^([[:xdigit:]]+#[[:digit:]]+(\|?)){1,}$ ]]; then filterForUTXO="${paramValue}";

  #Check if its a MessageComment. Adding it to the JSON array if the length is <= 64 chars
  elif [[ "${paramValue,,}" =~ ^msg:(.*)$ ]]; then #if the parameter starts with "msg:" then add it
    msgString=$(trimString "${paramValue:4}");

    #Split the messages within the parameter at the "|" char
    IFS='|' read -ra allMessages <<< "${msgString}"

    #Add each message to the transactionMessage JSON (CIP-20 style key "674")
    for (( tmpCnt2=0; tmpCnt2<${#allMessages[@]}; tmpCnt2++ ))
    do
      tmpMessage=${allMessages[tmpCnt2]}
      if [[ $(byteLength "${tmpMessage}") -le 64 ]]; then
        transactionMessage=$( jq ".\"674\".msg += [ \"${tmpMessage}\" ]" <<< ${transactionMessage} 2> /dev/null);
        if [ $?
-ne 0 ]; then echo -e "\n\e[35mMessage-Adding-ERROR: \"${tmpMessage}\" contain invalid chars for a JSON!\n\e[0m"; exit 1; fi
      else echo -e "\n\e[35mMessage-Adding-ERROR: \"${tmpMessage}\" is too long, max. 64 bytes allowed, yours is $(byteLength "${tmpMessage}") bytes long!\n\e[0m"; exit 1;
      fi
    done

  #Check if its an utxo amount limitation
  elif [[ "${paramValue,,}" =~ ^utxolimit:(.*)$ ]]; then #if the parameter starts with "utxolimit:" then set the utxolimit
    utxoLimitCnt=$(trimString "${paramValue:10}");
    if [[ ${utxoLimitCnt} -le 0 ]]; then echo -e "\n\e[35mUTXO-Limit-ERROR: Please use a number value greater than zero!\n\e[0m"; exit 1; fi

  #Check if its a skipUtxoWithAsset filter
  elif [[ "${paramValue,,}" =~ ^skiputxowithasset:(.*)$ ]]; then #if the parameter starts with "skiputxowithasset:" then set the skipUtxoWithAsset variable
    skipUtxoWithAssetTmp=$(trimString "${paramValue:18}"); skipUtxoWithAssetTmp=${skipUtxoWithAssetTmp,,}; #read the value and convert it to lowercase
    #a valid entry is 28 bytes policyID hex, optionally followed by up to 32 bytes assetName hex; entries separated by |
    if [[ ! "${skipUtxoWithAssetTmp}" =~ ^(([[:xdigit:]][[:xdigit:]]){28,60}+(\|?)){1,}$ ]]; then
      echo -e "\n\e[35mSkip-UTXO-With-Asset-ERROR: The given asset '${skipUtxoWithAssetTmp}' is not a valid policy(+assetname) hex string!\n\e[0m"; exit 1; fi
    skipUtxoWithAssetTmp=${skipUtxoWithAssetTmp//|/ }; #replace the | with a space so it can be read as an array
    skipUtxoWithAsset=""
    #Check each entry (separated via a | char) if they contain also assethex-parts, if so place a . in the middle. Concate the final string.
    for tmpEntry in ${skipUtxoWithAssetTmp}; do
      if [[ ${#tmpEntry} -gt 56 ]]; then skipUtxoWithAsset+="${tmpEntry:0:56}.${tmpEntry:56}|"; else skipUtxoWithAsset+="${tmpEntry}|"; fi #representation in the rawquery output is .
+ done + skipUtxoWithAsset=${skipUtxoWithAsset%?}; #remove the last char "|" + + #Check if its an onlyUtxoWithPolicy set + elif [[ "${paramValue,,}" =~ ^onlyutxowithasset:(.*)$ ]]; then #if the parameter starts with "onylutxowithasset:" then set the onlyUtxoWithAsset variable + onlyUtxoWithAssetTmp=$(trimString "${paramValue:18}"); onlyUtxoWithAssetTmp=${onlyUtxoWithAssetTmp,,}; #read the value and convert it to lowercase + if [[ ! "${onlyUtxoWithAssetTmp}" =~ ^(([[:xdigit:]][[:xdigit:]]){28,60}+(\|?)){1,}$ ]]; then + echo -e "\n\e[35mOnly-UTXO-With-Asset-ERROR: The given asset '${onlyUtxoWithAssetTmp}' is not a valid policy(+assetname) hex string!\n\e[0m"; exit 1; fi + onlyUtxoWithAssetTmp=${onlyUtxoWithAssetTmp//|/ }; #replace the | with a space so it can be read as an array + onlyUtxoWithAsset="" + #Check each entry (separated via a | char) if they contain also assethex-parts, if so place a . in the middle. Concate the final string. + for tmpEntry in ${onlyUtxoWithAssetTmp}; do + if [[ ${#tmpEntry} -gt 56 ]]; then onlyUtxoWithAsset+="${tmpEntry:0:56}.${tmpEntry:56}|"; else onlyUtxoWithAsset+="${tmpEntry}|"; fi #representation in the rawquery output is . + done + onlyUtxoWithAsset=${onlyUtxoWithAsset%?}; #remove the last char "|" + + #Check if its a transaction encryption + elif [[ "${paramValue,,}" =~ ^enc:(.*)$ ]]; then #if the parameter starts with "enc:" then set the encryption variable + encryption=$(trimString "${paramValue:4}"); + + #Check if its a transaction encryption passphrase + elif [[ "${paramValue,,}" =~ ^pass:(.*)$ ]]; then #if the parameter starts with "passphrase:" then set the passphrase variable + passphrase="${paramValue:5}"; #don't to a trimstring here, because also spaces are a valid passphrase ! + + fi #end of different parameters check + + done + + + +if [ ! -f "${fromAddr}.addr" ]; then echo -e "\n\e[35mERROR - \"${fromAddr}.addr\" does not exist! Please create it first with script 03a or 02.\e[0m"; exit 1; fi +if ! 
[[ -f "${fromAddr}.skey" || -f "${fromAddr}.hwsfile" ]]; then echo -e "\n\e[35mERROR - \"${fromAddr}.skey/hwsfile\" does not exist! Please create it first with script 03a or 02.\e[0m"; exit 1; fi

#Check if toAddr file does not exists, make a dummy one in the temp directory and fill in the given parameter as the hash address
if [ ! -f "${toAddr}.addr" ]; then
  toAddr=$(trimString "${toAddr}") #trim it if spaces present

  #check if its a regular cardano payment address
  typeOfAddr=$(get_addressType "${toAddr}");
  if [[ ${typeOfAddr} == ${addrTypePayment} ]]; then echo "$(basename ${toAddr})" > ${tempDir}/tempTo.addr; toAddr="${tempDir}/tempTo";

  #check if its an adahandle
  elif checkAdaHandleFormat "${toAddr}"; then
    if ${offlineMode}; then echo -e "\n\e[35mERROR - Adahandles are only supported in Online mode.\n\e[0m"; exit 1; fi
    adahandleName=${toAddr,,}
    assetNameHex=$(convert_assetNameASCII2HEX ${adahandleName:1})
    #query adahandle asset holding address via koios
    showProcessAnimation "Query Adahandle into holding address: " &
    response=$(curl -s -m 10 -X GET "${koiosAPI}/asset_address_list?_asset_policy=${adahandlePolicyID}&_asset_name=${assetNameHex}" -H "Accept: application/json" 2> /dev/null)
    stopProcessAnimation;
    #check if the received json only contains one entry in the array (will also not be 1 if not a valid json)
    if [[ $(jq ".
| length" 2> /dev/null <<< ${response}) -ne 1 ]]; then echo -e "\n\e[35mCould not resolve \$adahandle '${adahandleName}' to an address.\n\e[0m"; exit 1; fi
    toAddr=$(jq -r ".[0].payment_address" <<< ${response} 2> /dev/null)
    typeOfAddr=$(get_addressType "${toAddr}");
    if [[ ${typeOfAddr} != ${addrTypePayment} ]]; then echo -e "\n\e[35mERROR - Resolved address '${toAddr}' is not a valid payment address.\n\e[0m"; exit 1; fi;
    #double-check on-chain that the resolved address really holds the adahandle NFT
    showProcessAnimation "Verify Adahandle is on resolved address: " &
    utxo=$(${cardanocli} query utxo --address ${toAddr} ${magicparam} ); stopProcessAnimation; checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi;
    if [[ $(grep "${adahandlePolicyID}.${assetNameHex} " <<< ${utxo} | wc -l) -ne 1 ]]; then
      echo -e "\n\e[35mERROR - Resolved address '${toAddr}' does not hold the \$adahandle '${adahandleName}' !\n\e[0m"; exit 1; fi;
    echo -e "\e[0mFound \$adahandle '${adahandleName}' on Address:\e[32m ${toAddr}\e[0m\n"
    echo "$(basename ${toAddr})" > ${tempDir}/adahandle-resolve.addr; toAddr="${tempDir}/adahandle-resolve";

  #otherwise post an error message
  else echo -e "\n\e[35mERROR - Destination Address can't be resolved. Maybe filename wrong, or not a payment-address.\n\e[0m"; exit 1;

  fi
fi


#Check if there are transactionMessages, if so, save the messages to a xxx.transactionMessage.json temp-file and add it to the list. Encrypt it if enabled.
if [[ ! "${transactionMessage}" == "{}" ]]; then

  transactionMessageMetadataFile="${tempDir}/$(basename ${fromAddr}).transactionMessage.json";
  tmp=$( jq . <<< ${transactionMessage} 2> /dev/null)
  if [ $? -eq 0 ]; then #json is valid, so no bad chars found

    #Check if encryption is enabled, encrypt the msg part
    if [[ "${encryption,,}" == "basic" ]]; then
      #check openssl
      if !
exists openssl; then echo -e "\e[33mYou need 'openssl', its needed to encrypt the transaction messages !\n\nInstall it on Ubuntu/Debian like:\n\e[97msudo apt update && sudo apt -y install openssl\n\n\e[33mThx! :-)\e[0m\n"; exit 2; fi
      #extract the plain msg array and encrypt it line-by-line (base64) with the given passphrase
      msgPart=$( jq -crM ".\"674\".msg" <<< ${transactionMessage} 2> /dev/null )
      checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
      encArray=$( openssl enc -e -aes-256-cbc -pbkdf2 -iter 10000 -a -k "${passphrase}" <<< ${msgPart} | awk {'print "\""$1"\","'} | sed '$ s/.$//' )
      checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
      #compose new transactionMessage by using the encArray as the msg and also add the encryption mode 'basic' entry
      tmp=$( jq ".\"674\".msg = [ ${encArray} ]" <<< '{"674":{"enc":"basic"}}' )
      checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi
    fi

    echo "${tmp}" > ${transactionMessageMetadataFile}; metafileParameter="${metafileParameter}--metadata-json-file ${transactionMessageMetadataFile} "; #add it to the list of metadata.jsons to attach

  else
    #FIX(review): was "$${transactionMessage}" — "$$" expands to the shell PID and the message was never shown
    echo -e "\n\e[35mERROR - Additional Transaction Message-Metafile is not valid:\n\n${transactionMessage}\n\nPlease check your added Message-Parameters.\n\e[0m"; exit 1;
  fi

fi

sendFromAddr=$(cat ${fromAddr}.addr)
sendToAddr=$(cat ${toAddr}.addr)

check_address "${sendFromAddr}"
check_address "${sendToAddr}"

showToAddr=${adahandleName:-"${toAddr}.addr"} #shows the adahandle instead of the destination address file if available

echo -e "\e[0mSending lovelaces from Address\e[32m ${fromAddr}.addr\e[0m to Address\e[32m ${showToAddr}\e[0m:"
echo

#get live values
currentTip=$(get_currentTip)
ttl=$(get_currentTTL)
currentEPOCH=$(get_currentEpoch)

echo -e "\e[0mCurrent Slot-Height:\e[32m ${currentTip} \e[0m(setting TTL[invalid_hereafter] to ${ttl})"
echo


echo -e "\e[0mSource Address ${fromAddr}.addr:\e[32m ${sendFromAddr} \e[90m"
echo -e "\e[0mDestination Address ${showToAddr}:\e[32m ${sendToAddr} \e[90m"
echo

#
# Checking UTXO Data
of the source address and gathering data about total lovelaces and total assets
#

  #Get UTX0 Data for the address. When in online mode of course from the node and the chain, in offlinemode from the transferFile
  if ${onlineMode}; then
    showProcessAnimation "Query-UTXO: " &
    utxo=$(${cardanocli} query utxo --address ${sendFromAddr} ${magicparam} ); stopProcessAnimation; checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi;
    if [[ ${skipUtxoWithAsset} != "" ]]; then utxo=$(echo "${utxo}" | egrep -v "${skipUtxoWithAsset}" ); fi #if its set to keep utxos that contains certain policies, filter them out
    if [[ ${onlyUtxoWithAsset} != "" ]]; then utxo=$(echo "${utxo}" | egrep "${onlyUtxoWithAsset}" ); utxo=$(echo -e "Header\n-----\n${utxo}"); fi #only use given utxos. rebuild the two header lines
    if [[ ${utxoLimitCnt} -gt 0 ]]; then utxo=$(echo "${utxo}" | head -n $(( ${utxoLimitCnt} + 2 )) ); fi #if there was a utxo cnt limit set, reduce it (+2 for the header)
    showProcessAnimation "Convert-UTXO: " &
    utxoJSON=$(generate_UTXO "${utxo}" "${sendFromAddr}"); stopProcessAnimation;
  else
    readOfflineFile; #Reads the offlinefile into the offlineJSON variable
    utxoJSON=$(jq -r ".address.\"${sendFromAddr}\".utxoJSON" <<< ${offlineJSON})
    if [[ "${utxoJSON}" == null ]]; then echo -e "\e[35mPayment-Address not included in the offline transferFile, please include it first online!\e[0m\n"; exit 1; fi
  fi

  #Only use UTXOs specified in the extra parameter if present
  if [[ !
"${filterForUTXO}" == "" ]]; then echo -e "\e[0mUTXO-Mode: \e[32mOnly using the UTXO with Hash ${filterForUTXO}\e[0m\n"; utxoJSON=$(filterFor_UTXO "${utxoJSON}" "${filterForUTXO}"); fi

  txcnt=$(jq length <<< ${utxoJSON}) #Get number of UTXO entries (Hash#Idx), this is also the number of --tx-in for the transaction
  if [[ ${txcnt} == 0 ]]; then echo -e "\e[35mNo funds on the Source Address!\e[0m\n"; exit 1; else echo -e "\e[32m${txcnt} UTXOs\e[0m found on the Source Address!\n"; fi

  totalLovelaces=0
  totalAssetsJSON="{}"; #Building a total JSON with the different assetstypes "policyIdHash.name", amount and name
  totalPolicyIDsLIST=""; #Buffer for the policyIDs, will be sorted/uniq/linecount at the end of the query
  assetsOutString=""; #This will hold the String to append on the --tx-out if assets present or it will be empty

  #For each utxo entry, check the utxo#index and check if there are also any assets in that utxo#index
  #LEVEL 1 - different UTXOs
  #The readarrays below split the utxoJSON into parallel arrays indexed by UTXO entry.

  readarray -t utxoHashIndexArray <<< $(jq -r "keys_unsorted[]" <<< ${utxoJSON})
  readarray -t utxoLovelaceArray <<< $(jq -r "flatten | .[].value.lovelace" <<< ${utxoJSON})
  readarray -t assetsEntryCntArray <<< $(jq -r "flatten | .[].value | del (.lovelace) | length" <<< ${utxoJSON})
  readarray -t assetsEntryJsonArray <<< $(jq -c "flatten | .[].value | del (.lovelace)" <<< ${utxoJSON})
  readarray -t utxoDatumHashArray <<< $(jq -r "flatten | .[].datumhash" <<< ${utxoJSON})

  for (( tmpCnt=0; tmpCnt<${txcnt}; tmpCnt++ ))
  do
    utxoHashIndex=${utxoHashIndexArray[${tmpCnt}]}
    utxoAmount=${utxoLovelaceArray[${tmpCnt}]} #Lovelaces
    totalLovelaces=$(bc <<< "${totalLovelaces} + ${utxoAmount}" ) #bc keeps arbitrary precision for the lovelace sum
#   echo -e "Hash#Index: ${utxoHashIndex}\tAmount: ${utxoAmount}";
    echo -e "Hash#Index: ${utxoHashIndex}\tADA: $(convertToADA ${utxoAmount}) \e[90m(${utxoAmount} lovelaces)\e[0m";
    if [[ !
"${utxoDatumHashArray[${tmpCnt}]}" == null ]]; then echo -e " DatumHash: ${utxoDatumHashArray[${tmpCnt}]}"; fi
    assetsEntryCnt=${assetsEntryCntArray[${tmpCnt}]}

    if [[ ${assetsEntryCnt} -gt 0 ]]; then

      assetsJSON=${assetsEntryJsonArray[${tmpCnt}]}
      assetHashIndexArray=(); readarray -t assetHashIndexArray <<< $(jq -r "keys_unsorted[]" <<< ${assetsJSON})
      assetNameCntArray=(); readarray -t assetNameCntArray <<< $(jq -r "flatten | .[] | length" <<< ${assetsJSON})

      #LEVEL 2 - different policyIDs
      for (( tmpCnt2=0; tmpCnt2<${assetsEntryCnt}; tmpCnt2++ ))
      do
        assetHash=${assetHashIndexArray[${tmpCnt2}]} #assetHash = policyID
        totalPolicyIDsLIST+="${assetHash}\n"

        assetsNameCnt=${assetNameCntArray[${tmpCnt2}]}
        assetNameArray=(); readarray -t assetNameArray <<< $(jq -r ".\"${assetHash}\" | keys_unsorted[]" <<< ${assetsJSON})
        assetAmountArray=(); readarray -t assetAmountArray <<< $(jq -r ".\"${assetHash}\" | flatten | .[]" <<< ${assetsJSON})

        #LEVEL 3 - different names under the same policyID
        for (( tmpCnt3=0; tmpCnt3<${assetsNameCnt}; tmpCnt3++ ))
        do
          assetName=${assetNameArray[${tmpCnt3}]}
          assetAmount=${assetAmountArray[${tmpCnt3}]}
          assetBech=$(convert_tokenName2BECH "${assetHash}${assetName}" "")
          #an empty assetName needs no "." separator in the "policyID.assetName" key
          if [[ "${assetName}" == "" ]]; then point=""; else point="."; fi
          oldValue=$(jq -r ".\"${assetHash}${point}${assetName}\".amount" <<< ${totalAssetsJSON})
          newValue=$(bc <<< "${oldValue}+${assetAmount}")
          assetTmpName=$(convert_assetNameHEX2ASCII_ifpossible "${assetName}") #if it starts with a . -> ASCII showable name, otherwise the HEX-String
          totalAssetsJSON=$( jq ". += {\"${assetHash}${point}${assetName}\":{amount: \"${newValue}\", name: \"${assetTmpName}\", bech: \"${assetBech}\"}}" <<< ${totalAssetsJSON})
          if [[ "${assetTmpName:0:1}" == "."
]]; then assetTmpName=${assetTmpName:1}; else assetTmpName="{${assetTmpName}}"; fi

          #adahandle assets get a special display line, everything else the default one
          case ${assetHash} in
            "${adahandlePolicyID}" ) #$adahandle
              echo -e "\e[90m Asset: ${assetBech} \e[33mADA Handle: \$$(convert_assetNameHEX2ASCII ${assetName}) ${assetTmpName}\e[0m"
              ;;
            * ) #default
              echo -e "\e[90m Asset: ${assetBech} Amount: ${assetAmount} ${assetTmpName}\e[0m"
              ;;
          esac

        done
      done
    fi
    txInString="${txInString} --tx-in ${utxoHashIndex}" #collect every UTXO as a --tx-in for the transaction build
  done
  echo -e "\e[0m-----------------------------------------------------------------------------------------------------"
  echo -e "Total ADA on the Address:\e[32m $(convertToADA ${totalLovelaces}) ADA / ${totalLovelaces} lovelaces \e[0m\n"

  totalPolicyIDsCnt=$(echo -ne "${totalPolicyIDsLIST}" | sort | uniq | wc -l)
  totalAssetsCnt=$(jq length <<< ${totalAssetsJSON});
  if [[ ${totalAssetsCnt} -gt 0 ]]; then
    echo -e "\e[32m${totalAssetsCnt} Asset-Type(s) / ${totalPolicyIDsCnt} different PolicyIDs\e[0m found on the Address!\n"
    printf "\e[0m%-56s%11s %16s %-44s %7s %s\n" "PolicyID:" "Asset-Name:" "Total-Amount:" "Bech-Format:" "Ticker:" "Meta-Name:"

    totalAssetsJSON=$(jq --sort-keys . <<< ${totalAssetsJSON}) #sort the json by the hashname
    assetHashNameArray=(); readarray -t assetHashNameArray <<< $(jq -r "keys_unsorted[]" <<< ${totalAssetsJSON})
    assetAmountArray=(); readarray -t assetAmountArray <<< $(jq -r "flatten | .[].amount" <<< ${totalAssetsJSON})
    assetNameArray=(); readarray -t assetNameArray <<< $(jq -r "flatten | .[].name" <<< ${totalAssetsJSON})
    assetBechArray=(); readarray -t assetBechArray <<< $(jq -r "flatten | .[].bech" <<< ${totalAssetsJSON})

    for (( tmpCnt=0; tmpCnt<${totalAssetsCnt}; tmpCnt++ ))
    do
      assetHashName=${assetHashNameArray[${tmpCnt}]}
      assetAmount=${assetAmountArray[${tmpCnt}]}
      assetName=${assetNameArray[${tmpCnt}]}
      assetBech=${assetBechArray[${tmpCnt}]}
      assetHashHex="${assetHashName//./}" #remove a .
if present, we need a clean subject here for the registry request + + if $queryTokenRegistry; then if $onlineMode; then metaResponse=$(curl -sL -m 20 "${tokenMetaServer}/${assetHashHex}"); else metaResponse=$(jq -r ".tokenMetaServer.\"${assetHashHex}\"" <<< ${offlineJSON}); fi + metaAssetName=$(jq -r ".name.value | select (.!=null)" 2> /dev/null <<< ${metaResponse}); if [[ ! "${metaAssetName}" == "" ]]; then metaAssetName="${metaAssetName} "; fi + metaAssetTicker=$(jq -r ".ticker.value | select (.!=null)" 2> /dev/null <<< ${metaResponse}) + fi + + if [[ "${assetName}" == "." ]]; then assetName=""; fi + + printf "\e[90m%-70s \e[32m%16s %44s \e[90m%-7s \e[36m%s\e[0m\n" "${assetHashName:0:56}${assetName}" "${assetAmount}" "${assetBech}" "${metaAssetTicker}" "${metaAssetName}" + if [[ $(bc <<< "${assetAmount}>0") -eq 1 ]]; then assetsOutString+="+${assetAmount} ${assetHashName}"; fi #only include in the sendout if more than zero + done + fi + + +echo + +#There are metadata file attached, list them: +if [[ ! "${metafileList}" == "" ]]; then echo -e "\e[0mInclude Metadata-File(s):\e[32m ${metafileList}\e[0m\n"; fi + +#There are transactionMessages attached, show the metadatafile: +if [[ ! 
"${transactionMessage}" == "{}" ]]; then + if [[ "${encArray}" ]]; then #if there is an encryption, show the original Metadata first with the encryption paramteters + echo -e "\e[0mOriginal Transaction-Message:\n\e[90m"; jq -rM <<< ${transactionMessage}; echo -e "\e[0m"; + echo -e "\e[0mEncrypted Transaction-Message mode \e[32m${encryption,,}\e[0m with Passphrase '\e[32m${passphrase}\e[0m'"; + echo + fi + echo -e "\e[0mInclude Transaction-Message-Metadata-File:\e[32m ${transactionMessageMetadataFile}\n\e[90m"; cat ${transactionMessageMetadataFile}; echo -e "\e[0m"; +fi + +#Read ProtocolParameters +if ${onlineMode}; then + protocolParametersJSON=$(${cardanocli} query protocol-parameters ${magicparam} ); #onlinemode + else + protocolParametersJSON=$(jq ".protocol.parameters" <<< ${offlineJSON}); #offlinemode + fi +checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + +# +# Depending on the input of lovelaces / keyword, set the right rxcnt (one receiver or two receivers) +# + +case "${lovelacesToSend}" in + + "ALLFUNDS" ) #If keyword ALLFUNDS was used, send all lovelaces and all assets to the destination address + rxcnt=1;; + + "ALL" ) #If keyword ALL was used, send all lovelaces to the destination address, but send back all the assets if available + if [[ ${totalAssetsCnt} -gt 0 ]]; then + rxcnt=2; #assets on the address, they must be sent back to the source + else + rxcnt=1; #no assets on the address + fi;; + + * ) #If no keyword was used, its just the amount of lovelacesToSend + rxcnt=2;; +esac + +#Generate Dummy-TxBody file for fee calculation +txBodyFile="${tempDir}/dummy.txbody" +rm ${txBodyFile} 2> /dev/null +if [[ ${rxcnt} == 1 ]]; then #Sending ALLFUNDS or sending ALL lovelaces and no assets on the address + ${cardanocli} transaction build-raw ${nodeEraParam} ${txInString} --tx-out "${sendToAddr}+1000000${assetsOutString}" --invalid-hereafter ${ttl} --fee 0 ${metafileParameter} --out-file ${txBodyFile} + checkError "$?"; if [ $? 
-ne 0 ]; then exit $?; fi + else #Sending chosen amount of lovelaces or ALL lovelaces but return the assets to the address + ${cardanocli} transaction build-raw ${nodeEraParam} ${txInString} --tx-out "${sendToAddr}+1000000${assetsOutString}" --tx-out ${sendToAddr}+1000000 --invalid-hereafter ${ttl} --fee 0 ${metafileParameter} --out-file ${txBodyFile} + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + fi +fee=$(${cardanocli} transaction calculate-min-fee --tx-body-file ${txBodyFile} --protocol-params-file <(echo ${protocolParametersJSON}) --tx-in-count ${txcnt} --tx-out-count ${rxcnt} ${magicparam} --witness-count 1 --byron-witness-count 0 | awk '{ print $1 }') +checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + +#minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendToAddr}+0${assetsOutString}") + +echo -e "\e[0mMinimum Transaction Fee for ${txcnt}x TxIn & ${rxcnt}x TxOut: \e[32m $(convertToADA ${fee}) ADA / ${fee} lovelaces \e[90m" +#echo -e "\e[0mMinimum UTXO value for a Transaction: \e[32m ${minOutUTXO} lovelaces \e[90m" +echo + +# +# Depending on the input of lovelaces / keyword, set the right amount of lovelacesToSend, lovelacesToReturn and also check about sendinglimits like minOutUTXO for returning assets if available +# + +case "${lovelacesToSend}" in + + "ALLFUNDS" ) #If keyword ALLFUNDS was used, send all lovelaces and all assets to the destination address - rxcnt=1 + lovelacesToSend=$(( ${totalLovelaces} - ${fee} )) + minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendToAddr}+1000000${assetsOutString}") + echo -e "\e[0mLovelaces to send to ${showToAddr}: \e[33m ${lovelacesToSend} lovelaces \e[90m" + if [[ ${lovelacesToSend} -lt ${minOutUTXO} ]]; then echo -e "\e[35mNot enough funds on the source Addr! 
Minimum UTXO value is ${minOutUTXO} lovelaces.\e[0m"; exit 1; fi + if [[ ${totalAssetsCnt} -gt 0 ]]; then #assets are also send completly over, so display them + echo -ne "\e[0m Assets to send to ${showToAddr}: \e[33m " + for (( tmpCnt=0; tmpCnt<${totalAssetsCnt}; tmpCnt++ )) + do + assetHashName=$(jq -r "keys[${tmpCnt}]" <<< ${totalAssetsJSON}) + assetAmount=$(jq -r ".\"${assetHashName}\".amount" <<< ${totalAssetsJSON}) + assetName=$(jq -r ".\"${assetHashName}\".name" <<< ${totalAssetsJSON}) + echo -ne "${assetAmount} ${assetName} / " + done + echo + fi + ;; + + "ALL" ) #If keyword ALL was used, send all lovelaces to the destination address, but send back all the assets if available + if [[ ${totalAssetsCnt} -gt 0 ]]; then + #assets on the address, they must be sent back to the source address with the minOutUTXO amount of lovelaces, rxcnt=2 + minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendFromAddr}+1000000${assetsOutString}") + lovelacesToSend=$(( ${totalLovelaces} - ${fee} - ${minOutUTXO} )) #so send less over to + lovelacesToReturn=${minOutUTXO} #minimum amount to return all the assets to the source address + echo -e "\e[0mLovelaces to send to ${showToAddr}: \e[33m $(convertToADA ${lovelacesToSend}) ADA / ${lovelacesToSend} lovelaces \e[90m" + echo -e "\e[0mLovelaces to return to ${fromAddr}.addr: \e[32m $(convertToADA ${lovelacesToReturn}) ADA / ${lovelacesToReturn} lovelaces \e[90m (to preserve all the assets)" + if [[ ${lovelacesToSend} -lt ${minOutUTXO} ]]; then echo -e "\e[35mNot enough funds on the source Addr! 
Minimum UTXO value is ${minOutUTXO} lovelaces.\e[0m"; exit 1; fi + else + #no assets on the address, so just send over all the lovelaces, rxcnt=1 + minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendToAddr}+1000000") + lovelacesToSend=$(( ${totalLovelaces} - ${fee} )) + echo -e "\e[0mLovelaces to send to ${showToAddr}: \e[33m $(convertToADA ${lovelacesToSend}) ADA / ${lovelacesToSend} lovelaces \e[90m" + if [[ ${lovelacesToSend} -lt ${minOutUTXO} ]]; then echo -e "\e[35mNot enough funds on the source Addr! Minimum UTXO value is ${minOutUTXO} lovelaces.\e[0m"; exit 1; fi + fi;; + + "MIN" ) #If keyword MIN was used, send just the minimal possible amount of lovelces to the destination address, rest will be returned to the source address, rxcnt=2 + minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendToAddr}+1000000") + lovelacesToSend=${minOutUTXO} + echo -e "\e[0mLovelaces to send to ${showToAddr}: \e[33m $(convertToADA ${lovelacesToSend}) ADA / ${lovelacesToSend} lovelaces \e[90m" + lovelacesToReturn=$(( ${totalLovelaces} - ${fee} - ${lovelacesToSend} )) + echo -e "\e[0mLovelaces to return to ${fromAddr}.addr: \e[32m $(convertToADA ${lovelacesToReturn}) ADA / ${lovelacesToReturn} lovelaces \e[90m" + minReturnUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendFromAddr}+1000000${assetsOutString}") + if [[ ${lovelacesToReturn} -lt ${minReturnUTXO} ]]; then echo -e "\e[35mNot enough funds on the source Addr to return the rest! 
Minimum UTXO value needed is ${minReturnUTXO} lovelaces.\e[0m"; + if [[ ${lovelacesToSend} -ge ${totalLovelaces} ]]; then echo -e "\e[35mIf you wanna send out ALL your lovelaces, use the keyword ALL instead of the amount.\e[0m";fi + exit 1; fi + ;; + + + * ) #If no keyword was used, its just the amount of lovelacesToSend to the destination address, rest will be returned to the source address, rxcnt=2 + minOutUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendToAddr}+1000000") + echo -e "\e[0mLovelaces to send to ${showToAddr}: \e[33m $(convertToADA ${lovelacesToSend}) ADA / ${lovelacesToSend} lovelaces \e[90m" + if [[ ${lovelacesToSend} -lt ${minOutUTXO} ]]; then echo -e "\e[35mNot enough lovelaces to send to the destination! Minimum UTXO value is ${minOutUTXO} lovelaces.\e[0m"; exit 1; fi + lovelacesToReturn=$(( ${totalLovelaces} - ${fee} - ${lovelacesToSend} )) + echo -e "\e[0mLovelaces to return to ${fromAddr}.addr: \e[32m $(convertToADA ${lovelacesToReturn}) ADA / ${lovelacesToReturn} lovelaces \e[90m" + minReturnUTXO=$(calc_minOutUTXO "${protocolParametersJSON}" "${sendFromAddr}+${lovelacesToReturn}${assetsOutString}") + if [[ ${lovelacesToReturn} -lt ${minReturnUTXO} ]]; then echo -e "\e[35mNot enough funds on the source Addr to return the rest! 
Minimum UTXO value needed is ${minReturnUTXO} lovelaces.\e[0m"; + if [[ ${lovelacesToSend} -ge ${totalLovelaces} ]]; then echo -e "\e[35mIf you wanna send out ALL your lovelaces, use the keyword ALL instead of the amount.\e[0m";fi + exit 1; fi + ;; +esac + +txBodyFile="${tempDir}/$(basename ${fromAddr}).txbody" +txWitnessFile="${tempDir}/$(basename ${fromAddr}).txwitness" +txFile="${tempDir}/$(basename ${fromAddr}).tx" + +echo +echo -e "\e[0mBuilding the unsigned transaction body: \e[32m ${txBodyFile} \e[90m" +echo + +#Building unsigned transaction body +rm ${txBodyFile} 2> /dev/null +if [[ ${rxcnt} == 1 ]]; then #Sending ALL funds (rxcnt=1) + ${cardanocli} transaction build-raw ${nodeEraParam} ${txInString} --tx-out "${sendToAddr}+${lovelacesToSend}${assetsOutString}" --invalid-hereafter ${ttl} --fee ${fee} ${metafileParameter} --out-file ${txBodyFile} + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + else #Sending chosen amount (rxcnt=2), return the rest(incl. assets) + ${cardanocli} transaction build-raw ${nodeEraParam} ${txInString} --tx-out ${sendToAddr}+${lovelacesToSend} --tx-out "${sendFromAddr}+${lovelacesToReturn}${assetsOutString}" --invalid-hereafter ${ttl} --fee ${fee} ${metafileParameter} --out-file ${txBodyFile} + #echo -e "\n\n\n${cardanocli} transaction build-raw ${nodeEraParam} ${txInString} --tx-out ${sendToAddr}+${lovelacesToSend} --tx-out \"${sendFromAddr}+${lovelacesToReturn}${assetsOutString}\" --invalid-hereafter ${ttl} --fee ${fee} --out-file ${txBodyFile}\n\n\n" + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi +fi + +dispFile=$(cat ${txBodyFile}); if ${cropTxOutput} && [[ ${#dispFile} -gt 4000 ]]; then echo "${dispFile:0:4000} ... 
(cropped)"; else echo "${dispFile}"; fi +echo + + +#Sign the unsigned transaction body with the SecureKey +rm ${txFile} 2> /dev/null + +#If payment address is a hardware wallet, use the cardano-hw-cli for the signing +if [[ -f "${fromAddr}.hwsfile" ]]; then + +# currentAuxHash=$(cat ${txBodyFile} | sed -n "s/.*f5\(d90103.*\)\"/\1/p" | xxd -r -ps | b2sum -l 256 -b | awk {'print $1'}) #holds the expected auxhash +# currentAuxHash=$(cat ${txBodyFile} | sed -n "s/.*\($currentAuxHash\).*/\1/p") #holds the auxhash if it was found in the txcbor as a proof + + echo -ne "\e[0mAutocorrect the TxBody for canonical order: " + tmp=$(autocorrect_TxBodyFile "${txBodyFile}"); if [ $? -ne 0 ]; then echo -e "\e[35m${tmp}\e[0m\n\n"; exit 1; fi + echo -e "\e[32m${tmp}\e[90m\n" + +# newAuxHash=$(cat ${txBodyFile} | sed -n 's/.*f5\(d90103.*\)\"/\1/p' | xxd -r -ps | b2sum -l 256 -b | awk {'print $1'}) +# if [[ "${currentAuxHash}" != "" && "${currentAuxHash}" != "${newAuxHash}" ]]; then #only do it when the currentAuxHash holds a hash (detection worked) and if the new one is different to the old one +# echo -ne "\e[0mAutocorrect the AuxHash from '${currentAuxHash}' to '${newAuxHash}': " +# sed -i "s/${currentAuxHash}/${newAuxHash}/g" ${txBodyFile}; if [ $? -ne 0 ]; then echo -e "\e[35mCouldn't write new ${txBodyFile} with a corrected AuxHash!\e[0m\n\n"; exit 1; fi +# echo -e "\e[32mOK\e[90m\n" +# fi + + + dispFile=$(cat ${txBodyFile}); if ${cropTxOutput} && [[ ${#dispFile} -gt 4000 ]]; then echo "${dispFile:0:4000} ... (cropped)"; else echo "${dispFile}"; fi + echo + + echo -e "\e[0mSign (Witness+Assemble) the unsigned transaction body with the \e[32m${fromAddr}.hwsfile\e[0m: \e[32m ${txFile}\e[0m" + echo + + #Witness and Assemble the TxFile + start_HwWallet; checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + #if rxcnt==2 that means that some funds are returned back to the hw-wallet and if its a staking address, we can hide the return + #amount of lovelaces which could cause confusion. 
we have to add the --change-output-key-file parameters for payment and stake if + #its a base address. this only works great for base addresses, otherwise a warning would pop up on the hw-wallet complaining about that + #there are no rewards, tzz. + hwWalletReturnStr="" + if [[ ${rxcnt} == 2 ]]; then + #but now we have to check if its a base address, in that case we also need to add the staking.hwsfile + stakeFromAddr="$(dirname ${fromAddr})/$(basename ${fromAddr} .payment).staking" + if [[ -f "${stakeFromAddr}.hwsfile" ]]; then hwWalletReturnStr="--change-output-key-file ${fromAddr}.hwsfile --change-output-key-file ${stakeFromAddr}.hwsfile"; fi + + fi + + tmp=$(${cardanohwcli} transaction witness --tx-file ${txBodyFile} --hw-signing-file ${fromAddr}.hwsfile ${hwWalletReturnStr} ${magicparam} --out-file ${txWitnessFile} 2> /dev/stdout) + if [[ "${tmp^^}" =~ (ERROR|DISCONNECT) ]]; then echo -e "\e[35m${tmp}\e[0m\n"; exit 1; else echo -ne "\e[0mWitnessed ... "; fi + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + + ${cardanocli} transaction assemble --tx-body-file ${txBodyFile} --witness-file ${txWitnessFile} --out-file ${txFile} + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + echo -e "Assembled ... \e[32mDONE\e[0m\n"; + +else + + #read the needed signing keys into ram and sign the transaction + skeyJSON=$(read_skeyFILE "${fromAddr}.skey"); if [ $? -ne 0 ]; then echo -e "\e[35m${skeyJSON}\e[0m\n"; exit 1; else echo -e "\e[32mOK\e[0m\n"; fi + + echo -e "\e[0mSign the unsigned transaction body with the \e[32m${fromAddr}.skey\e[0m: \e[32m ${txFile}\e[0m" + echo + + ${cardanocli} transaction sign --tx-body-file ${txBodyFile} --signing-key-file <(echo "${skeyJSON}") ${magicparam} --out-file ${txFile} + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + + #forget the signing keys + unset skeyJSON + +fi + +echo -ne "\e[90m" +dispFile=$(cat ${txFile}); if ${cropTxOutput} && [[ ${#dispFile} -gt 4000 ]]; then echo "${dispFile:0:4000} ... 
(cropped)"; else echo "${dispFile}"; fi +echo + +#Do a txSize Check to not exceed the max. txSize value +cborHex=$(jq -r .cborHex < ${txFile}) +txSize=$(( ${#cborHex} / 2 )) +maxTxSize=$(jq -r .maxTxSize <<< ${protocolParametersJSON}) +if [[ ${txSize} -le ${maxTxSize} ]]; then echo -e "\e[0mTransaction-Size: ${txSize} bytes (max. ${maxTxSize})\n" + else echo -e "\n\e[35mError - ${txSize} bytes Transaction-Size is too big! The maximum is currently ${maxTxSize} bytes.\e[0m\n"; exit 1; fi + + +#If you wanna skip the Prompt, set the environment variable ENV_SKIP_PROMPT to "YES" - be careful!!! +#if ask "\e[33mDoes this look good for you, continue ?" N; then +if [ "${ENV_SKIP_PROMPT}" == "YES" ] || ask "\n\e[33mDoes this look good for you, continue ?" N; then + + echo + if ${onlineMode}; then #onlinesubmit + echo -ne "\e[0mSubmitting the transaction via the node... " + ${cardanocli} transaction submit --tx-file ${txFile} ${magicparam} + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi + echo -e "\e[32mDONE\n" + + #Show the TxID + txID=$(${cardanocli} transaction txid --tx-file ${txFile}); echo -e "\e[0m TxID is: \e[32m${txID}\e[0m" + checkError "$?"; if [ $? -ne 0 ]; then exit $?; fi; + if [[ "${transactionExplorer}" != "" ]]; then echo -e "\e[0mTracking: \e[32m${transactionExplorer}/${txID}\n\e[0m"; fi + + else #offlinestore + txFileJSON=$(cat ${txFile} | jq .) 
+ offlineJSON=$( jq ".transactions += [ { date: \"$(date -R)\", + type: \"Transaction\", + era: \"$(jq -r .protocol.era <<< ${offlineJSON})\", + fromAddr: \"${fromAddr}\", + sendFromAddr: \"${sendFromAddr}\", + toAddr: \"${toAddr}\", + sendToAddr: \"${sendToAddr}\", + txJSON: ${txFileJSON} } ]" <<< ${offlineJSON}) + #Write the new offileFile content + offlineJSON=$( jq ".history += [ { date: \"$(date -R)\", action: \"signed utxo-transaction from '${fromAddr}' to '${toAddr}'\" } ]" <<< ${offlineJSON}) + offlineJSON=$( jq ".general += {offlineCLI: \"${versionCLI}\" }" <<< ${offlineJSON}) + echo "${offlineJSON}" > ${offlineFile} + #Readback the tx content and compare it to the current one + readback=$(cat ${offlineFile} | jq -r ".transactions[-1].txJSON") + if [[ "${txFileJSON}" == "${readback}" ]]; then + showOfflineFileInfo; + echo -e "\e[33mTransaction txJSON has been stored in the '$(basename ${offlineFile})'.\nYou can now transfer it to your online machine for execution.\e[0m\n"; + else + echo -e "\e[35mERROR - Could not verify the written data in the '$(basename ${offlineFile})'. 
Retry again or generate a new '$(basename ${offlineFile})'.\e[0m\n"; + fi + + fi + +fi + +echo -e "\e[0m\n" \ No newline at end of file diff --git a/mithr/demo-wallet/commandos.txt b/mithr/demo-wallet/commandos.txt new file mode 100644 index 0000000..3ab1f7c --- /dev/null +++ b/mithr/demo-wallet/commandos.txt @@ -0,0 +1,135 @@ +mkdir demo-wallet +cd demo-wallet +ls +cardano-cli address key-gen \ + --verification-key-file payment.vkey \ + --signing-key-file payment.skey +ls +payment.skey payment.vkey + +cardano-cli address build \ + --payment-verification-key-file payment.vkey \ + --out-file payment.addr \ + --testnet-magic 2 + +ls -l +total 12 +-rw------- 1 abc abc 63 Feb 16 14:26 payment.addr +-rw------- 1 abc abc 180 Feb 16 14:22 payment.skey +-rw------- 1 abc abc 190 Feb 16 14:22 payment.vkey + +---->TRANFERIR DESDE WALLET EXTERNA DE "Preview" NETWORK, 1000 ADAS (1000000000 lovelaces)<------------------------ + +cardano-cli query utxo --address addr_test1vp379gt70v3rka6f2fsr4hxsg6e63j8ce26tpe0mnm0ds4cnxlhuk --testnet-magic 2 + TxHash TxIx Amount +-------------------------------------------------------------------------------------- +e91cf551643e12a7178e0c2a24c20aff22cde79bf870a8a92bf7018f4cef976d 0 1000000000 lovelace + TxOutDatumNone + +mkdir policy +cardano-cli address key-gen \ + --verification-key-file policy/policy.vkey \ + --signing-key-file policy/policy.skey + +cardano-cli address key-hash --payment-verification-key-file policy/policy.vkey +48151d44a0b1805251923e3dfb42c6b8dc148dccbdd4d0b19c198442 + +mkdir policy/tokens +touch policy/tokens/token_policy.script + +Agregar este contenido al archivo: +{ + "type": "all", + "scripts": [ + { + "keyHash": "48151d44a0b1805251923e3dfb42c6b8dc148dccbdd4d0b19c198442", + "type": "sig" + } + ] +} + +cardano-cli transaction policyid --script-file ./policy/tokens/token_policy.script > ./policy/tokens/policy.id + +touch policy/tokens/token_meta.json + +Agergar este contenido al archivo: +{ + "721": { + 
"8552076c8b0df713de1fcace671735ce670fc2ca1dc3c01a7ebe74d6": { + "MithrToken": { + "name": "Mithr Token", + "image": "ipfs://Qmck8NhetbUvVVYQzWALMzAJsUARVJ8Cr5hiNnqQ57qNMr" + } + } + } +} + +cardano-cli query utxo \ + --address $(cat payment.addr) \ + --testnet-magic 2 + TxHash TxIx Amount +-------------------------------------------------------------------------------------- +e91cf551643e12a7178e0c2a24c20aff22cde79bf870a8a92bf7018f4cef976d 0 1000000000 lovelace + TxOutDatumNone + +4D69746872546F6B656E es igual a "MithrToken" (sin comillas) +en Hexa. Usar esto para convertir los nombres de los token de token_meta.json +https://www.rapidtables.com/convert/number/ascii-to-hex.html + +cardano-cli transaction build-raw \ +--fee 188865 \ +--tx-in e91cf551643e12a7178e0c2a24c20aff22cde79bf870a8a92bf7018f4cef976d#0 \ +--tx-out $(cat payment.addr)+999811135+"420000000 $(cat policy/tokens/policy.id).4D69746872546F6B656E" \ +--mint="420000000 $(cat policy/tokens/policy.id).4D69746872546F6B656E" \ +--minting-script-file policy/tokens/token_policy.script \ +--metadata-json-file policy/tokens/token_meta.json \ +--out-file token_tx.raw + +cardano-cli query protocol-parameters \ + --testnet-magic 2 \ + --out-file protocol.json + +ls +commandos.txt payment.addr payment.skey payment.vkey policy protocol.json token_tx.raw + +cardano-cli transaction calculate-min-fee \ +--tx-body-file token_tx.raw \ +--tx-in-count 1 \ +--tx-out-count 1 \ +--witness-count 1 \ +--byron-witness-count 0 \ +--mainnet \ +--protocol-params-file protocol.json + +FEE: 184861 Lovelace + +expr 1000000000 - 184861 +999815139 + +cardano-cli transaction build-raw \ +--fee 184861 \ +--tx-in e91cf551643e12a7178e0c2a24c20aff22cde79bf870a8a92bf7018f4cef976d#0 \ +--tx-out $(cat payment.addr)+999815139+"420000000 $(cat policy/tokens/policy.id).4D69746872546F6B656E" \ +--mint="420000000 $(cat policy/tokens/policy.id).4D69746872546F6B656E" \ +--minting-script-file policy/tokens/token_policy.script \ +--metadata-json-file 
policy/tokens/token_meta.json \ +--out-file token_tx.raw + +cardano-cli transaction sign \ +--signing-key-file payment.skey \ +--signing-key-file policy/policy.skey \ +--testnet-magic 2 \ +--tx-body-file token_tx.raw \ +--out-file token_tx.signed + +cardano-cli transaction submit \ +--tx-file token_tx.signed \ +--testnet-magic 2 + +Transaction successfully submitted. + +cardano-cli query utxo --address addr_test1vp379gt70v3rka6f2fsr4hxsg6e63j8ce26tpe0mnm0ds4cnxlhuk --testnet-magic 2 + + TxHash TxIx Amount +-------------------------------------------------------------------------------------- +9ed878e047edb6c1b2a69e369980b6d12e80a58b01425a41a6ec4eced75cf550 0 999815139 lovelace + 420000000 8552076c8b0df713de1fcace671735ce670fc2ca1dc3c01a7ebe74d6.4d69746872546f6b656e + TxOutDatumNone + +SIII !!!! diff --git a/mithr/demo-wallet/policy/tokens/token_meta.json b/mithr/demo-wallet/policy/tokens/token_meta.json new file mode 100644 index 0000000..6506db8 --- /dev/null +++ b/mithr/demo-wallet/policy/tokens/token_meta.json @@ -0,0 +1,10 @@ +{ + "721": { + "8552076c8b0df713de1fcace671735ce670fc2ca1dc3c01a7ebe74d6": { + "4D69746872546F6B656E": { + "name": "Mithr Token", + "image": "ipfs://Qmck8NhetbUvVVYQzWALMzAJsUARVJ8Cr5hiNnqQ57qNMr" + } + } + } +} \ No newline at end of file diff --git a/receta.js b/receta.js new file mode 100644 index 0000000..d47c15e --- /dev/null +++ b/receta.js @@ -0,0 +1,51 @@ +//Address usada: addr_test1vp2l080u4lzhwvfq7exlncw2tye2crr5wl9f0vxgh9ewctsy5dk43 +import * as fs from 'fs'; +// Agregar esta dependencia usando: npm install node-cmd +import cmd from 'node-cmd'; + +// Ruta que apunta a cardano-cli de uso global +const CARDANO_CLI_PATH = "cardano-cli"; +// El numero identificatorio de `testnet` (2 para workspace de Dameter.run) +const CARDANO_NETWORK_MAGIC = 2; //1097911063; +// Directorio donde almacenamos las Keys +// asumiento que el $HOME/workspace/repo es "./" +const CARDANO_KEYS_DIR = "keys"; + +// Leemos el valor de la wallet address desde 
el archivo payment.addr +const walletAddress = fs.readFileSync(`${CARDANO_KEYS_DIR}/my_address.addr`).toString(); + +// Usamos la libreria npm "node-cmd" para ejecutar comandos shell y leer los UTXO +const rawUtxoTable = cmd.runSync([ + CARDANO_CLI_PATH, + "query", "utxo", + "--testnet-magic", CARDANO_NETWORK_MAGIC, + "--address", walletAddress +].join(" ")); + +// Calculate total lovelace of the UTXO(s) inside the wallet address +const utxoTableRows = rawUtxoTable.data.trim().split('\n'); +console.log(utxoTableRows); + +let totalLovelaceRecv = 0; +let haySaldo = false; + +for (let x = 2; x < utxoTableRows.length; x++) { + const cells = utxoTableRows[x].split(" ").filter(i => i); + totalLovelaceRecv += parseInt(cells[2]); + console.log(cells); +} + +// Determine if the total lovelace received is more than or equal to +// the total expected lovelace and displaying the results. +haySaldo = !(totalLovelaceRecv == 0); + +console.log(`Total Received: ${totalLovelaceRecv} LOVELACE`); +console.log(`Hay Saldo: ${(haySaldo ? "✅" : "❌")}`); + +cardano-cli transaction build-raw \ + --tx-in 4e3a6e7fdcb0d0efa17bf79c13aed2b4cb9baf37fb1aa2e39553d5bd720c5c99#4 \ + --tx-out $(cat payment2.addr)+0 \ + --tx-out $(cat payment.addr)+0 \ + --invalid-hereafter 0 \ + --fee 0 \ + --out-file tx.draft \ No newline at end of file diff --git a/sendPrescription.sh b/sendPrescription.sh new file mode 100755 index 0000000..9f5f3b1 --- /dev/null +++ b/sendPrescription.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +# Script is brought to you by DANIEL WEB3 RODRIGUEZ, Telegram @BreakpointDaniel + + +#Check the commandline parameter +if [[ $# -eq 1 && ! 
$1 == "" ]]; then addrName="$(dirname $1)/$(basename $1 .addr)"; addrName=${addrName/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi + +#send address into a variable +#ADDRESS="${addrName}" +ADDRESS="addr_test1vp2l080u4lzhwvfq7exlncw2tye2crr5wl9f0vxgh9ewctsy5dk43"; + +#Check if addrName file does not exists, make a dummy one in the temp directory and fill in the given parameter as the hash address +if [ ! -f "${addrName}.addr" ]; then + echo 'Información de TIP'; + + echo $ADDRESS; + cardano-cli query tip --testnet-magic 2; + + typeOfAddr=$(cardano-cli query utxo --testnet-magic 2 --address $ADDRESS); + echo $typeOfAddr; + #if [[ ${typeOfAddr} == ${addrTypePayment} || ${typeOfAddr} == ${addrTypeStake} ]]; then echo "$(basename ${addrName})" > ${tempDir}/tempAddr.addr; addrName="${tempDir}/tempAddr"; + + + #cardano-cli query utxo --testnet-magic 2 --address $ADDRESS + +fi \ No newline at end of file From 45f8d9ba0643a86a2d6417ddfc7396b0905c0c85 Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Fri, 3 Mar 2023 21:51:59 +0000 Subject: [PATCH 5/8] llamada a sendPrescription c/"metada file" param --- sendPrescription.sh | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/sendPrescription.sh b/sendPrescription.sh index 9f5f3b1..e15b2d1 100755 --- a/sendPrescription.sh +++ b/sendPrescription.sh @@ -1,27 +1,29 @@ #!/bin/bash # Script is brought to you by DANIEL WEB3 RODRIGUEZ, Telegram @BreakpointDaniel - +# Stakepool scripts ATADA : +# TESTNET: https://github.com/gitmachtl/scripts/tree/master/cardano/testnet +# MAINNET: https://github.com/gitmachtl/scripts/tree/master/cardano/mainnet #Check the commandline parameter -if [[ $# -eq 1 && ! $1 == "" ]]; then addrName="$(dirname $1)/$(basename $1 .addr)"; addrName=${addrName/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi +if [[ $# -eq 1 && ! 
$1 == "" ]]; then metadatafile="$(dirname $1)/$(basename $1 .json)"; metadatafile=${metadatafile/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi -#send address into a variable -#ADDRESS="${addrName}" -ADDRESS="addr_test1vp2l080u4lzhwvfq7exlncw2tye2crr5wl9f0vxgh9ewctsy5dk43"; +#send metadata file into a variable +METADATA_FILE="${metadatafile}.json" -#Check if addrName file does not exists, make a dummy one in the temp directory and fill in the given parameter as the hash address -if [ ! -f "${addrName}.addr" ]; then - echo 'Información de TIP'; +#Check if metadatafile file exists, create a transacion with metadata added +if [ -f "${metadatafile}.json" ]; then + echo 'Información de Metadata'; - echo $ADDRESS; + echo $METADATA_FILE; cardano-cli query tip --testnet-magic 2; - typeOfAddr=$(cardano-cli query utxo --testnet-magic 2 --address $ADDRESS); - echo $typeOfAddr; + #typeOfAddr=$(cardano-cli query utxo --testnet-magic 2 --address $ADDRESS); + #echo $typeOfAddr; #if [[ ${typeOfAddr} == ${addrTypePayment} || ${typeOfAddr} == ${addrTypeStake} ]]; then echo "$(basename ${addrName})" > ${tempDir}/tempAddr.addr; addrName="${tempDir}/tempAddr"; #cardano-cli query utxo --testnet-magic 2 --address $ADDRESS + ./01_sendLovelaces.sh my_address other_address 'MIN' $METADATA_FILE; fi \ No newline at end of file From 4c1da0a7adcb812d7039fd213af89b0485e84401 Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Fri, 3 Mar 2023 23:42:21 +0000 Subject: [PATCH 6/8] Creacion de metadata file en base al parametro IN --- sendPrescription.sh | 39 +++++++++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 14 deletions(-) diff --git a/sendPrescription.sh b/sendPrescription.sh index e15b2d1..e7361dd 100755 --- a/sendPrescription.sh +++ b/sendPrescription.sh @@ -6,24 +6,35 @@ # MAINNET: https://github.com/gitmachtl/scripts/tree/master/cardano/mainnet #Check the commandline parameter -if [[ $# -eq 1 && ! 
$1 == "" ]]; then metadatafile="$(dirname $1)/$(basename $1 .json)"; metadatafile=${metadatafile/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi +if [[ $# -eq 1 && ! $1 == "" ]]; then receta_id="$(dirname $1)/$(basename $1 .json)"; receta_id=${receta_id/#.\//}; else echo "ERROR - Usage: $0 "; exit 2; fi #send metadata file into a variable -METADATA_FILE="${metadatafile}.json" +METADATA_FILE="${receta_id}.json" -#Check if metadatafile file exists, create a transacion with metadata added -if [ -f "${metadatafile}.json" ]; then +#Check if receta_id not file exists, I create a metadata file +if [ ! -f "${receta_id}.json" ]; then echo 'Información de Metadata'; echo $METADATA_FILE; - cardano-cli query tip --testnet-magic 2; - - #typeOfAddr=$(cardano-cli query utxo --testnet-magic 2 --address $ADDRESS); - #echo $typeOfAddr; - #if [[ ${typeOfAddr} == ${addrTypePayment} || ${typeOfAddr} == ${addrTypeStake} ]]; then echo "$(basename ${addrName})" > ${tempDir}/tempAddr.addr; addrName="${tempDir}/tempAddr"; - - - #cardano-cli query utxo --testnet-magic 2 --address $ADDRESS - ./01_sendLovelaces.sh my_address other_address 'MIN' $METADATA_FILE; - + cardano-cli query tip --testnet-magic 2; + + #create metadata file based on parameter + cat > $METADATA_FILE < Date: Fri, 3 Mar 2023 23:46:44 +0000 Subject: [PATCH 7/8] fix receta.js comentando ultima linea --- receta.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/receta.js b/receta.js index d47c15e..f718d71 100644 --- a/receta.js +++ b/receta.js @@ -42,10 +42,10 @@ haySaldo = !(totalLovelaceRecv == 0); console.log(`Total Received: ${totalLovelaceRecv} LOVELACE`); console.log(`Hay Saldo: ${(haySaldo ? 
"✅" : "❌")}`); -cardano-cli transaction build-raw \ - --tx-in 4e3a6e7fdcb0d0efa17bf79c13aed2b4cb9baf37fb1aa2e39553d5bd720c5c99#4 \ - --tx-out $(cat payment2.addr)+0 \ - --tx-out $(cat payment.addr)+0 \ - --invalid-hereafter 0 \ - --fee 0 \ - --out-file tx.draft \ No newline at end of file +// cardano-cli transaction build-raw \ +// --tx-in 4e3a6e7fdcb0d0efa17bf79c13aed2b4cb9baf37fb1aa2e39553d5bd720c5c99#4 \ +// --tx-out $(cat payment2.addr)+0 \ +// --tx-out $(cat payment.addr)+0 \ +// --invalid-hereafter 0 \ +// --fee 0 \ +// --out-file tx.draft \ No newline at end of file From eb31ed5141d08ae15319863669d541ac8c3b1a8a Mon Sep 17 00:00:00 2001 From: Daniel Rodriguez Date: Fri, 3 Mar 2023 23:58:08 +0000 Subject: [PATCH 8/8] Adding UTXO line --- sendPrescription.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sendPrescription.sh b/sendPrescription.sh index e7361dd..4f51918 100755 --- a/sendPrescription.sh +++ b/sendPrescription.sh @@ -34,7 +34,7 @@ EOF echo "Creado Archivo JSON."; echo "Script Completado."; - #./01_sendLovelaces.sh my_address other_address 'MIN' $METADATA_FILE; + ./01_sendLovelaces.sh my_address other_address 'MIN' $METADATA_FILE; else echo "El archivo de metadata para la receta ID = ${receta_id} ya existe."; fi \ No newline at end of file