Compare commits

...

57 Commits

Author SHA1 Message Date
Max 961aea189d Merge pull request #38 from MaxMustermann2/chain-id-signing-fix 2 years ago
MaxMustermann2 90308f4d78 chore: bump version 0.1.0 -> 0.1.1 2 years ago
MaxMustermann2 6d77fad854 test(signing): add example test with eth chainId 2 years ago
MaxMustermann2 883c78e341 fix(signing): drop shard ids from serializer 2 years ago
Max fa5f665604 Merge pull request #37 from MaxMustermann2/new-release-system 2 years ago
MaxMustermann2 e6a188251c chore: update bumpver commit message 2 years ago
MaxMustermann2 e2aee5ef50 chore: add instructions to sign commit and tag 2 years ago
MaxMustermann2 2ebf783a4d bump version 0.0.0 -> 0.1.0 2 years ago
MaxMustermann2 0dc407f9eb chore: move to new build system 2 years ago
Max 4aac11191a Merge pull request #36 from MaxMustermann2/version-upgrade 2 years ago
MaxMustermann2 28fcf53f18 bump version 2 years ago
Max d12bde7328 Merge pull request #35 from MaxMustermann2/validator-sigs 2 years ago
MaxMustermann2 1c90c914aa docs: remove -1 as page option 2 years ago
MaxMustermann2 c89976f2c0 test(validator): update edit validator sign 2 years ago
MaxMustermann2 1dba8b1066 docs: update readme 2 years ago
MaxMustermann2 ca6a6ae1a8 chore(yapf): run yapf 2 years ago
MaxMustermann2 98514c7a2c chore(lint): resolve `pylint` complaints 2 years ago
MaxMustermann2 a3295b9650 chore: run `black` on all files 2 years ago
MaxMustermann2 2135d1050c test: update tests to be independent of localnet 2 years ago
MaxMustermann2 eb322498c4 fix(transaction): make error message singular 2 years ago
MaxMustermann2 c9125ba2de fix(staking): remove -1 as default parameter 2 years ago
MaxMustermann2 6e83b2c8fd fix(contract): remove invalid reference to method 2 years ago
MaxMustermann2 3e4533d486 fix(util): pass int `chainId` through 2 years ago
MaxMustermann2 cfd550b553 feat(transaction): add send + confirm staking tx 2 years ago
MaxMustermann2 218fee615b fix(account): add default param, get_nonce 2 years ago
MaxMustermann2 6d6c417fe5 fix(util): do not validate integer chainId 2 years ago
MaxMustermann2 2f0d840f6e fix(signing): require `chainId` for staking signs 2 years ago
MaxMustermann2 7959415bb4 Merge tempoxylophone/pyhmy:master 2 years ago
mikefreemanwd 25a058d5dd [feature]: Added utility function to convert from hex to one (other way from one to hex). Added vim .swp files to gitignore. 2 years ago
mikefreemanwd ce1ae2bc4b [bugfix]: Add bech32 submodule declaration. 2 years ago
mikefreemanwd 6bc1bbbb34 [bugfix]: Fix test command in Makefile. 2 years ago
MaxMustermann2 cdbdae85d1 Upgrade eth_account version 3 years ago
MaxMustermann2 4f266619fa Add signature options to validator signing 3 years ago
Max c81f9eff41 transaction.py: use correct endpoint 3 years ago
Max 9f5e912b22 transaction.py: use correct endpoint 3 years ago
Max 8b62d70d4a Create __init__.py 3 years ago
Ganesha Upadhyaya 668d7ef756 Merge pull request #22 from tshirtman/setup_cfg 3 years ago
gabriel pettier 81be225cf6 move package config to setup.cfg 3 years ago
Ganesha Upadhyaya 099274bec1 Merge pull request #20 from MaxMustermann2/master 3 years ago
Max Mustermann 719e314ba9 add syntax highlighting 4 years ago
Max Mustermann c731326072 confirm transaction => block hash is non zero 4 years ago
Max Mustermann 751a95375a Update README 4 years ago
Max Mustermann 1574b55003 Update comment for skipped test 4 years ago
Max Mustermann 686f3bc933 Update README.md to add dependencies installation instructions 4 years ago
Max Mustermann 15c289ffe4 add dependency 4 years ago
Max Mustermann d19de0d9f0 Remove random file 4 years ago
Max Mustermann b0bb5de4d3 Prepare for PR 4 years ago
Max Mustermann c9cf502d21 Update README 4 years ago
Max Mustermann bbc8ae5ca1 Update README 4 years ago
Max Mustermann 7a536e5724 Update README 4 years ago
Max Mustermann 9b5c08ce77 Update README 4 years ago
Ganesha Upadhyaya 7690a1ed62 Merge pull request #15 from Robovalidator/fix_test 4 years ago
hsiung 24ba28d50f Fix existing unit tests by generating a new raw transaction and specifying the shard 1 endpoint where needed 4 years ago
Ganesha Upadhyaya 03f12126b5 Update README.md 4 years ago
Daniel Van Der Maden 4bf2b1503b [version] Bump version 4 years ago
Daniel Van Der Maden f92259b68b [cli] Update lib linking for darwin only case 4 years ago
Janet Liang e10ea06c82 [account] Add warning when using is_valid_address (#13) 4 years ago
  1. .gitignore: 5 lines changed
  2. .style.yapf: 404 lines changed
  3. Makefile: 20 lines changed
  4. README.md: 564 lines changed
  5. pyhmy/__init__.py: 23 lines changed
  6. pyhmy/_version.py: 11 lines changed
  7. pyhmy/account.py: 504 lines changed
  8. pyhmy/bech32/__init__.py: 0 lines changed
  9. pyhmy/bech32/bech32.py: 115 lines changed
  10. pyhmy/blockchain.py: 1521 lines changed
  11. pyhmy/cli.py: 429 lines changed
  12. pyhmy/constants.py: 18 lines changed
  13. pyhmy/contract.py: 297 lines changed
  14. pyhmy/exceptions.py: 59 lines changed
  15. pyhmy/logging.py: 193 lines changed
  16. pyhmy/numbers.py: 30 lines changed
  17. pyhmy/rpc/exceptions.py: 37 lines changed
  18. pyhmy/rpc/request.py: 66 lines changed
  19. pyhmy/signing.py: 255 lines changed
  20. pyhmy/staking.py: 889 lines changed
  21. pyhmy/staking_signing.py: 520 lines changed
  22. pyhmy/staking_structures.py: 325 lines changed
  23. pyhmy/transaction.py: 847 lines changed
  24. pyhmy/util.py: 169 lines changed
  25. pyhmy/validator.py: 742 lines changed
  26. pyproject.toml: 44 lines changed
  27. pytest.ini: 3 lines changed
  28. setup.py: 40 lines changed
  29. tests/GenerateRawTransactions.ipynb: 578 lines changed
  30. tests/bech32-pyhmy/test_bech32.py: 9 lines changed
  31. tests/cli-pyhmy/test_cli.py: 61 lines changed
  32. tests/logging-pyhmy/test_logging.py: 20 lines changed
  33. tests/numbers-pyhmy/test_numbers.py: 41 lines changed
  34. tests/request-pyhmy/test_request.py: 83 lines changed
  35. tests/sdk-pyhmy/conftest.py: 307 lines changed
  36. tests/sdk-pyhmy/test_account.py: 197 lines changed
  37. tests/sdk-pyhmy/test_blockchain.py: 473 lines changed
  38. tests/sdk-pyhmy/test_contract.py: 86 lines changed
  39. tests/sdk-pyhmy/test_signing.py: 120 lines changed
  40. tests/sdk-pyhmy/test_staking.py: 307 lines changed
  41. tests/sdk-pyhmy/test_staking_signing.py: 112 lines changed
  42. tests/sdk-pyhmy/test_transaction.py: 330 lines changed
  43. tests/sdk-pyhmy/test_validator.py: 225 lines changed
  44. tests/util-pyhmy/test_util.py: 69 lines changed

.gitignore (vendored): 5 lines changed

@@ -130,4 +130,7 @@ dmypy.json
.pyre/
# IDE
.idea
.idea
# VIM
*.swp

@@ -0,0 +1,404 @@
[style]
# Align closing bracket with visual indentation.
align_closing_bracket_with_visual_indent=True
# Allow dictionary keys to exist on multiple lines. For example:
#
# x = {
# ('this is the first element of a tuple',
# 'this is the second element of a tuple'):
# value,
# }
allow_multiline_dictionary_keys=False
# Allow lambdas to be formatted on more than one line.
allow_multiline_lambdas=False
# Allow splitting before a default / named assignment in an argument list.
allow_split_before_default_or_named_assigns=True
# Allow splits before the dictionary value.
allow_split_before_dict_value=False
# Let spacing indicate operator precedence. For example:
#
# a = 1 * 2 + 3 / 4
# b = 1 / 2 - 3 * 4
# c = (1 + 2) * (3 - 4)
# d = (1 - 2) / (3 + 4)
# e = 1 * 2 - 3
# f = 1 + 2 + 3 + 4
#
# will be formatted as follows to indicate precedence:
#
# a = 1*2 + 3/4
# b = 1/2 - 3*4
# c = (1+2) * (3-4)
# d = (1-2) / (3+4)
# e = 1*2 - 3
# f = 1 + 2 + 3 + 4
#
arithmetic_precedence_indication=False
# Number of blank lines surrounding top-level function and class
# definitions.
blank_lines_around_top_level_definition=2
# Number of blank lines between top-level imports and variable
# definitions.
blank_lines_between_top_level_imports_and_variables=1
# Insert a blank line before a class-level docstring.
blank_line_before_class_docstring=False
# Insert a blank line before a module docstring.
blank_line_before_module_docstring=False
# Insert a blank line before a 'def' or 'class' immediately nested
# within another 'def' or 'class'. For example:
#
# class Foo:
# # <------ this blank line
# def method():
# ...
blank_line_before_nested_class_or_def=False
# Do not split consecutive brackets. Only relevant when
# dedent_closing_brackets is set. For example:
#
# call_func_that_takes_a_dict(
# {
# 'key1': 'value1',
# 'key2': 'value2',
# }
# )
#
# would reformat to:
#
# call_func_that_takes_a_dict({
# 'key1': 'value1',
# 'key2': 'value2',
# })
coalesce_brackets=False
# The column limit.
column_limit=80
# The style for continuation alignment. Possible values are:
#
# - SPACE: Use spaces for continuation alignment. This is default behavior.
# - FIXED: Use fixed number (CONTINUATION_INDENT_WIDTH) of columns
# (ie: CONTINUATION_INDENT_WIDTH/INDENT_WIDTH tabs or
# CONTINUATION_INDENT_WIDTH spaces) for continuation alignment.
# - VALIGN-RIGHT: Vertically align continuation lines to multiple of
# INDENT_WIDTH columns. Slightly right (one tab or a few spaces) if
# cannot vertically align continuation lines with indent characters.
continuation_align_style=SPACE
# Indent width used for line continuations.
continuation_indent_width=4
# Put closing brackets on a separate line, dedented, if the bracketed
# expression can't fit in a single line. Applies to all kinds of brackets,
# including function definitions and calls. For example:
#
# config = {
# 'key1': 'value1',
# 'key2': 'value2',
# } # <--- this bracket is dedented and on a separate line
#
# time_series = self.remote_client.query_entity_counters(
# entity='dev3246.region1',
# key='dns.query_latency_tcp',
# transform=Transformation.AVERAGE(window=timedelta(seconds=60)),
# start_ts=now()-timedelta(days=3),
# end_ts=now(),
# ) # <--- this bracket is dedented and on a separate line
dedent_closing_brackets=True
# Disable the heuristic which places each list element on a separate line
# if the list is comma-terminated.
disable_ending_comma_heuristic=True
# Place each dictionary entry onto its own line.
each_dict_entry_on_separate_line=True
# Require multiline dictionary even if it would normally fit on one line.
# For example:
#
# config = {
# 'key1': 'value1'
# }
force_multiline_dict=True
# The regex for an i18n comment. The presence of this comment stops
# reformatting of that line, because the comments are required to be
# next to the string they translate.
i18n_comment=#\..*
# The i18n function call names. The presence of this function stops
# reformattting on that line, because the string it has cannot be moved
# away from the i18n comment.
i18n_function_call=N_, _
# Indent blank lines.
indent_blank_lines=False
# Put closing brackets on a separate line, indented, if the bracketed
# expression can't fit in a single line. Applies to all kinds of brackets,
# including function definitions and calls. For example:
#
# config = {
# 'key1': 'value1',
# 'key2': 'value2',
# } # <--- this bracket is indented and on a separate line
#
# time_series = self.remote_client.query_entity_counters(
# entity='dev3246.region1',
# key='dns.query_latency_tcp',
# transform=Transformation.AVERAGE(window=timedelta(seconds=60)),
# start_ts=now()-timedelta(days=3),
# end_ts=now(),
# ) # <--- this bracket is indented and on a separate line
indent_closing_brackets=False
# Indent the dictionary value if it cannot fit on the same line as the
# dictionary key. For example:
#
# config = {
# 'key1':
# 'value1',
# 'key2': value1 +
# value2,
# }
indent_dictionary_value=False
# The number of columns to use for indentation.
indent_width=4
# Join short lines into one line. E.g., single line 'if' statements.
join_multiple_lines=False
# Do not include spaces around selected binary operators. For example:
#
# 1 + 2 * 3 - 4 / 5
#
# will be formatted as follows when configured with "*,/":
#
# 1 + 2*3 - 4/5
no_spaces_around_selected_binary_operators=
# Use spaces around default or named assigns.
spaces_around_default_or_named_assign=True
# Adds a space after the opening '{' and before the ending '}' dict
# delimiters.
#
# {1: 2}
#
# will be formatted as:
#
# { 1: 2 }
spaces_around_dict_delimiters=True
# Adds a space after the opening '[' and before the ending ']' list
# delimiters.
#
# [1, 2]
#
# will be formatted as:
#
# [ 1, 2 ]
spaces_around_list_delimiters=True
# Use spaces around the power operator.
spaces_around_power_operator=False
# Use spaces around the subscript / slice operator. For example:
#
# my_list[1 : 10 : 2]
spaces_around_subscript_colon=True
# Adds a space after the opening '(' and before the ending ')' tuple
# delimiters.
#
# (1, 2, 3)
#
# will be formatted as:
#
# ( 1, 2, 3 )
spaces_around_tuple_delimiters=True
# The number of spaces required before a trailing comment.
# This can be a single value (representing the number of spaces
# before each trailing comment) or list of values (representing
# alignment column values; trailing comments within a block will
# be aligned to the first column value that is greater than the maximum
# line length within the block). For example:
#
# With spaces_before_comment=5:
#
# 1 + 1 # Adding values
#
# will be formatted as:
#
# 1 + 1 # Adding values <-- 5 spaces between the end of the
# # statement and comment
#
# With spaces_before_comment=15, 20:
#
# 1 + 1 # Adding values
# two + two # More adding
#
# longer_statement # This is a longer statement
# short # This is a shorter statement
#
# a_very_long_statement_that_extends_beyond_the_final_column # Comment
# short # This is a shorter statement
#
# will be formatted as:
#
# 1 + 1 # Adding values <-- end of line comments in block
# # aligned to col 15
# two + two # More adding
#
# longer_statement # This is a longer statement <-- end of line
# # comments in block aligned to col 20
# short # This is a shorter statement
#
# a_very_long_statement_that_extends_beyond_the_final_column # Comment <-- the end of line comments are aligned based on the line length
# short # This is a shorter statement
#
spaces_before_comment=2
# Insert a space between the ending comma and closing bracket of a list,
# etc.
space_between_ending_comma_and_closing_bracket=True
# Use spaces inside brackets, braces, and parentheses. For example:
#
# method_call( 1 )
# my_dict[ 3 ][ 1 ][ get_index( *args, **kwargs ) ]
# my_set = { 1, 2, 3 }
space_inside_brackets=True
# Split before arguments
split_all_comma_separated_values=True
# Split before arguments, but do not split all subexpressions recursively
# (unless needed).
split_all_top_level_comma_separated_values=False
# Split before arguments if the argument list is terminated by a
# comma.
split_arguments_when_comma_terminated=True
# Set to True to prefer splitting before '+', '-', '*', '/', '//', or '@'
# rather than after.
split_before_arithmetic_operator=False
# Set to True to prefer splitting before '&', '|' or '^' rather than
# after.
split_before_bitwise_operator=False
# Split before the closing bracket if a list or dict literal doesn't fit on
# a single line.
split_before_closing_bracket=True
# Split before a dictionary or set generator (comp_for). For example, note
# the split before the 'for':
#
# foo = {
# variable: 'Hello world, have a nice day!'
# for variable in bar if variable != 42
# }
split_before_dict_set_generator=True
# Split before the '.' if we need to split a longer expression:
#
# foo = ('This is a really long string: {}, {}, {}, {}'.format(a, b, c, d))
#
# would reformat to something like:
#
# foo = ('This is a really long string: {}, {}, {}, {}'
# .format(a, b, c, d))
split_before_dot=True
# Split after the opening paren which surrounds an expression if it doesn't
# fit on a single line.
split_before_expression_after_opening_paren=False
# If an argument / parameter list is going to be split, then split before
# the first argument.
split_before_first_argument=False
# Set to True to prefer splitting before 'and' or 'or' rather than
# after.
split_before_logical_operator=False
# Split named assignments onto individual lines.
split_before_named_assigns=True
# Set to True to split list comprehensions and generators that have
# non-trivial expressions and multiple clauses before each of these
# clauses. For example:
#
# result = [
# a_long_var + 100 for a_long_var in xrange(1000)
# if a_long_var % 10]
#
# would reformat to something like:
#
# result = [
# a_long_var + 100
# for a_long_var in xrange(1000)
# if a_long_var % 10]
split_complex_comprehension=True
# The penalty for splitting right after the opening bracket.
split_penalty_after_opening_bracket=300
# The penalty for splitting the line after a unary operator.
split_penalty_after_unary_operator=10000
# The penalty of splitting the line around the '+', '-', '*', '/', '//',
# ``%``, and '@' operators.
split_penalty_arithmetic_operator=300
# The penalty for splitting right before an if expression.
split_penalty_before_if_expr=0
# The penalty of splitting the line around the '&', '|', and '^'
# operators.
split_penalty_bitwise_operator=300
# The penalty for splitting a list comprehension or generator
# expression.
split_penalty_comprehension=2100
# The penalty for characters over the column limit.
split_penalty_excess_character=7000
# The penalty incurred by adding a line split to the logical line. The
# more line splits added the higher the penalty.
split_penalty_for_added_line_split=30
# The penalty of splitting a list of "import as" names. For example:
#
# from a_very_long_or_indented_module_name_yada_yad import (long_argument_1,
# long_argument_2,
# long_argument_3)
#
# would reformat to something like:
#
# from a_very_long_or_indented_module_name_yada_yad import (
# long_argument_1, long_argument_2, long_argument_3)
split_penalty_import_names=0
# The penalty of splitting the line around the 'and' and 'or'
# operators.
split_penalty_logical_operator=300
# Use the Tab character for indentation.
use_tabs=False

@@ -1,6 +1,4 @@
CURRENT_SIGN_SETTING := $(shell git config commit.gpgSign)
.PHONY: clean-py clean-build
.PHONY: clean clean-py clean-build
help:
@echo "clean-build - remove build artifacts"
@@ -25,26 +23,18 @@ clean-py:
find . -name '*~' -exec rm -f {} +
dev:
python3 -m pip install pytest
python3 -m pip install pytest-ordering
python3 -m pip install pyhmy[dev]
test:
python3 -m py.test -r s -s tests
python3 -m pytest -r s -s tests
install:
python3 -m pip install -e .
release: clean
python3 -m incremental.update pyhmy --patch --rc
python3 -m incremental.update pyhmy
python3 setup.py sdist bdist_wheel
python3 -m build
twine upload dist/*
sdist: clean
ifdef VERSION # Argument for incremental, reference: https://pypi.org/project/incremental/ .
python3 -m incremental.update pyhmy --$(VERSION)
else
python3 -m incremental.update pyhmy --dev
endif
python3 setup.py sdist bdist_wheel
python3 -m build
ls -l dist

@@ -8,14 +8,12 @@ and [related codebases](https://github.com/harmony-one).
[Full documentation is located on Harmony's GitBook](https://docs.harmony.one/) (in progress).
## Installation
```
```bash
pip install pyhmy
```
On macOS, make sure you have Python 3 installed, and use `python3` to install `pyhmy`:
```bash
sudo pip3 install pathlib
sudo pip3 install pyhmy
```
@@ -23,35 +21,561 @@ sudo pip3 install pyhmy
## Development
Clone the repository and then run the following:
```
```bash
make install
```
## Running tests
You need to run a local Harmony blockchain (instructions [here](https://github.com/harmony-one/harmony/README.md)) that has staking enabled.
You can run all of the tests with the following:
Before you can run the tests, you need the Python dependencies (`make install`), plus `docker` and `go` installed, to quickly run a local blockchain with staking enabled (detailed instructions [here](https://github.com/harmony-one/harmony/blob/main/README.md)):
```bash
mkdir -p $(go env GOPATH)/src/github.com/harmony-one
cd $(go env GOPATH)/src/github.com/harmony-one
git clone https://github.com/harmony-one/mcl.git
git clone https://github.com/harmony-one/bls.git
git clone https://github.com/harmony-one/harmony.git
cd harmony
make debug
```
Once the terminal displays a couple of `Started server` lines, use another shell to run the tests:
```bash
make test
```
Or directly with `pytest` (reference [here](https://docs.pytest.org/en/latest/index.html) for more info):
```
py.test tests
```bash
pytest tests
```
## Releasing
You can release this library with the following command (assuming you have the credentials to upload):
```bash
make release
```
## Usage
```py
test_net = 'https://api.s0.b.hmny.io' # this is shard 0
test_net_shard_1 = 'https://api.s1.b.hmny.io'
test_address = 'one18t4yj4fuutj83uwqckkvxp9gfa0568uc48ggj7'
main_net = 'https://rpc.s0.t.hmny.io'
main_net_shard_1 = 'https://rpc.s1.t.hmny.io'
```
make release
#### utilities
##### Address conversion
```py
from pyhmy import util
hex_addr = util.convert_one_to_hex('one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3')
one_addr = util.convert_hex_to_one('0xA5241513DA9F4463F1d4874b548dFBAC29D91f34')
```
##### Ether / Wei conversion
```py
from pyhmy import numbers
one_ether_in_wei = numbers.convert_one_to_atto(1) # as a decimal.Decimal
wei_to_ether = numbers.convert_atto_to_one(int(1e18))
```
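Both helpers convert by a fixed factor of 10^18 (1 ONE = 10^18 ATTO), so a quick sanity check looks like this (a sketch, not part of the library documentation):
```py
from decimal import Decimal
from pyhmy import numbers

# 1 ONE should convert to exactly 10**18 ATTO and back again
assert numbers.convert_one_to_atto(1) == Decimal(10 ** 18)
assert numbers.convert_atto_to_one(10 ** 18) == Decimal(1)
```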
#### accounts
```py
from pyhmy import account
```
##### Balance / account related information
````py
balance = account.get_balance(test_address, endpoint=test_net) # on shard 0, in ATTO
total_balance = account.get_total_balance(test_address, endpoint=test_net) # on all shards, in ATTO
balance_by_shard = account.get_balance_on_all_shards(test_address, endpoint=test_net) # list of dictionaries with shard and balance as keys
genesis_balance = account.get_balance_by_block(test_address, block_num=0, endpoint=test_net)
latest_balance = account.get_balance_by_block(test_address, block_num='latest', endpoint=test_net) # block_num can be a string 'latest', or 'pending', if implemented at the RPC level
account_nonce = account.get_account_nonce(test_address, block_num='latest', endpoint=test_net)
````
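For example, the per-shard balances returned by `get_balance_on_all_shards` can be iterated directly (a minimal sketch; the `shard` and `balance` key names follow the comment above):
```py
for entry in balance_by_shard:
    print(f"shard {entry['shard']}: {entry['balance']} ATTO")
```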
##### Transaction counts
````py
tx_count = account.get_transactions_count(test_address, tx_type='ALL', endpoint=test_net)
sent_tx_count = account.get_transactions_count(test_address, tx_type='SENT', endpoint=test_net)
received_tx_count = account.get_transactions_count(test_address, tx_type='RECEIVED', endpoint=test_net)
legacy_tx_count = account.get_transaction_count(test_address, block_num='latest', endpoint=test_net) # API is legacy
legacy_tx_count_pending = account.get_transaction_count(test_address, block_num='pending', endpoint=test_net)
````
##### Staking transaction counts
````py
stx_count = account.get_staking_transactions_count(test_address, tx_type='ALL', endpoint=test_net)
sent_stx_count = account.get_staking_transactions_count(test_address, tx_type='SENT', endpoint=test_net)
received_stx_count = account.get_staking_transactions_count(test_address, tx_type='RECEIVED', endpoint=test_net)
````
##### Transaction history
To get a list of hashes, use `include_full_tx=False`
````py
first_100_tx_hashes = account.get_transaction_history(test_address, page=0, page_size=100, include_full_tx=False, endpoint=test_net)
````
To get the next 100 transactions, change the `page`
```py
next_100_tx_hashes = account.get_transaction_history(test_address, page=1, page_size=100, include_full_tx=False, endpoint=test_net)
```
To get a list of full transaction details, use `include_full_tx=True` (see `get_transaction_by_hash` for the reply structure)
````py
first_3_full_tx = account.get_transaction_history(test_address, page=0, page_size=3, include_full_tx=True, endpoint=test_net)
````
To get newest transactions, use `order='DESC'`
````py
last_3_full_tx = account.get_transaction_history(test_address, page=0, page_size=3, include_full_tx=True, order='DESC', endpoint=test_net)
````
To change the transaction type (SENT / RECEIVED / ALL), pass the `tx_type` parameter
```py
first_100_received_tx_hashes = account.get_transaction_history(test_address, page=0, page_size=100, include_full_tx=False, tx_type='RECEIVED', endpoint=test_net)
```
##### Staking transaction history
To get a list of staking hashes, use `include_full_tx=False`
````py
first_100_stx_hashes = account.get_staking_transaction_history(test_address, page=0, page_size=100, include_full_tx=False, endpoint=test_net)
````
To get the next 100 staking transactions, change the `page`
```py
next_100_stx_hashes = account.get_staking_transaction_history(test_address, page=1, page_size=100, include_full_tx=False, endpoint=test_net)
```
To get a list of full staking transaction details, use `include_full_tx=True` (see `get_transaction_by_hash` for the reply structure)
````py
first_3_full_stx = account.get_staking_transaction_history(test_address, page=0, page_size=3, include_full_tx=True, endpoint=test_net)
````
To get newest staking transactions, use `order='DESC'`
````py
last_3_full_stx = account.get_staking_transaction_history(test_address, page=0, page_size=3, include_full_tx=True, order='DESC', endpoint=test_net)
````
To change the staking transaction type (SENT / RECEIVED / ALL), pass the `tx_type` parameter
```py
first_100_received_stx_hashes = account.get_staking_transaction_history(test_address, page=0, page_size=100, include_full_tx=False, tx_type='RECEIVED', endpoint=test_net)
```
#### Blockchain
```py
from pyhmy import blockchain
from decimal import Decimal
```
##### Node / network information
```py
chain_id = blockchain.chain_id(test_net) # chain type, for example, mainnet or testnet
node_metadata = blockchain.get_node_metadata(test_net) # metadata about the endpoint
peer_info = blockchain.get_peer_info(test_net) # peers of the endpoint
protocol_version = blockchain.protocol_version(test_net) # protocol version being used
num_peers = blockchain.get_num_peers(test_net) # number of peers of the endpoint
version = blockchain.get_version(test_net) # EVM chain id, https://chainid.network
is_node_in_sync = blockchain.in_sync(test_net) # whether the node is in sync (not out of sync or not syncing)
is_beacon_in_sync = blockchain.beacon_in_sync(test_net) # whether the beacon node is in sync
prestaking_epoch_number = blockchain.get_prestaking_epoch(test_net)
staking_epoch_number = blockchain.get_staking_epoch(test_net)
```
##### Sharding information
```py
shard_id = blockchain.get_shard(test_net) # get shard id of the endpoint
sharding_structure = blockchain.get_sharding_structure(test_net) # list of dictionaries, each representing a shard
last_cross_links = blockchain.get_last_cross_links(test_net) # list of dictionaries for each shard except test_net
```
##### Current network status
```py
leader_address = blockchain.get_leader_address(test_net)
is_last_block = blockchain.is_last_block(0, test_net)
last_block_of_epoch5 = blockchain.epoch_last_block(5, test_net)
circulating_supply = Decimal(blockchain.get_circulating_supply(test_net))
premined = blockchain.get_total_supply(test_net) # should be None?
current_block_num = blockchain.get_block_number(test_net)
current_epoch = blockchain.get_current_epoch(test_net)
gas_price = blockchain.get_gas_price(test_net) # this returns 1 always
```
##### Block headers
```py
latest_header = blockchain.get_latest_header(test_net) # header contains hash, number, cross links, signature, time, etc (see get_latest_header for a full list)
latest_hash = latest_header['blockHash']
latest_number = latest_header['blockNumber']
previous_header = blockchain.get_header_by_number(latest_number-1, test_net)
chain_headers = blockchain.get_latest_chain_headers(test_net_shard_1) # chain headers by beacon and shard
```
##### Blocks
###### By block number
Fetch the barebones information about the block as a dictionary
```py
latest_block = blockchain.get_block_by_number(block_num='latest', endpoint=test_net)
```
Fetch a block with full information (`full_tx=True`) for each transaction in the block
```py
block = blockchain.get_block_by_number(block_num=9017724, full_tx=True, include_tx=True, include_staking_tx=True, endpoint=test_net)
```
Fetch a block and only staking transactions (`include_tx=False, include_staking_tx=True`) for the block
```py
block = blockchain.get_block_by_number(block_num='latest', include_tx=False, include_staking_tx=True, endpoint=test_net)
```
Fetch block signer addresses (`include_signers=True`) as a list
```py
signers = blockchain.get_block_by_number(block_num=9017724, include_signers=True, endpoint=test_net)['signers']
```
Or, alternatively, use the direct `get_block_signers` method:
```py
signers = blockchain.get_block_signers(block_num=9017724, endpoint=test_net)
```
Fetch the public keys for signers
```py
signers_keys = blockchain.get_block_signers_keys(block_num=9017724, endpoint=test_net)
```
Check if an address is a signer for a block
```py
is_block_signer = blockchain.is_block_signer(block_num=9017724, address='one1yc06ghr2p8xnl2380kpfayweguuhxdtupkhqzw', endpoint=test_net)
```
Fetch the number of blocks signed by a particular validator for the last epoch
```py
number_signed_blocks = blockchain.get_signed_blocks(address='one1yc06ghr2p8xnl2380kpfayweguuhxdtupkhqzw', endpoint=test_net)
```
Fetch a list of validators and their public keys for specific epoch number
```py
validators = blockchain.get_validators(epoch=12, endpoint=test_net)
validator_keys = blockchain.get_validator_keys(epoch=12, endpoint=test_net)
```
Fetch number of transactions
```py
tx_count = blockchain.get_block_transaction_count_by_number(block_num='latest', endpoint=test_net)
```
Fetch number of staking transactions
```py
stx_count = blockchain.get_block_staking_transaction_count_by_number(block_num='latest', endpoint=test_net)
```
Fetch a list of blocks using the block numbers
```py
blocks = blockchain.get_blocks(start_block=0, end_block=2, full_tx=False, include_tx=False, include_staking_tx=False, include_signers=False, endpoint=test_net)
```
###### By block hash
Most of the functions described above can be applied for fetching information about a block whose hash is known, for example:
```py
block_hash = '0x44fa170c25f262697e5802098cd9eca72889a637ea52feb40c521f2681a6d720'
block = blockchain.get_block_by_hash(block_hash=block_hash, endpoint=test_net)
block_with_full_tx = blockchain.get_block_by_hash(block_hash=block_hash, full_tx=True, include_tx=True, include_staking_tx=True, endpoint=test_net)
block_with_only_staking_tx = blockchain.get_block_by_hash(block_hash=block_hash, include_tx=False, include_staking_tx=True, endpoint=test_net)
signers = blockchain.get_block_by_hash(block_hash=block_hash, include_signers=True, endpoint=test_net)['signers']
tx_count = blockchain.get_block_transaction_count_by_hash(block_hash=block_hash, endpoint=test_net)
stx_count = blockchain.get_block_staking_transaction_count_by_hash(block_hash=block_hash, endpoint=test_net)
```
#### Staking
```py
from pyhmy import staking
validator_addr = 'one1xjanr7lgulc0fqyc8dmfp6jfwuje2d94xfnzyd'
delegator_addr = 'one1y2624lg0mpkxkcttaj0c85pp8pfmh2tt5zhdte'
```
##### Validation
```py
all_validators = staking.get_all_validator_addresses(endpoint=test_net) # list of addresses
validator_information = staking.get_validator_information(validator_addr, endpoint=test_net) # dict with all info
validator_information_100 = staking.get_all_validator_information(page=0, endpoint=test_net)
elected_validators = staking.get_elected_validator_addresses(endpoint=test_net) # list of addresses
validators_for_epoch = staking.get_validators(epoch=73772, endpoint=test_net) # dict with list of validators and balance
validators_information_100_for_block = staking.get_all_validator_information_by_block_number(block_num=9017724, page=0, endpoint=test_net)
validator_keys_for_epoch = staking.get_validator_keys(epoch=73772, endpoint=test_net) # list of public keys
validator_information_at_block = staking.get_validator_information_by_block_number(validator_addr, block_num=9017724, endpoint=test_net)
self_delegation = staking.get_validator_self_delegation(validator_addr, endpoint=test_net)
total_delegation = staking.get_validator_total_delegation(validator_addr, endpoint=test_net)
```
##### Delegation
```py
delegation_information = staking.get_all_delegation_information(page=0, endpoint=test_net)
delegations_by_delegator = staking.get_delegations_by_delegator(delegator_addr, test_net)
delegations_by_delegator_at_block = staking.get_delegations_by_delegator_by_block_number(delegator_addr, block_num=9017724, endpoint=test_net)
delegation_by_delegator_and_validator = staking.get_delegation_by_delegator_and_validator(delegator_addr, validator_addr, test_net)
avail_redelegation_balance = staking.get_available_redelegation_balance(delegator_addr, test_net)
delegations_by_validator = staking.get_delegations_by_validator(validator_addr, test_net) # list of delegations made to this validator, each a dictionary
```
##### Network
```py
utility_metrics = staking.get_current_utility_metrics(test_net)
network_info = staking.get_staking_network_info(test_net)
super_committees = staking.get_super_committees(test_net)
super_committees_current = super_committees['current'] # list of voting committees as a dict
super_committees_previous = super_committees['previous']
total_staking = staking.get_total_staking(endpoint=test_net) # by all validators, only for beaconchain
median_stake_snapshot = staking.get_raw_median_stake_snapshot(test_net)
```
##### Validator class
Instantiate a validator object and load it from the chain
```py
from pyhmy.validator import Validator
validator = Validator(validator_addr)
validator.load_from_blockchain(test_net)
```
Create a new validator object and load from dictionary
```py
from pyhmy.numbers import convert_one_to_atto
validator = Validator('one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9')
info = {
    'name': 'Alice',
    'identity': 'alice',
    'website': 'alice.harmony.one',
    'details': "Don't mess with me!!!",
    'security-contact': 'Bob',
    'min-self-delegation': convert_one_to_atto(10000),
    'amount': convert_one_to_atto(10001),
    'max-rate': '0.9',
    'max-change-rate': '0.05',
    'rate': '0.01',
    'bls-public-keys': ['0xa20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712'],
    "bls-key-sigs": [
        "0xef2c49a2f31fbbd23c21bc176eaf05cd0bebe6832033075d81fea7cff6f9bc1ab42f3b6895c5493fe645d8379d2eaa1413de55a9d3ce412a4f747cb57d52cc4da4754bfb2583ec9a41fe5dd48287f964f276336699959a5fcef3391dc24df00d",
    ],
    'max-total-delegation': convert_one_to_atto(40000)
}
validator.load(info)
```
Sign a validator creation transaction
```py
signed_create_tx_hash = validator.sign_create_validator_transaction(
    nonce = 2,
    gas_price = 1,
    gas_limit = 100,
    private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48',
    chain_id = 2).rawTransaction.hex()
```
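The resulting hex string is a raw staking transaction, so it can be broadcast the same way as the staking transaction example further below (a sketch, assuming the signing account is funded and the nonce is current):
```py
from pyhmy import transaction

create_tx_hash = transaction.send_raw_staking_transaction(signed_create_tx_hash, test_net)
```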
To edit a validator, change its parameters using the `setter` functions, for example `validator.set_details`, except for `rate`, `bls_keys_to_add` and `bls_keys_to_remove`, which can be passed to the function below:
```py
signed_edit_tx_hash = validator.sign_edit_validator_transaction(
    nonce = 2,
    gas_price = 1,
    gas_limit = 100,
    rate = '0.06',
    bls_key_to_add = "0xb8c3b3a0f1966c169ca73c348f4b8aee333a407125ab5c67f1d6e1e18ab052ed5fff0f1f7d4a7f789528b5ccd9c47b04",
    bls_key_to_add_sig = "0x3de4dff17451fb76a9690efce34bced97dd87eccd371fcd25335826cb879ca21281e82e5c2c76d4ef0ab0fc16e462312628834cbc1f29008b28e16a757367808be85180945b991be3103f98c14c7e3b3e54796d34aab4d8e812d440aa251c419",
    bls_keys_to_remove = '0xa20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712',
    private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48',
    chain_id = 2).rawTransaction.hex()
```
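A minimal sketch of the setter-based flow described above; only `set_details` is named in this README, so treat any other setter names as assumptions and check `pyhmy/validator.py` before relying on them:
```py
# update a mutable field through its setter, then re-sign with
# sign_edit_validator_transaction as shown above
validator.set_details('Now accepting delegations')
```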
### Transactions
```py
from pyhmy import transaction
```
##### Pool
```py
pending_tx = transaction.get_pending_transactions(test_net)
pending_stx = transaction.get_pending_staking_transactions(test_net)
tx_error_sink = transaction.get_transaction_error_sink(test_net)
stx_error_sink = transaction.get_staking_transaction_error_sink(test_net)
pool_stats = transaction.get_pool_stats(test_net)
pending_cx_receipts = transaction.get_pending_cx_receipts(test_net)
```
##### Fetching transactions
```py
tx_hash = '0x500f7f0ee70f866ba7e80592c06b409fabd7ace018a9b755a7f1f29e725e4423'
block_hash = '0xb94bf6e8a8a970d4d42dfe42f7f231af0ff7fd54e7f410395e3b306f2d4000d4'
tx = transaction.get_transaction_by_hash(tx_hash, test_net) # dict with tx-level info like from / to / gas
tx_from_block_hash = transaction.get_transaction_by_block_hash_and_index(block_hash, tx_index=0, endpoint=test_net)
tx_from_block_number = transaction.get_transaction_by_block_number_and_index(9017724, tx_index=0, endpoint=test_net)
tx_receipt = transaction.get_transaction_receipt(tx_hash, test_net)
```
##### Fetching staking transactions
```py
stx_hash = '0x3f616a8ef34f111f11813630cdcccb8fb6643b2affbfa91d3d8dbd1607e9bc33'
block_hash = '0x294dc88c7b6f3125f229a3cfd8d9b788a0bcfe9409ef431836adcd83839ba9f0' # block number 9018043
stx = transaction.get_staking_transaction_by_hash(stx_hash, test_net)
stx_from_block_hash = transaction.get_staking_transaction_by_block_hash_and_index(block_hash, tx_index=0, endpoint=test_net)
stx_from_block_number = transaction.get_staking_transaction_by_block_number_and_index(9018043, tx_index=0, endpoint=test_net)
```
##### Cross shard transactions
```py
cx_hash = '0xd324cc57280411dfac5a7ec2987d0b83e25e27a3d5bb5d3531262387331d692b'
cx_receipt = transaction.get_cx_receipt_by_hash(cx_hash, main_net_shard_1) # the shard which receives the tx
tx_resent = transaction.resend_cx_receipt(cx_hash, main_net) # beacon chain
```
##### Sending transactions
Sign it with your private key and use `send_raw_transaction`
```py
from pyhmy import signing
tx = {
    'chainId': 2,
    'from': 'one18t4yj4fuutj83uwqckkvxp9gfa0568uc48ggj7',
    'gas': 6721900,
    'gasPrice': 1000000000,
    'nonce': 6055,
    'shardID': 0,
    'to': 'one1ngt7wj57ruz7kg4ejp7nw8z7z6640288ryckh9',
    'toShardID': 0,
    'value': 500000000000000000000
}
transaction.send_raw_transaction(signing.sign_transaction(tx, '01F903CE0C960FF3A9E68E80FF5FFC344358D80CE1C221C3F9711AF07F83A3BD').rawTransaction.hex(), test_net)
```
A similar approach can be followed for staking transactions
```py
from pyhmy import staking_structures, staking_signing
tx = {
    'chainId': 2,
    'delegatorAddress': 'one18t4yj4fuutj83uwqckkvxp9gfa0568uc48ggj7',
    'directive': staking_structures.Directive.CollectRewards,
    'gasLimit': 6721900,
    'gasPrice': 1,
    'nonce': 6056
}
transaction.send_raw_staking_transaction(staking_signing.sign_staking_transaction(tx, private_key = '01F903CE0C960FF3A9E68E80FF5FFC344358D80CE1C221C3F9711AF07F83A3BD').rawTransaction.hex(), test_net)
```
### Contracts
```py
from pyhmy import contract
from pyhmy.util import convert_one_to_hex
contract_addr = 'one1rcs4yy4kln53ux60qdeuhhvpygn2sutn500dhw'
```
Call a contract without saving state
```py
from pyhmy import numbers
result = contract.call(convert_one_to_hex(contract_addr), 'latest', value=hex(int(numbers.convert_one_to_atto(5))),
    gas_price=hex(1), gas=hex(100000), endpoint=test_net)
```
Estimate gas required for a smart contract call
```py
estimated_gas = contract.estimate_gas(convert_one_to_hex(contract_addr), endpoint=test_net)
```
Fetch the byte code of the contract
```py
byte_code = contract.get_code(convert_one_to_hex(contract_addr), 'latest', endpoint=test_net)
```
Get storage in the contract at `key`
```py
storage = contract.get_storage_at(convert_one_to_hex(contract_addr), key='0x0', block_num='latest', endpoint=test_net)
```
Calling a function on a contract needs the contract ABI. The ABI can be obtained by compiling the contract.
```py
from web3 import Web3
from web3 import providers
from pyhmy.util import convert_one_to_hex
contract_abi = '[{"constant":true,"inputs":[],"name":"manager","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"pickWinner","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"getPlayers","outputs":[{"name":"","type":"address[]"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[],"name":"enter","outputs":[],"payable":true,"stateMutability":"payable","type":"function"},{"constant":true,"inputs":[{"name":"","type":"uint256"}],"name":"players","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"}]'
w3 = Web3(providers.HTTPProvider(test_net))
lottery = w3.eth.contract(abi=contract_abi, address=convert_one_to_hex('one1rcs4yy4kln53ux60qdeuhhvpygn2sutn500dhw'))
lottery.functions.getPlayers().call()
```
To actually participate in a contract, you can sign a transaction from your account to it.
```py
from pyhmy import signing
contract_addr = 'one1rcs4yy4kln53ux60qdeuhhvpygn2sutn500dhw'
tx = {
    'chainId': 2,
    'from': 'one18t4yj4fuutj83uwqckkvxp9gfa0568uc48ggj7',
    'gas': 6721900,
    'gasPrice': 1000000000,
    'nonce': 6054,
    'shardID': 0,
    'to': contract_addr,
    'toShardID': 0,
    'value': 500000000000000000000
}
tx_hash = transaction.send_raw_transaction(signing.sign_transaction(tx, '01F903CE0C960FF3A9E68E80FF5FFC344358D80CE1C221C3F9711AF07F83A3BD').rawTransaction.hex(), test_net)
```
To deploy a contract, sign a transaction from your account without a `to` field and with the byte code as `data` and send it.
```py
from pyhmy import signing
from pyhmy import transaction
contract_tx = {
'chainId': 2, # test net data
'data': '0x608060405234801561001057600080fd5b50600436106100415760003560e01c8063445df0ac146100465780638da5cb5b14610064578063fdacd576146100ae575b600080fd5b61004e6100dc565b6040518082815260200191505060405180910390f35b61006c6100e2565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b6100da600480360360208110156100c457600080fd5b8101908080359060200190929190505050610107565b005b60015481565b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141561016457806001819055505b5056fea265627a7a723158209b80813a158b44af65aee232b44c0ac06472c48f4abbe298852a39f0ff34a9f264736f6c63430005100032', # Migrations.sol
'from': 'one18t4yj4fuutj83uwqckkvxp9gfa0568uc48ggj7',
'gas': 6721900,
'gasPrice': 1000000000,
'nonce': 6049,
'shardID': 0,
'toShardID': 0
}
ctx_hash = transaction.send_raw_transaction(signing.sign_transaction(contract_tx, private_key = '01F903CE0C960FF3A9E68E80FF5FFC344358D80CE1C221C3F9711AF07F83A3BD').rawTransaction.hex(), test_net)
# the call below may need a short wait before the transaction reaches the chain
contract_address = transaction.get_transaction_receipt(ctx_hash, test_net)['contractAddress']
```
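A simple way to handle that time gap is to wait briefly before asking for the receipt (a sketch; the delay is arbitrary and may need tuning for the network you use):
```py
import time

time.sleep(5)  # give the transaction a few seconds to be included in a block
contract_address = transaction.get_transaction_receipt(ctx_hash, test_net)['contractAddress']
```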
### Signing transactions
```py
from pyhmy import signing
```
Create a `transaction_dict` with the parameters, and supply your private key to sign (but not submit) a transaction. A signed transaction can be submitted using `transaction.send_raw_transaction`.
```py
transaction_dict = {
    'nonce': 2,
    'gasPrice': 1,
    'gas': 100, # signing.py uses Ethereum-style fields, which call the gas limit simply gas
    'to': '0x14791697260e4c9a71f18484c9f997b308e59325',
    'value': 5,
    'shardID': 0,
    'toShardID': 1,
    'chainId': 'HmyMainnet'
}
signed_tx = signing.sign_transaction(transaction_dict, private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
signed_hash = signed_tx.rawTransaction.hex()
```
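As noted above, the signed payload is only broadcast once it is handed to `transaction.send_raw_transaction`, for example:
```py
from pyhmy import transaction

tx_hash = transaction.send_raw_transaction(signed_hash, test_net)
```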
For a transaction which is Ethereum-like, the `shardID` and `toShardID` fields are optional; omitting them implies that the transaction is not cross-shard.
```py
transaction_dict = {
    'nonce': 2,
    'gasPrice': 1,
    'gas': 100, # signing.py uses Ethereum-style fields, which call the gas limit simply gas
    'to': '0x14791697260e4c9a71f18484c9f997b308e59325',
    'value': 5,
}
signed_tx = signing.sign_transaction(transaction_dict, private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
signed_hash = signed_tx.rawTransaction.hex()
```
The `chainId` parameter is also optional, and [according to Ethereum](https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/_utils/transactions.py#L122), it should not be passed if "you want a transaction that can be replayed across networks." A full list of the possible values of `chainId` is provided below. You can pass either the `str` or the `int`. The RPC API may, however, reject the transaction, which is why it is recommended to pass either `1` or `2` for `mainnet` and `testnet` respectively.
```py
Default = 0,
EthMainnet = 1,
Morden = 2,
Ropsten = 3,
Rinkeby = 4,
RootstockMainnet = 30,
RootstockTestnet = 31,
Kovan = 42,
EtcMainnet = 61,
EtcTestnet = 62,
Geth = 1337,
Ganache = 0,
HmyMainnet = 1,
HmyTestnet = 2,
HmyLocal = 2,
HmyPangaea = 3,
```
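For example, pinning the earlier `transaction_dict` to the Harmony testnet works with either form (a sketch; the integer form is the one recommended above):
```py
transaction_dict['chainId'] = 2               # int form, recommended
# transaction_dict['chainId'] = 'HmyTestnet'  # str form is also accepted
signed_tx = signing.sign_transaction(transaction_dict, private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
```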
### Signing staking transactions
```py
from pyhmy import staking_structures, staking_signing
```
To sign a transaction to collect rewards, supply the dictionary containing the `delegatorAddress` and the private key.
```py
transaction_dict = {
    'directive': staking_structures.Directive.CollectRewards,
    'delegatorAddress': 'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
    'nonce': 2,
    'gasPrice': 1,
    'gasLimit': 100,
}
signed_tx = staking_signing.sign_staking_transaction(transaction_dict, private_key = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
```
To sign a transaction to delegate or undelegate, supply the dictionary containing the `delegatorAddress`, the `validatorAddress`, the `amount` to delegate or undelegate, and the private key.
```py
transaction_dict = {
    'directive': staking_structures.Directive.Delegate,
    'delegatorAddress': 'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
    'validatorAddress': 'one1xjanr7lgulc0fqyc8dmfp6jfwuje2d94xfnzyd',
    'amount': 5,
    'nonce': 2,
    'gasPrice': 1,
    'gasLimit': 100,
}
signed_tx = staking_signing.sign_staking_transaction(transaction_dict, '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
transaction_dict = {
    'directive': staking_structures.Directive.Undelegate,
    'delegatorAddress': 'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
    'validatorAddress': 'one1xjanr7lgulc0fqyc8dmfp6jfwuje2d94xfnzyd',
    'amount': 5,
    'nonce': 2,
    'gasPrice': 1,
    'gasLimit': 100,
}
signed_tx = staking_signing.sign_staking_transaction(transaction_dict, '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
```
For validator-related transactions, see the [section on the Validator class](#validator-class).
## Keeping your private key safe
You need `eth-keyfile` installed
```bash
pip install eth-keyfile
```
In a `Python` shell, you can save or load the key into / from a key file.
```py
import eth_keyfile
from eth_utils import to_bytes, to_hex
import json
keyfile = eth_keyfile.create_keyfile_json(to_bytes(hexstr='01F903CE0C960FF3A9E68E80FF5FFC344358D80CE1C221C3F9711AF07F83A3BD'), b'password')
with open('keyfile.json', 'w+') as outfile:
    json.dump(keyfile, outfile)
TODO: sample of how to use the library, reference Tezos.
TODO: start (and finish) some of the documentation.
TODO: add more blockchain rpcs
TODO: check None return types for rpcs
TODO: more detailed tests for rpcs
private_key = to_hex(eth_keyfile.extract_key_from_keyfile('keyfile.json', b'password'))[2:].upper()
```

@@ -1,18 +1,13 @@
"""
`pyhmy` for interacting with the Harmony blockchain
"""
import sys
import warnings
from ._version import __version__
from .util import (
Typgpy,
get_gopath,
get_goversion,
get_bls_build_variables,
json_load
)
__version__ = "0.1.1"
if sys.version_info.major < 3:
warnings.simplefilter("always", DeprecationWarning)
warnings.simplefilter( "always", DeprecationWarning )
warnings.warn(
DeprecationWarning(
"`pyhmy` does not support Python 2. Please use Python 3."
@@ -20,11 +15,9 @@ if sys.version_info.major < 3:
)
warnings.resetwarnings()
if sys.platform.startswith('win32') or sys.platform.startswith('cygwin'):
warnings.simplefilter("always", ImportWarning)
if sys.platform.startswith( "win32" ) or sys.platform.startswith( "cygwin" ):
warnings.simplefilter( "always", ImportWarning )
warnings.warn(
ImportWarning(
"`pyhmy` does not work on Windows or Cygwin."
)
ImportWarning( "`pyhmy` does not work on Windows or Cygwin." )
)
warnings.resetwarnings()

@@ -1,11 +0,0 @@
"""
Provides pyhmy version information.
"""
# This file is auto-generated! Do not edit!
# Use `python -m incremental.update pyhmy` to change this file.
from incremental import Version
__version__ = Version('pyhmy', 20, 5, 12)
__all__ = ["__version__"]

@@ -1,33 +1,23 @@
from .rpc.request import (
rpc_request
)
"""
Interact with accounts on the Harmony blockchain
"""
from .rpc.request import rpc_request
from .rpc.exceptions import (
RPCError,
RequestsError,
RequestsTimeoutError
)
from .rpc.exceptions import RPCError, RequestsError, RequestsTimeoutError
from .exceptions import (
InvalidRPCReplyError
)
from .exceptions import InvalidRPCReplyError
from .blockchain import (
get_sharding_structure
)
from .blockchain import get_sharding_structure
from bech32 import (
bech32_decode
)
from .bech32.bech32 import bech32_decode
_default_endpoint = 'http://localhost:9500'
_default_timeout = 30
_address_length = 42
from .constants import DEFAULT_ENDPOINT, DEFAULT_TIMEOUT
def is_valid_address(address) -> bool:
def is_valid_address( address ) -> bool:
"""
Check if given string is valid one address
NOTE: This function is NOT thread safe due to the C function used by the bech32 library.
Parameters
----------
@@ -39,16 +29,20 @@ def is_valid_address(address) -> bool:
bool
Is valid address
"""
if not address.startswith('one1'):
if not address.startswith( "one1" ):
return False
hrp, _ = bech32_decode(address)
hrp, _ = bech32_decode( address )
if not hrp:
return False
return True
def get_balance(address, endpoint=_default_endpoint, timeout=_default_timeout) -> int:
"""
Get current account balance
def get_balance(
address,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get current account balance.
Parameters
----------
@@ -68,22 +62,32 @@ def get_balance(address, endpoint=_default_endpoint, timeout=_default_timeout) -
------
InvalidRPCReplyError
If received unknown result from endpoint
"""
method = 'hmy_getBalance'
params = [
address,
'latest'
]
balance = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)['result']
try:
return int(balance, 16)
except TypeError as e:
raise InvalidRPCReplyError(method, endpoint) from e
def get_balance_by_block(address, block_num, endpoint=_default_endpoint, timeout=_default_timeout) -> int:
API Reference
-------------
https://api.hmny.io/#da8901d2-d237-4c3b-9d7d-10af9def05c4
"""
Get account balance for address at a given block number
method = "hmyv2_getBalance"
params = [ address ]
try:
balance = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( balance ) # v2 returns the result as it is
except TypeError as exception: # check will work if rpc returns None
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_balance_by_block(
address,
block_num,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get account balance for address at a given block number.
Parameters
----------
@@ -105,30 +109,40 @@ def get_balance_by_block(address, block_num, endpoint=_default_endpoint, timeout
------
InvalidRPCReplyError
If received unknown result from endpoint
"""
method = 'hmy_getBalanceByBlockNumber'
params = [
address,
str(hex(block_num))
]
balance = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)['result']
try:
return int(balance, 16)
except TypeError as e:
raise InvalidRPCReplyError(method, endpoint) from e
def get_account_nonce(address, true_nonce=False, endpoint=_default_endpoint, timeout=_default_timeout) -> int:
API Reference
-------------
https://api.hmny.io/#9aeae4b8-1a09-4ed2-956b-d7c96266dd33
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/blockchain.go#L92
"""
Get the account nonce
method = "hmyv2_getBalanceByBlockNumber"
params = [ address, block_num ]
try:
balance = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( balance )
except TypeError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_account_nonce(
address,
block_num = "latest",
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get the account nonce.
Parameters
----------
address: str
Address to get transaction count for
true_nonce: :obj:`bool`, optional
True to get on-chain nonce
False to get nonce based on pending transaction pool
block_num: :obj:`int` or 'latest'
Block to get nonce at
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
@@ -143,27 +157,100 @@ def get_account_nonce(address, true_nonce=False, endpoint=_default_endpoint, tim
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L51
"""
method = 'hmy_getTransactionCount'
params = [
address,
'latest' if true_nonce else 'pending'
]
nonce = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)['result']
method = "hmyv2_getAccountNonce"
params = [ address, block_num ]
try:
return int(nonce, 16)
except TypeError as e:
raise InvalidRPCReplyError(method, endpoint) from e
nonce = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( nonce )
except TypeError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_nonce(
address,
block_num = "latest",
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""See get_account_nonce."""
return get_account_nonce( address, block_num, endpoint, timeout )
def get_transaction_count(
address,
block_num,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get the number of transactions the given address has sent for the given
block number Legacy for apiv1. For apiv2, please use
get_account_nonce/get_transactions_count/get_staking_transactions_count
apis for more granular transaction counts queries.
Parameters
----------
address: str
Address to get transaction count for
block_num: :obj:`int` or 'latest'
Block to get nonce at
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
int
The number of transactions the given address has sent for the given block number
def get_transaction_count(address, endpoint=_default_endpoint, timeout=_default_timeout) -> int:
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L69
"""
Get number of transactions & staking transactions sent by an account
method = "hmyv2_getTransactionCount"
params = [ address, block_num ]
try:
nonce = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( nonce )
except TypeError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_transactions_count(
address,
tx_type,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get the number of regular transactions from genesis of input type.
Parameters
----------
address: str
Address to get transaction count for
tx_type: str
Type of transactions to include in the count
currently supported are 'SENT', 'RECEIVED', 'ALL'
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
@@ -172,20 +259,93 @@ def get_transaction_count(address, endpoint=_default_endpoint, timeout=_default_
Returns
-------
int
Number of transactions sent by the account
Count of transactions of type tx_type
See also
--------
get_account_nonce
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/#fc97aed2-e65e-4cf4-bc01-8dadb76732c0
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L114
"""
return get_account_nonce(address, true_nonce=True, endpoint=endpoint, timeout=timeout)
method = "hmyv2_getTransactionsCount"
params = [ address, tx_type ]
try:
tx_count = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( tx_count )
except TypeError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_staking_transactions_count(
address,
tx_type,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get the number of staking transactions from genesis of input type
("SENT", "RECEIVED", "ALL")
Parameters
----------
address: str
Address to get staking transaction count for
tx_type: str
Type of staking transactions to include in the count
currently supported are 'SENT', 'RECEIVED', 'ALL'
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
def get_transaction_history(address, page=0, page_size=1000, include_full_tx=False, tx_type='ALL',
order='ASC', endpoint=_default_endpoint, timeout=_default_timeout
) -> list:
Returns
-------
int
Count of staking transactions of type tx_type
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/#ddc1b029-f341-4c4d-ba19-74b528d6e5e5
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L134
"""
Get list of transactions sent and/or received by the account
method = "hmyv2_getStakingTransactionsCount"
params = [ address, tx_type ]
try:
tx_count = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return int( tx_count )
except ( KeyError, TypeError ) as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
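# A short sketch contrasting the two granular counters above (hypothetical
# helper; assumes a node at DEFAULT_ENDPOINT and reuses the example address
# from the CLI docs).
def _example_granular_counts():
    example_address = "one1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu"
    sent = get_transactions_count( example_address, "SENT" )
    received = get_transactions_count( example_address, "RECEIVED" )
    staking_all = get_staking_transactions_count( example_address, "ALL" )
    return sent, received, staking_all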
def get_transaction_history( # pylint: disable=too-many-arguments
address,
page=0,
page_size=1000,
include_full_tx=False,
tx_type="ALL",
order="ASC",
endpoint=DEFAULT_ENDPOINT,
timeout=DEFAULT_TIMEOUT,
) -> list:
"""Get list of transactions sent and/or received by the account.
Parameters
----------
@ -203,8 +363,8 @@ def get_transaction_history(address, page=0, page_size=1000, include_full_tx=Fal
'SENT' to get all transactions sent by the address
'RECEIVED' to get all transactions received by the address
order: :obj:`str`, optional
'ASC' to sort transactions in ascending order based on timestamp
'DESC' to sort transactions in descending order based on timestamp
'ASC' to sort transactions in ascending order based on timestamp (oldest first)
'DESC' to sort transactions in descending order based on timestamp (newest first)
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
@ -212,37 +372,55 @@ def get_transaction_history(address, page=0, page_size=1000, include_full_tx=Fal
Returns
-------
list
# TODO: Add link to reference RPC documentation
list of transactions
if include_full_tx is True, each transaction is a dictionary with the following keys
see transaction/get_transaction_by_hash for a description
if include_full_tx is False, each element represents the transaction hash
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/#2200a088-81b5-4420-a291-312a7c6d880e
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L255
"""
params = [
{
'address': address,
'pageIndex': page,
'pageSize': page_size,
'fullTx': include_full_tx,
'txType': tx_type,
'order': order
"address": address,
"pageIndex": page,
"pageSize": page_size,
"fullTx": include_full_tx,
"txType": tx_type,
"order": order,
}
]
method = 'hmy_getTransactionsHistory'
tx_history = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)
method = "hmyv2_getTransactionsHistory"
try:
return tx_history['result']['transactions']
except KeyError as e:
raise InvalidRPCReplyError(method, endpoint) from e
def get_staking_transaction_history(address, page=0, page_size=1000, include_full_tx=False, tx_type='ALL',
order='ASC', endpoint=_default_endpoint, timeout=_default_timeout
) -> list:
"""
Get list of staking transactions sent by the account
tx_history = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)
return tx_history[ "result" ][ "transactions" ]
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
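# A paging sketch for get_transaction_history (hypothetical helper): it walks
# pages of 100 hashes until an empty page is returned, assuming a node at
# DEFAULT_ENDPOINT and the example address from the CLI docs.
def _example_page_through_history():
    example_address = "one1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu"
    hashes = []
    page = 0
    while True:
        batch = get_transaction_history(
            example_address,
            page = page,
            page_size = 100,
            include_full_tx = False,
            order = "ASC"
        )
        if not batch:
            break
        hashes.extend( batch )
        page += 1
    return hashes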
def get_staking_transaction_history( # pylint: disable=too-many-arguments
address,
page=0,
page_size=1000,
include_full_tx=False,
tx_type="ALL",
order="ASC",
endpoint=DEFAULT_ENDPOINT,
timeout=DEFAULT_TIMEOUT,
) -> list:
"""Get list of staking transactions sent by the account.
Parameters
----------
@ -267,36 +445,68 @@ def get_staking_transaction_history(address, page=0, page_size=1000, include_ful
Returns
-------
list
# TODO: Add link to reference RPC documentation
list of transactions
if include_full_tx is True, each transaction is a dictionary with the following keys
blockHash: :obj:`str` Block hash that transaction was finalized or
"0x0000000000000000000000000000000000000000000000000000000000000000" if tx is pending
blockNumber: :obj:`int` Block number that transaction was finalized; None if tx is pending
from: :obj:`str` Wallet address
timestamp: :obj:`int` Timestamp in Unix time when transaction was finalized
gas: :obj:`int` Gas limit in Atto
gasPrice: :obj:`int` Gas price in Atto
hash: :obj:`str` Transaction hash
nonce: :obj:`int` Wallet nonce for the transaction
transactionIndex: :obj:`int` Index of transaction in block; None if tx is pending
type: :obj:`str` Type of staking transaction
for example, "CollectRewards", "Delegate", "Undelegate"
msg: :obj:`dict` Message attached to the staking transaction
r: :obj:`str` First 32 bytes of the transaction signature
s: :obj:`str` Next 32 bytes of the transaction signature
v: :obj:`str` Recovery value + 27, as hex string
if include_full_tx is False, each element represents the transaction hash
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/#c5d25b36-57be-4e43-a23b-17ace350e322
https://github.com/harmony-one/harmony/blob/9f320436ff30d9babd957bc5f2e15a1818c86584/rpc/transaction.go#L303
"""
params = [
{
'address': address,
'pageIndex': page,
'pageSize': page_size,
'fullTx': include_full_tx,
'txType': tx_type,
'order': order
"address": address,
"pageIndex": page,
"pageSize": page_size,
"fullTx": include_full_tx,
"txType": tx_type,
"order": order,
}
]
# Using v2 API, because getStakingTransactionHistory not implemented in v1
method = 'hmyv2_getStakingTransactionsHistory'
stx_history = rpc_request(method, params=params, endpoint=endpoint, timeout=timeout)['result']
method = "hmyv2_getStakingTransactionsHistory"
try:
return stx_history['staking_transactions']
except KeyError as e:
raise InvalidRPCReplyError(method, endpoint) from e
def get_balance_on_all_shards(address, skip_error=True, endpoint=_default_endpoint, timeout=_default_timeout) -> list:
"""
Get current account balance in all shards & optionally report errors getting account balance for a shard
stx_history = rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
return stx_history[ "staking_transactions" ]
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_balance_on_all_shards(
address,
skip_error = True,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> list:
"""Get current account balance in all shards & optionally report errors
getting account balance for a shard.
Parameters
----------
@ -312,8 +522,7 @@ def get_balance_on_all_shards(address, skip_error=True, endpoint=_default_endpoi
Returns
-------
list
Account balance per shard in ATTO
list of dictionaries, each containing the shard number and the balance of that shard in ATTO
Example reply:
[
{
@ -324,25 +533,39 @@ def get_balance_on_all_shards(address, skip_error=True, endpoint=_default_endpoi
]
"""
balances = []
sharding_structure = get_sharding_structure(endpoint=endpoint, timeout=timeout)
sharding_structure = get_sharding_structure(
endpoint = endpoint,
timeout = timeout
)
for shard in sharding_structure:
try:
balances.append({
'shard': shard['shardID'],
'balance': get_balance(address, endpoint=shard['http'], timeout=timeout)
})
except (KeyError, RPCError, RequestsError, RequestsTimeoutError):
balances.append(
{
"shard": shard[ "shardID" ],
"balance": get_balance(
address,
endpoint = shard[ "http" ],
timeout = timeout
),
}
)
except ( KeyError, RPCError, RequestsError, RequestsTimeoutError ):
if not skip_error:
balances.append({
'shard': shard['shardID'],
'balance': None
})
balances.append(
{
"shard": shard[ "shardID" ],
"balance": None
}
)
return balances
def get_total_balance(address, endpoint=_default_endpoint, timeout=_default_timeout) -> int:
"""
Get total account balance on all shards
def get_total_balance(
address,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> int:
"""Get total account balance on all shards.
Parameters
----------
@ -362,9 +585,18 @@ def get_total_balance(address, endpoint=_default_endpoint, timeout=_default_time
------
RuntimeError
If an error occurred getting the account balance for a shard
See also
--------
get_balance_on_all_shards
"""
try:
balances = get_balance_on_all_shards(address, skip_error=False, endpoint=endpoint, timeout=timeout)
return sum(b['balance'] for b in balances)
except TypeError as e:
raise RuntimeError from e
balances = get_balance_on_all_shards(
address,
skip_error = False,
endpoint = endpoint,
timeout = timeout
)
return sum( b[ "balance" ] for b in balances )
except TypeError as exception:
raise RuntimeError from exception
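# A balance sketch tying the two helpers above together (hypothetical helper;
# assumes a node at DEFAULT_ENDPOINT and the example address from the CLI
# docs). Values are returned in ATTO, as documented above.
def _example_balances():
    example_address = "one1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu"
    per_shard = get_balance_on_all_shards( example_address )
    total = get_total_balance( example_address )
    return per_shard, total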

@ -17,107 +17,106 @@
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Reference implementation for Bech32 and segwit addresses."""
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
def bech32_polymod(values):
def bech32_polymod( values ):
"""Internal function that computes the Bech32 checksum."""
generator = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]
generator = [ 0x3B6A57B2, 0x26508E6D, 0x1EA119FA, 0x3D4233DD, 0x2A1462B3 ]
chk = 1
for value in values:
top = chk >> 25
chk = (chk & 0x1ffffff) << 5 ^ value
for i in range(5):
chk ^= generator[i] if ((top >> i) & 1) else 0
chk = ( chk & 0x1FFFFFF ) << 5 ^ value
for i in range( 5 ):
chk ^= generator[ i ] if ( ( top >> i ) & 1 ) else 0
return chk
def bech32_hrp_expand(hrp):
def bech32_hrp_expand( hrp ):
"""Expand the HRP into values for checksum computation."""
return [ord(x) >> 5 for x in hrp] + [0] + [ord(x) & 31 for x in hrp]
return [ ord( x ) >> 5 for x in hrp ] + [ 0
] + [ ord( x ) & 31 for x in hrp ]
def bech32_verify_checksum(hrp, data):
def bech32_verify_checksum( hrp, data ):
"""Verify a checksum given HRP and converted data characters."""
return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1
return bech32_polymod( bech32_hrp_expand( hrp ) + data ) == 1
def bech32_create_checksum(hrp, data):
def bech32_create_checksum( hrp, data ):
"""Compute the checksum values given HRP and data."""
values = bech32_hrp_expand(hrp) + data
polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1
return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)]
values = bech32_hrp_expand( hrp ) + data
polymod = bech32_polymod( values + [ 0, 0, 0, 0, 0, 0 ] ) ^ 1
return [ ( polymod >> 5 * ( 5 - i ) ) & 31 for i in range( 6 ) ]
def bech32_encode(hrp, data):
def bech32_encode( hrp, data ):
"""Compute a Bech32 string given HRP and data values."""
combined = data + bech32_create_checksum(hrp, data)
return hrp + '1' + ''.join([CHARSET[d] for d in combined])
combined = data + bech32_create_checksum( hrp, data )
return hrp + "1" + "".join( [ CHARSET[ d ] for d in combined ] )
def bech32_decode(bech):
def bech32_decode( bech ):
"""Validate a Bech32 string, and determine HRP and data."""
if ((any(ord(x) < 33 or ord(x) > 126 for x in bech)) or
(bech.lower() != bech and bech.upper() != bech)):
return (None, None)
if ( any( ord( x ) < 33 or ord( x ) > 126 for x in bech
) ) or ( bech.lower() != bech and bech.upper() != bech ):
return ( None, None )
bech = bech.lower()
pos = bech.rfind('1')
if pos < 1 or pos + 7 > len(bech) or len(bech) > 90:
return (None, None)
if not all(x in CHARSET for x in bech[pos+1:]):
return (None, None)
hrp = bech[:pos]
data = [CHARSET.find(x) for x in bech[pos+1:]]
if not bech32_verify_checksum(hrp, data):
return (None, None)
return (hrp, data[:-6])
def convertbits(data, frombits, tobits, pad=True):
pos = bech.rfind( "1" )
if pos < 1 or pos + 7 > len( bech ) or len( bech ) > 90:
return ( None, None )
if not all( x in CHARSET for x in bech[ pos + 1 : ] ):
return ( None, None )
hrp = bech[ : pos ]
data = [ CHARSET.find( x ) for x in bech[ pos + 1 : ] ]
if not bech32_verify_checksum( hrp, data ):
return ( None, None )
return ( hrp, data[ :-6 ] )
def convertbits( data, frombits, tobits, pad = True ):
"""General power-of-2 base conversion."""
acc = 0
bits = 0
ret = []
maxv = (1 << tobits) - 1
max_acc = (1 << (frombits + tobits - 1)) - 1
maxv = ( 1 << tobits ) - 1
max_acc = ( 1 << ( frombits + tobits - 1 ) ) - 1
for value in data:
if value < 0 or (value >> frombits):
if value < 0 or ( value >> frombits ):
return None
acc = ((acc << frombits) | value) & max_acc
acc = ( ( acc << frombits ) | value ) & max_acc
bits += frombits
while bits >= tobits:
bits -= tobits
ret.append((acc >> bits) & maxv)
ret.append( ( acc >> bits ) & maxv )
if pad:
if bits:
ret.append((acc << (tobits - bits)) & maxv)
elif bits >= frombits or ((acc << (tobits - bits)) & maxv):
ret.append( ( acc << ( tobits - bits ) ) & maxv )
elif bits >= frombits or ( ( acc << ( tobits - bits ) ) & maxv ):
return None
return ret
def decode(hrp, addr):
def decode( hrp, addr ):
"""Decode a segwit address."""
hrpgot, data = bech32_decode(addr)
hrpgot, data = bech32_decode( addr )
if hrpgot != hrp:
return (None, None)
decoded = convertbits(data[1:], 5, 8, False)
if decoded is None or len(decoded) < 2 or len(decoded) > 40:
return (None, None)
if data[0] > 16:
return (None, None)
if data[0] == 0 and len(decoded) != 20 and len(decoded) != 32:
return (None, None)
return (data[0], decoded)
def encode(hrp, witver, witprog):
return ( None, None )
decoded = convertbits( data[ 1 : ], 5, 8, False )
if decoded is None or len( decoded ) < 2 or len( decoded ) > 40:
return ( None, None )
if data[ 0 ] > 16:
return ( None, None )
if data[ 0 ] == 0 and len( decoded ) != 20 and len( decoded ) != 32:
return ( None, None )
return ( data[ 0 ], decoded )
def encode( hrp, witver, witprog ):
"""Encode a segwit address."""
ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
if decode(hrp, ret) == (None, None):
ret = bech32_encode( hrp, [ witver ] + convertbits( witprog, 8, 5 ) )
if decode( hrp, ret ) == ( None, None ):
return None
return ret
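# A round-trip sketch for the helpers above (hypothetical function). It assumes
# that Harmony "one1..." addresses are the plain Bech32 encoding of a 20-byte
# address with the HRP "one", so it only exercises bech32_encode/bech32_decode
# and convertbits, not the segwit-specific encode/decode.
def _example_bech32_roundtrip():
    raw = bytes( range( 20 ) )  # placeholder 20-byte address
    encoded = bech32_encode( "one", convertbits( raw, 8, 5 ) )
    hrp, data = bech32_decode( encoded )
    assert hrp == "one"
    assert bytes( convertbits( data, 5, 8, False ) ) == raw
    return encoded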

File diff suppressed because it is too large

@ -7,7 +7,8 @@ Example:
Below is a demo of how to import, manage keys, and interact with the CLI::
>>> from pyhmy import cli
>>> cli.single_call("hmy keys add test1")
'**Important** write this seed phrase in a safe place, it is the only way to recover your account if you ever forget your password
'**Important** write this seed phrase in a safe place,
it is the only way to recover your account if you ever forget your password
craft ... tobacco'
>>> cli.get_accounts_keystore()
{'test1': 'one1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu'}
@ -15,7 +16,7 @@ Example:
>>> cli.get_accounts(check_addr)
['test1']
>>> cli.single_call("hmy keys list", timeout=2)
'NAME \t\t ADDRESS\n\ntest1 \tone1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu\n'
'NAME \t\t ADDRESS\n\ntest1 \tone1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu\n'
>>> cli.get_accounts_keystore()
{}
@ -40,128 +41,167 @@ For more details, reference the documentation here: TODO gitbook docs
"""
import subprocess
import pexpect
import os
import shutil
import re
import stat
import sys
from multiprocessing import Lock
from pathlib import Path
import pexpect
import requests
from .util import get_bls_build_variables, get_gopath
if sys.platform.startswith("linux"):
_libs = {"libbls384_256.so", "libcrypto.so.10", "libgmp.so.10", "libgmpxx.so.4", "libmcl.so"}
if sys.platform.startswith( "linux" ):
_libs = {
"libbls384_256.so",
"libcrypto.so.10",
"libgmp.so.10",
"libgmpxx.so.4",
"libmcl.so",
}
else:
_libs = {"libbls384_256.dylib", "libcrypto.1.0.0.dylib", "libgmp.10.dylib", "libgmpxx.4.dylib", "libmcl.dylib"}
_accounts = {} # Internal accounts keystore, make sure to sync when needed.
_account_keystore_path = "~/.hmy/account-keys" # Internal path to account keystore, will match the current binary.
_binary_path = "hmy" # Internal binary path.
_arg_prefix = "__PYHMY_ARG_PREFIX__"
_keystore_cache_lock = Lock()
_libs = {
"libbls384_256.dylib",
"libcrypto.1.0.0.dylib",
"libgmp.10.dylib",
"libgmpxx.4.dylib",
"libmcl.dylib",
}
# Internal accounts keystore, make sure to sync when needed.
_accounts = {}
# Internal path to account keystore, will match the current binary.
ARG_PREFIX = "__PYHMY_ARG_PREFIX__"
# _keystore_cache_lock = Lock()
environment = os.environ.copy() # The environment for the CLI to execute in.
# completely remove caching...
# we need to improve getting address better internally to REDUCE single calls....
# def _cache_and_lock_accounts_keystore(fn):
# """Internal decorator to cache the accounts keystore and prevent concurrent
# accesses with locks."""
# cached_accounts = {}
# last_mod = None
# def wrap(*args):
# nonlocal last_mod
# _keystore_cache_lock.acquire()
# files_in_dir = str(os.listdir(ACCOUNT_KEYSTORE_PATH))
# dir_mod_time = str(os.path.getmtime(ACCOUNT_KEYSTORE_PATH))
# curr_mod = hash(files_in_dir + dir_mod_time + BINARY_PATH)
# if curr_mod != last_mod:
# cached_accounts.clear()
# cached_accounts.update(fn(*args))
# last_mod = curr_mod
# accounts = cached_accounts.copy()
# _keystore_cache_lock.release()
# return accounts
# return wrap
# TODO: completely remove caching... we need to improve getting address better internally to REDUCE single calls....
def _cache_and_lock_accounts_keystore(fn):
def account_keystore_path( value = None ):
"""
Internal decorator to cache the accounts keystore and
prevent concurrent accesses with locks.
Gets or sets the ACCOUNT_KEYSTORE_PATH
"""
cached_accounts = {}
last_mod = None
if "value" not in account_keystore_path.__dict__:
account_keystore_path.value = "~/.hmy/account-keys"
if value:
account_keystore_path.value = value
return account_keystore_path.value
def wrap(*args):
nonlocal last_mod
_keystore_cache_lock.acquire()
files_in_dir = str(os.listdir(_account_keystore_path))
dir_mod_time = str(os.path.getmtime(_account_keystore_path))
curr_mod = hash(files_in_dir + dir_mod_time + _binary_path)
if curr_mod != last_mod:
cached_accounts.clear()
cached_accounts.update(fn(*args))
last_mod = curr_mod
accounts = cached_accounts.copy()
_keystore_cache_lock.release()
return accounts
return wrap
def binary_path( value = None ):
"""
Gets or sets the BINARY_PATH
"""
if "value" not in binary_path.__dict__:
binary_path.value = "hmy"
if value:
binary_path.value = value
return binary_path.value
def _get_current_accounts_keystore():
"""
Internal function that gets the current keystore from the CLI.
"""Internal function that gets the current keystore from the CLI.
:returns: A dictionary where the keys are the account names/aliases and the
values are their 'one1...' addresses.
"""
curr_addresses = {}
response = single_call("hmy keys list")
lines = response.split("\n")
if "NAME" not in lines[0] or "ADDRESS" not in lines[0]:
raise ValueError("Name or Address not found on first line of key list")
if lines[1] != "":
raise ValueError("Unknown format: No blank line between label and data")
for line in lines[2:]:
columns = line.split("\t")
if len(columns) != 2:
response = single_call( "hmy keys list" )
lines = response.split( "\n" )
if "NAME" not in lines[ 0 ] or "ADDRESS" not in lines[ 0 ]:
raise ValueError(
"Name or Address not found on first line of key list"
)
if lines[ 1 ] != "":
raise ValueError(
"Unknown format: No blank line between label and data"
)
for line in lines[ 2 : ]:
columns = line.split( "\t" )
if len( columns ) != 2:
break # Done iterating through all of the addresses.
name, address = columns
curr_addresses[name.strip()] = address
curr_addresses[ name.strip() ] = address
return curr_addresses
def _set_account_keystore_path():
"""
Internal function to set the account keystore path according to the binary.
"""
global _account_keystore_path
response = single_call("hmy keys location").strip()
if not os.path.exists(response):
os.mkdir(response)
_account_keystore_path = response
"""Internal function to set the account keystore path according to the
binary."""
response = single_call( "hmy keys location" ).strip()
if not os.path.exists( response ):
os.mkdir( response )
account_keystore_path( response )
def _sync_accounts():
"""
Internal function that UPDATES the accounts keystore with the CLI's keystore.
"""
"""Internal function that UPDATES the accounts keystore with the CLI's
keystore."""
new_keystore = _get_current_accounts_keystore()
for key in new_keystore.keys():
if key not in _accounts.keys():
_accounts[key] = new_keystore[key]
acc_keys_to_remove = [k for k in _accounts.keys() if k not in new_keystore.keys()]
for key, value in new_keystore.items():
if key not in _accounts:
_accounts[ key ] = value
acc_keys_to_remove = [ k for k in _accounts if k not in new_keystore ]
for key in acc_keys_to_remove:
del _accounts[key]
del _accounts[ key ]
def _make_call_command(command):
"""
Internal function that processes a command String or String Arg List for
def _make_call_command( command ):
"""Internal function that processes a command String or String Arg List for
underlying pexpect or subprocess call.
Note that single quotes are not respected for strings.
"""
if isinstance(command, list):
if isinstance( command, list ):
command_toks = command
else:
all_strings = sorted(re.findall(r'"(.*?)"', command), key=lambda e: len(e), reverse=True)
for i, string in enumerate(all_strings):
command = command.replace(string, f"{_arg_prefix}_{i}")
command_toks_prefix = [el for el in command.split(" ") if el]
all_strings = sorted(
re.findall(r'"(.*?)"', command),
key=lambda e: len(e), # pylint: disable=unnecessary-lambda
reverse=True
)
for i, string in enumerate( all_strings ):
command = command.replace( string, f"{ARG_PREFIX}_{i}" )
command_toks_prefix = [ el for el in command.split( " " ) if el ]
command_toks = []
for el in command_toks_prefix:
if el.startswith(f'"{_arg_prefix}_') and el.endswith(f'"'):
index = int(el.replace(f'"{_arg_prefix}_', '').replace('"', ''))
command_toks.append(all_strings[index])
for element in command_toks_prefix:
if element.startswith( f'"{ARG_PREFIX}_'
) and element.endswith( '"' ):
index = int(
element.replace( f'"{ARG_PREFIX}_',
"" ).replace( '"',
"" )
)
command_toks.append( all_strings[ index ] )
else:
command_toks.append(el)
if re.match(".*hmy", command_toks[0]):
command_toks = command_toks[1:]
command_toks.append( element )
if re.match( ".*hmy", command_toks[ 0 ] ):
command_toks = command_toks[ 1 : ]
return command_toks
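# A tiny illustration of the tokenization above (hypothetical helper): quoted
# substrings survive as single tokens and the leading binary name is dropped.
def _example_make_call_command():
    # returns [ 'keys', 'add', 'my account' ]
    return _make_call_command( 'hmy keys add "my account"' )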
@ -175,38 +215,46 @@ def get_accounts_keystore():
return _accounts
def is_valid_binary(path):
def is_valid_binary( path ):
"""
:param path: Path to the Harmony CLI binary (absolute or relative).
:return: If the file at the path is a CLI binary.
"""
path = os.path.realpath(path)
os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC)
path = os.path.realpath( path )
os.chmod( path, os.stat( path ).st_mode | stat.S_IEXEC )
try:
proc = subprocess.Popen([path, "version"], env=environment,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if not err:
return False
return "harmony" in err.decode().strip().lower()
except (OSError, subprocess.CalledProcessError, subprocess.SubprocessError):
with subprocess.Popen(
[ path,
"version" ],
env = environment,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
) as proc:
_, err = proc.communicate()
if not err:
return False
return "harmony" in err.decode().strip().lower()
except (
OSError,
subprocess.CalledProcessError,
subprocess.SubprocessError
):
return False
def set_binary(path):
def set_binary( path ):
"""
:param path: The path of the CLI binary to use.
:returns: Whether the binary has been set.
Note that the exposed keystore will be updated accordingly.
"""
global _binary_path
path = os.path.realpath(path)
assert os.path.exists(path)
os.chmod(path, os.stat(path).st_mode | stat.S_IEXEC)
if not is_valid_binary(path):
path = os.path.realpath( path )
assert os.path.exists( path )
os.chmod( path, os.stat( path ).st_mode | stat.S_IEXEC )
if not is_valid_binary( path ):
return False
_binary_path = path
binary_path( path )
_set_account_keystore_path()
_sync_accounts()
return True
@ -216,30 +264,37 @@ def get_binary_path():
"""
:return: The absolute path of the CLI binary.
"""
return os.path.abspath(_binary_path)
return os.path.abspath( binary_path() )
def get_version():
"""
:return: The version string of the CLI binary.
"""
proc = subprocess.Popen([_binary_path, "version"], env=environment,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if not err:
raise RuntimeError(f"Could not get version.\n"
f"\tGot exit code {proc.returncode}. Expected non-empty error message.")
return err.decode().strip()
with subprocess.Popen(
[ binary_path(),
"version" ],
env = environment,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
) as proc:
_, err = proc.communicate()
if not err:
raise RuntimeError(
f"Could not get version.\n"
f"\tGot exit code {proc.returncode}. Expected non-empty error message."
)
return err.decode().strip()
def get_account_keystore_path():
"""
:return: The absolute path to the account keystore of the CLI binary.
"""
return os.path.abspath(_account_keystore_path)
return os.path.abspath( account_keystore_path() )
def check_address(address):
def check_address( address ):
"""
:param address: A 'one1...' address.
:return: Whether the address is in the CLI's keystore.
@ -247,15 +302,15 @@ def check_address(address):
return address in get_accounts_keystore().values()
def get_address(name):
def get_address( name ):
"""
:param name: The alias of a key used in the CLI's keystore.
:return: The associated 'one1...' address.
"""
return get_accounts_keystore().get(name, None)
return get_accounts_keystore().get( name, None )
def get_accounts(address):
def get_accounts( address ):
"""
:param address: The 'one1...' address
:return: A list of account names associated with the param
@ -263,38 +318,42 @@ def get_accounts(address):
Note that a list of account names is needed because 1 address can
have multiple names within the CLI's keystore.
"""
return [acc for acc, addr in get_accounts_keystore().items() if address == addr]
return [
acc for acc,
addr in get_accounts_keystore().items() if address == addr
]
def remove_account(name):
"""
Note that this edits the keystore directly since there is currently no
def remove_account( name ):
"""Note that this edits the keystore directly since there is currently no
way to remove an address using the CLI.
:param name: The alias of a key used in the CLI's keystore.
:raises RuntimeError: If it failed to remove an account.
"""
if not get_address(name):
if not get_address( name ):
return
keystore_path = f"{get_account_keystore_path()}/{name}"
try:
shutil.rmtree(keystore_path)
except (shutil.Error, FileNotFoundError) as err:
raise RuntimeError(f"Failed to delete dir: {keystore_path}\n"
f"\tException: {err}") from err
shutil.rmtree( keystore_path )
except ( shutil.Error, FileNotFoundError ) as err:
raise RuntimeError(
f"Failed to delete dir: {keystore_path}\n"
f"\tException: {err}"
) from err
_sync_accounts()
def remove_address(address):
def remove_address( address ):
"""
:param address: The 'one1...' address to be removed.
"""
for name in get_accounts(address):
remove_account(name)
for name in get_accounts( address ):
remove_account( name )
_sync_accounts()
def single_call(command, timeout=60, error_ok=False):
def single_call( command, timeout = 60, error_ok = False ):
"""
:param command: String or String Arg List of command to execute on CLI.
:param timeout: Optional timeout in seconds
@ -302,81 +361,129 @@ def single_call(command, timeout=60, error_ok=False):
:returns: Decoded string of response from hmy CLI call
:raises: RuntimeError if bad command
"""
command_toks = [_binary_path] + _make_call_command(command)
command_toks = [ binary_path() ] + _make_call_command( command )
try:
return subprocess.check_output(command_toks, env=environment, timeout=timeout).decode()
return subprocess.check_output(
command_toks,
env = environment,
timeout = timeout
).decode()
except subprocess.CalledProcessError as err:
if not error_ok:
raise RuntimeError(f"Bad CLI args: `{command}`\n "
f"\tException: {err}") from err
raise RuntimeError(
f"Bad CLI args: `{command}`\n "
f"\tException: {err}"
) from err
return err.output.decode()
def expect_call(command, timeout=60):
def expect_call( command, timeout = 60 ):
"""
:param command: String or String Arg List of command to execute on CLI.
:param timeout: Optional timeout in seconds
:returns: A pexpect child program
:raises: RuntimeError if bad command
"""
command_toks = _make_call_command(command)
command_toks = _make_call_command( command )
try:
proc = pexpect.spawn(f"{_binary_path}", command_toks, env=environment, timeout=timeout)
proc = pexpect.spawn(
f"{binary_path()}",
command_toks,
env = environment,
timeout = timeout
)
proc.delaybeforesend = None
except pexpect.ExceptionPexpect as err:
raise RuntimeError(f"Bad CLI args: `{command}`\n "
f"\tException: {err}") from err
raise RuntimeError(
f"Bad CLI args: `{command}`\n "
f"\tException: {err}"
) from err
return proc
def download(path="./bin/hmy", replace=True, verbose=True):
"""
Download the CLI binary to the specified path.
Related files will be saved in the same directory.
def download( path = "./bin/hmy", replace = True, verbose = True ):
"""Download the CLI binary to the specified path. Related files will be
saved in the same directory.
:param path: The desired path (absolute or relative) of the saved binary.
:param replace: A flag to force a replacement of the binary/file.
:param verbose: A flag to enable a report message once the binary is downloaded.
:returns: The environment to run the saved CLI binary.
"""
path = os.path.realpath(path)
parent_dir = Path(path).parent
assert not os.path.isdir(path), f"path `{path}` must specify a file, not a directory."
path = os.path.realpath( path )
parent_dir = Path( path ).parent
assert not os.path.isdir(
path
), f"path `{path}` must specify a file, not a directory."
if not os.path.exists(path) or replace:
if not os.path.exists( path ) or replace:
old_cwd = os.getcwd()
os.makedirs(parent_dir, exist_ok=True)
os.chdir(parent_dir)
hmy_script_path = os.path.join(parent_dir, "hmy.sh")
with open(hmy_script_path, 'w') as f:
f.write(requests.get("https://raw.githubusercontent.com/harmony-one/go-sdk/master/scripts/hmy.sh")
.content.decode())
os.chmod(hmy_script_path, os.stat(hmy_script_path).st_mode | stat.S_IEXEC)
os.makedirs( parent_dir, exist_ok = True )
os.chdir( parent_dir )
hmy_script_path = os.path.join( parent_dir, "hmy.sh" )
with open( hmy_script_path, "w", encoding = 'utf8' ) as script_file:
script_file.write(
requests.get(
"https://raw.githubusercontent.com/harmony-one/go-sdk/master/scripts/hmy.sh"
).content.decode()
)
os.chmod(
hmy_script_path,
os.stat( hmy_script_path ).st_mode | stat.S_IEXEC
)
same_name_file = False
if os.path.exists(os.path.join(parent_dir, "hmy")) and Path(path).name != "hmy": # Save same name file.
if (
os.path.exists( os.path.join( parent_dir,
"hmy" ) ) and
Path( path ).name != "hmy"
): # Save same name file.
same_name_file = True
os.rename(os.path.join(parent_dir, "hmy"), os.path.join(parent_dir, ".hmy_tmp"))
os.rename(
os.path.join( parent_dir,
"hmy" ),
os.path.join( parent_dir,
".hmy_tmp" )
)
if verbose:
subprocess.call([hmy_script_path, '-d'])
subprocess.call( [ hmy_script_path, "-d" ] )
else:
subprocess.call([hmy_script_path, '-d'], stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT)
os.rename(os.path.join(parent_dir, "hmy"), path)
with open( os.devnull, "w", encoding = "UTF-8" ) as devnull:
subprocess.call(
[ hmy_script_path,
"-d" ],
stdout = devnull,
stderr = subprocess.STDOUT,
)
os.rename( os.path.join( parent_dir, "hmy" ), path )
if same_name_file:
os.rename(os.path.join(parent_dir, ".hmy_tmp"), os.path.join(parent_dir, "hmy"))
os.rename(
os.path.join( parent_dir,
".hmy_tmp" ),
os.path.join( parent_dir,
"hmy" )
)
if verbose:
print(f"Saved harmony binary to: `{path}`")
os.chdir(old_cwd)
print( f"Saved harmony binary to: `{path}`" )
os.chdir( old_cwd )
env = os.environ.copy()
files_in_parent_dir = set(os.listdir(parent_dir))
if files_in_parent_dir.intersection(_libs) == _libs:
if sys.platform.startswith("linux"):
env["LD_LIBRARY_PATH"] = parent_dir
else:
env["DYLD_FALLBACK_LIBRARY_PATH"] = parent_dir
elif os.path.exists(f"{get_gopath()}/src/github.com/harmony-one/bls") \
and os.path.exists(f"{get_gopath()}/src/github.com/harmony-one/mcl"):
env.update(get_bls_build_variables())
else:
raise RuntimeWarning(f"Could not get environment for downloaded hmy CLI at `{path}`")
if sys.platform.startswith( "darwin" ): # Dynamic linking for darwin
try:
files_in_parent_dir = set( os.listdir( parent_dir ) )
if files_in_parent_dir.intersection( _libs ) == _libs:
env[ "DYLD_FALLBACK_LIBRARY_PATH" ] = parent_dir
elif os.path.exists(
f"{get_gopath()}/src/github.com/harmony-one/bls"
) and os.path.exists(
f"{get_gopath()}/src/github.com/harmony-one/mcl"
):
env.update( get_bls_build_variables() )
else:
raise RuntimeWarning(
f"Could not get environment for downloaded hmy CLI at `{path}`"
)
except Exception as exception:
raise RuntimeWarning(
f"Could not get environment for downloaded hmy CLI at `{path}`"
) from exception
return env
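# An end-to-end sketch of this module (hypothetical helper): download the CLI,
# point the module at it, and list the keystore. The paths, network access, and
# updating the module-level `environment` with the returned env are assumptions
# about intended usage, not a prescribed workflow.
def _example_cli_setup():
    env = download( "./bin/hmy", replace = False, verbose = False )
    environment.update( env )
    set_binary( "./bin/hmy" )
    return single_call( "hmy keys list", timeout = 10 )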

@ -0,0 +1,18 @@
"""
Constants
"""
# localnet constants
DEFAULT_ENDPOINT = 'http://localhost:9500'
DEFAULT_TIMEOUT = 30
# staking percentage constants
PRECISION = 18
MAX_DECIMAL = 1000000000000000000
NAME_CHAR_LIMIT = 140
IDENTITY_CHAR_LIMIT = 140
WEBSITE_CHAR_LIMIT = 140
SECURITY_CONTACT_CHAR_LIMIT = 140
DETAILS_CHAR_LIMIT = 280
MIN_REQUIRED_DELEGATION = int( 10000 * 1e18 )

@ -0,0 +1,297 @@
"""
Basic smart contract functions on Harmony
For full ABI driven interaction, use something like web3py or brownie
"""
from .rpc.request import rpc_request
from .transaction import get_transaction_receipt
from .exceptions import InvalidRPCReplyError
from .constants import DEFAULT_ENDPOINT, DEFAULT_TIMEOUT
#########################
# Smart contract RPCs
#########################
def call( # pylint: disable=too-many-arguments
to_address,
block_num,
from_address=None,
gas=None,
gas_price=None,
value=None,
data=None,
endpoint=DEFAULT_ENDPOINT,
timeout=DEFAULT_TIMEOUT,
) -> str:
"""Execute a smart contract without saving state.
Parameters
----------
to_address: :obj:`str`
Address of the smart contract
block_num: :obj:`int`
Block number to execute the contract for
from_address: :obj:`str`, optional
Wallet address
gas: :obj:`str`, optional
Gas to execute the smart contract (in hex)
gas_price: :obj:`str`, optional
Gas price to execute smart contract call (in hex)
value: :obj:`str`, optional
Value sent with the smart contract call (in hex)
data: :obj:`str`, optional
Hash of smart contract method and parameters
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
str
Return value of the executed smart contract
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/?version=latest#d34b1f82-9b29-4b68-bac7-52fa0a8884b1
"""
params = [
{
"to": to_address,
"from": from_address,
"gas": gas,
"gasPrice": gas_price,
"value": value,
"data": data,
},
block_num,
]
method = "hmyv2_call"
try:
return rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def estimate_gas( # pylint: disable=too-many-arguments
to_address,
from_address=None,
gas=None,
gas_price=None,
value=None,
data=None,
endpoint=DEFAULT_ENDPOINT,
timeout=DEFAULT_TIMEOUT,
) -> int:
"""Estimate the gas price needed for a smart contract call.
Parameters
----------
to_address: :obj:`str`
Address of the smart contract
from_address: :obj:`str`, optional
Wallet address
gas: :obj:`str`, optional
Gas to execute the smart contract (in hex)
gas_price: :obj:`str`, optional
Gas price to execute smart contract call (in hex)
value: :obj:`str`, optional
Value sent with the smart contract call (in hex)
data: :obj:`str`, optional
Hash of smart contract method and parameters
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
int
Estimated gas price of smart contract call
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/?version=latest#b9bbfe71-8127-4dda-b26c-ff95c4c22abd
"""
params = [
{
"to": to_address,
"from": from_address,
"gas": gas,
"gasPrice": gas_price,
"value": value,
"data": data,
}
]
method = "hmyv2_estimateGas"
try:
return int(
rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ],
16,
)
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_code(
address,
block_num,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> str:
"""Get the code stored at the given address in the state for the given
block number.
Parameters
----------
address: :obj:`str`
Address of the smart contract
block_num: :obj:`int`
Block number to get the code for
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
str
Byte code at the smart contract address for the given block
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/?version=latest#e13e9d78-9322-4dc8-8917-f2e721a8e556
https://github.com/harmony-one/harmony/blob/1a8494c069dc3f708fdf690456713a2411465199/rpc/contract.go#L59
"""
params = [ address, block_num ]
method = "hmyv2_getCode"
try:
return rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_storage_at(
address,
key,
block_num,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> str:
"""Get the storage from the state at the given address, the key and the
block number.
Parameters
----------
address: :obj:`str`
Address of the smart contract
key: :obj:`str`
Hex representation of the storage location
block_num: :obj:`int`
Block number to get the code for
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
str
Data stored at the smart contract location
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://api.hmny.io/?version=latest#fa8ac8bd-952d-4149-968c-857ca76da43f
https://github.com/harmony-one/harmony/blob/1a8494c069dc3f708fdf690456713a2411465199/rpc/contract.go#L84
"""
params = [ address, key, block_num ]
method = "hmyv2_getStorageAt"
try:
return rpc_request(
method,
params = params,
endpoint = endpoint,
timeout = timeout
)[ "result" ]
except KeyError as exception:
raise InvalidRPCReplyError( method, endpoint ) from exception
def get_contract_address_from_hash(
tx_hash,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> str:
"""Get address of the contract which was deployed in the transaction
represented by tx_hash.
Parameters
----------
tx_hash: :obj:`str`
Hash of the deployment transaction
endpoint: :obj:`str`, optional
Endpoint to send request to
timeout: :obj:`int`, optional
Timeout in seconds
Returns
-------
str
Address of the smart contract
Raises
------
InvalidRPCReplyError
If received unknown result from endpoint
API Reference
-------------
https://github.com/harmony-one/harmony-test/blob/master/localnet/rpc_tests/test_contract.py#L36
"""
try:
return get_transaction_receipt( tx_hash,
endpoint,
timeout )[ "contractAddress" ]
except KeyError as exception:
raise InvalidRPCReplyError(
"hmyv2_getTransactionReceipt",
endpoint
) from exception
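# A usage sketch for the contract helpers above (hypothetical helper): the
# deployment transaction hash and block number are caller-supplied placeholders,
# and a node is assumed at DEFAULT_ENDPOINT.
def _example_contract_lookup( deployment_tx_hash, block_num ):
    contract_address = get_contract_address_from_hash( deployment_tx_hash )
    byte_code = get_code( contract_address, block_num )
    return contract_address, byte_code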

@ -1,35 +1,38 @@
from .rpc.exceptions import (
RPCError,
RequestsError,
RequestsTimeoutError
)
class InvalidRPCReplyError(RuntimeError):
"""
Exception raised when RPC call returns unexpected result
Generally indicates Harmony API has been updated & pyhmy library needs to be updated as well
"""
def __init__(self, method, endpoint):
super().__init__(f'Unexpected reply for {method} from {endpoint}')
class InvalidValidatorError(ValueError):
"""
Exception raised Validator does not pass sanity checks
"""
"""
Exceptions used by pyhmy
"""
class InvalidRPCReplyError( RuntimeError ):
"""Exception raised when RPC call returns unexpected result Generally
indicates Harmony API has been updated & pyhmy library needs to be updated
as well."""
def __init__( self, method, endpoint ):
super().__init__( f"Unexpected reply for {method} from {endpoint}" )
class InvalidValidatorError( ValueError ):
"""Exception raised Validator does not pass sanity checks."""
errors = {
1: 'Invalid ONE address',
2: 'Field not initialized',
3: 'Invalid field input',
4: 'Error checking blockchain',
5: 'Unable to import validator information from blockchain'
1: "Invalid ONE address",
2: "Field not initialized",
3: "Invalid field input",
4: "Error checking blockchain",
5: "Unable to import validator information from blockchain",
}
def __init__(self, err_code, msg):
def __init__( self, err_code, msg ):
self.code = err_code
self.msg = msg
super().__init__(msg)
super().__init__( msg )
def __str__( self ):
return f"[Errno {self.code}] {self.errors[self.code]}: {self.msg}"
def __str__(self):
return f'[Errno {self.code}] {self.errors[self.code]}: {self.msg}'
class TxConfirmationTimedoutError( AssertionError ):
"""Exception raised when a transaction is sent to the chain But not
confirmed during the timeout period specified."""
def __init__( self, msg ):
super().__init__( f"{msg}" )

@ -1,3 +1,7 @@
"""
Logger for pyhmy
"""
import threading
import datetime
import gzip
@ -6,39 +10,41 @@ import logging
import logging.handlers
class _GZipRotator:
def __call__(self, source, dest):
os.rename(source, dest)
f_in = open(dest, 'rb')
f_out = gzip.open("%s.gz" % dest, 'wb')
f_out.writelines(f_in)
f_out.close()
f_in.close()
os.remove(dest)
class _GZipRotator: # pylint: disable=too-few-public-methods
def __call__( self, source, dest ):
os.rename( source, dest )
with open( dest, "rb" ) as f_in:
with gzip.open( f"{dest}.gz", "wb" ) as f_out:
f_out.writelines( f_in )
os.remove( dest )
class ControlledLogger:
"""
A simple logger that only writes to file when the 'write' method is called.
"""
def __init__(self, logger_name, log_dir, backup_count=5):
class ControlledLogger: # pylint: disable=too-many-instance-attributes
"""A simple logger that only writes to file when the 'write' method is
called."""
def __init__( self, logger_name, log_dir, backup_count = 5 ):
"""
:param logger_name: The name of the logger and logfile
:param log_dir: The directory in which to save this log file (can be abs or relative).
"""
if log_dir.endswith('/'):
log_dir = log_dir[:-1]
log_dir = os.path.realpath(log_dir)
os.makedirs(log_dir, exist_ok=True)
handler = logging.handlers.TimedRotatingFileHandler(f"{log_dir}/{logger_name}.log", 'midnight', 1,
backupCount=backup_count)
handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))
if log_dir.endswith( "/" ):
log_dir = log_dir[ :-1 ]
log_dir = os.path.realpath( log_dir )
os.makedirs( log_dir, exist_ok = True )
handler = logging.handlers.TimedRotatingFileHandler(
f"{log_dir}/{logger_name}.log",
"midnight",
1,
backupCount = backup_count
)
handler.setFormatter(
logging.Formatter( "%(levelname)s - %(message)s" )
)
handler.rotator = _GZipRotator()
self.filename = handler.baseFilename
self.logger = logging.getLogger(logger_name)
self.logger.addHandler(handler)
self.logger = logging.getLogger( logger_name )
self.logger.addHandler( handler )
self._lock = threading.Lock()
self.filepath = f"{log_dir}/{logger_name}.log"
self.info_buffer = []
@ -46,97 +52,92 @@ class ControlledLogger:
self.warning_buffer = []
self.error_buffer = []
def __repr__(self):
def __repr__( self ):
return f"<ControlledLogger @ {self.filepath} : {self.logger}>"
def _clear(self):
"""
Internal method to clear the log buffer.
"""
def _clear( self ):
"""Internal method to clear the log buffer."""
self.info_buffer.clear()
self.debug_buffer.clear()
self.warning_buffer.clear()
self.error_buffer.clear()
def info(self, msg):
def info( self, msg ):
"""
:param msg: The info message to log
"""
self._lock.acquire()
self.info_buffer.append(f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}")
self._lock.release()
with self._lock:
self.info_buffer.append(
f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}"
)
def debug(self, msg):
def debug( self, msg ):
"""
:param msg: The debug message to log
"""
self._lock.acquire()
self.debug_buffer.append(f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}")
self._lock.release()
with self._lock:
self.debug_buffer.append(
f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}"
)
def warning(self, msg):
def warning( self, msg ):
"""
:param msg: The warning message to log
"""
self._lock.acquire()
self.warning_buffer.append(f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}")
self._lock.release()
with self._lock:
self.warning_buffer.append(
f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}"
)
def error(self, msg):
def error( self, msg ):
"""
:param msg: The error message to log
"""
self._lock.acquire()
self.error_buffer.append(f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}")
self._lock.release()
def print_info(self):
"""
Prints the current info buffer but does not flush it to log file.
"""
print('\n'.join(self.info_buffer))
def print_debug(self):
"""
Prints the current debug buffer but does not flush it to log file.
"""
print('\n'.join(self.debug_buffer))
def print_warning(self):
"""
Prints the current warning buffer but does not flush it to log file.
"""
print('\n'.join(self.warning_buffer))
def print_error(self):
"""
Prints the current error buffer but does not flush it to log file.
"""
print('\n'.join(self.error_buffer))
def write(self):
"""
Flushes ALL of the log buffers to the log file via the logger.
Note that directly after this method call, the respective prints will print
nothing since all log messages are flushed to file.
"""
self._lock.acquire()
self.logger.setLevel(logging.DEBUG)
for line in self.debug_buffer:
self.logger.debug(line)
self.logger.setLevel(logging.WARNING)
for line in self.warning_buffer:
self.logger.warning(line)
self.logger.setLevel(logging.ERROR)
for line in self.error_buffer:
self.logger.error(line)
self.logger.setLevel(logging.INFO)
for line in self.info_buffer:
self.logger.info(line)
self._clear()
self._lock.release()
with self._lock:
self.error_buffer.append(
f"[{threading.get_ident()}] "
f"{datetime.datetime.utcnow()} : {msg}"
)
def print_info( self ):
"""Prints the current info buffer but does not flush it to log file."""
print( "\n".join( self.info_buffer ) )
def print_debug( self ):
"""Prints the current debug buffer but does not flush it to log
file."""
print( "\n".join( self.debug_buffer ) )
def print_warning( self ):
"""Prints the current warning buffer but does not flush it to log
file."""
print( "\n".join( self.warning_buffer ) )
def print_error( self ):
"""Prints the current error buffer but does not flush it to log
file."""
print( "\n".join( self.error_buffer ) )
def write( self ):
"""Flushes ALL of the log buffers to the log file via the logger.
Note that directly after this method call, the respective prints
will print nothing since all log messages are flushed to file.
"""
with self._lock:
self.logger.setLevel( logging.DEBUG )
for line in self.debug_buffer:
self.logger.debug( line )
self.logger.setLevel( logging.WARNING )
for line in self.warning_buffer:
self.logger.warning( line )
self.logger.setLevel( logging.ERROR )
for line in self.error_buffer:
self.logger.error( line )
self.logger.setLevel( logging.INFO )
for line in self.info_buffer:
self.logger.info( line )
self._clear()
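# A minimal usage sketch (hypothetical helper): messages are buffered in memory
# and only hit ./logs/example.log once write() is called; the directory name is
# an assumption.
def _example_controlled_logger():
    log = ControlledLogger( "example", "./logs" )
    log.info( "started" )
    log.warning( "low balance" )
    log.print_info()  # prints the buffered info messages, nothing on disk yet
    log.write()       # flushes all buffers to the rotating log file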

@ -1,12 +1,15 @@
from decimal import Decimal
"""
Handles conversion of ONE to ATTO and vice versa
For more granular conversions, see Web3.toWei
"""
from decimal import Decimal
_conversion_unit = Decimal(1e18)
_conversion_unit = Decimal( 1e18 )
def convert_atto_to_one(atto) -> Decimal:
"""
Convert ATTO to ONE
def convert_atto_to_one( atto ) -> Decimal:
"""Convert ATTO to ONE.
Parameters
----------
@ -19,14 +22,13 @@ def convert_atto_to_one(atto) -> Decimal:
decimal
Converted value in ONE
"""
if isinstance(atto, float):
atto = int(atto)
return Decimal(atto) / _conversion_unit
if isinstance( atto, float ):
atto = int( atto )
return Decimal( atto ) / _conversion_unit
def convert_one_to_atto(one) -> Decimal:
"""
Convert ONE to ATTO
def convert_one_to_atto( one ) -> Decimal:
"""Convert ONE to ATTO.
Parameters
----------
@ -38,6 +40,6 @@ def convert_one_to_atto(one) -> Decimal:
decimal
Converted value in ATTO
"""
if isinstance(one, float):
one = str(one)
return Decimal(one) * _conversion_unit
if isinstance( one, float ):
one = str( one )
return Decimal( one ) * _conversion_unit
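# A quick sketch of both directions (hypothetical helper); amounts follow the
# 1 ONE = 1e18 ATTO convention encoded in _conversion_unit above.
def _example_conversions():
    one_value = convert_atto_to_one( 10 ** 18 )  # Decimal('1')
    atto_value = convert_one_to_atto( 1.5 )      # 1.5e18 ATTO as a Decimal
    return one_value, atto_value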

@ -1,27 +1,26 @@
import requests
"""
RPC Specific Exceptions
"""
import requests
class RPCError(RuntimeError):
"""
Exception raised when RPC call returns an error
"""
def __init__(self, method, endpoint, error):
class RPCError( RuntimeError ):
"""Exception raised when RPC call returns an error."""
def __init__( self, method, endpoint, error ):
self.error = error
super().__init__(f'Error in reply from {endpoint}: {method} returned {error}')
super().__init__(
f"Error in reply from {endpoint}: {method} returned {error}"
)
class RequestsError(requests.exceptions.RequestException):
"""
Wrapper for requests lib exceptions
"""
def __init__(self, endpoint):
super().__init__(f'Error connecting to {endpoint}')
class RequestsError( requests.exceptions.RequestException ):
"""Wrapper for requests lib exceptions."""
def __init__( self, endpoint ):
super().__init__( f"Error connecting to {endpoint}" )
class RequestsTimeoutError(requests.exceptions.Timeout):
"""
Wrapper for requests lib Timeout exceptions
"""
def __init__(self, endpoint):
super().__init__(f'Error connecting to {endpoint}')
class RequestsTimeoutError( requests.exceptions.Timeout ):
"""Wrapper for requests lib Timeout exceptions."""
def __init__( self, endpoint ):
super().__init__( f"Error connecting to {endpoint}" )

@ -1,21 +1,22 @@
"""
RPC wrapper around requests library
"""
import json
import requests
from .exceptions import (
RequestsError,
RequestsTimeoutError,
RPCError
)
from .exceptions import RequestsError, RequestsTimeoutError, RPCError
_default_endpoint = 'http://localhost:9500'
_default_timeout = 30
from ..constants import DEFAULT_ENDPOINT, DEFAULT_TIMEOUT
def base_request(method, params=None, endpoint=_default_endpoint, timeout=_default_timeout) -> str:
"""
Basic RPC request
def base_request(
method,
params = None,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> str:
"""Basic RPC request.
Parameters
----------
@ -44,8 +45,8 @@ def base_request(method, params=None, endpoint=_default_endpoint, timeout=_defau
"""
if params is None:
params = []
elif not isinstance(params, list):
raise TypeError(f'invalid type {params.__class__}')
elif not isinstance( params, list ):
raise TypeError( f"invalid type {params.__class__}" )
try:
payload = {
@ -55,21 +56,31 @@ def base_request(method, params=None, endpoint=_default_endpoint, timeout=_defau
"params": params
}
headers = {
'Content-Type': 'application/json'
"Content-Type": "application/json"
}
resp = requests.request('POST', endpoint, headers=headers, data=json.dumps(payload),
timeout=timeout, allow_redirects=True)
resp = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
return resp.content
except requests.exceptions.Timeout as err:
raise RequestsTimeoutError(endpoint) from err
raise RequestsTimeoutError( endpoint ) from err
except requests.exceptions.RequestException as err:
raise RequestsError(endpoint) from err
raise RequestsError( endpoint ) from err
def rpc_request(method, params=None, endpoint=_default_endpoint, timeout=_default_timeout) -> dict:
"""
RPC request
def rpc_request(
method,
params = None,
endpoint = DEFAULT_ENDPOINT,
timeout = DEFAULT_TIMEOUT
) -> dict:
"""RPC request.
Parameters
----------
@ -102,15 +113,12 @@ def rpc_request(method, params=None, endpoint=_default_endpoint, timeout=_defaul
--------
base_request
"""
raw_resp = base_request(method, params, endpoint, timeout)
raw_resp = base_request( method, params, endpoint, timeout )
try:
resp = json.loads(raw_resp)
if 'error' in resp:
raise RPCError(method, endpoint, str(resp['error']))
resp = json.loads( raw_resp )
if "error" in resp:
raise RPCError( method, endpoint, str( resp[ "error" ] ) )
return resp
except json.decoder.JSONDecodeError as err:
raise RPCError(method, endpoint, raw_resp) from err
# TODO: Add GET requests
raise RPCError( method, endpoint, raw_resp ) from err
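# A direct-request sketch (hypothetical helper): it assumes a node at
# DEFAULT_ENDPOINT and reuses the hmyv2_getTransactionsCount method that the
# higher-level wrappers elsewhere in this library call.
def _example_rpc_request():
    example_address = "one1aqfeed538xf7n0cfh60tjaeat7yw333pmj6sfu"
    reply = rpc_request(
        "hmyv2_getTransactionsCount",
        params = [ example_address, "ALL" ]
    )
    return reply[ "result" ]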

@ -0,0 +1,255 @@
"""
Sign Harmony or Ethereum transactions
Harmony staking transaction signing is not covered by this module
"""
# pylint: disable=protected-access, no-member
from functools import partial
from toolz import dissoc, pipe, merge
import rlp
from eth_utils.curried import keccak, to_int, hexstr_if_str, apply_formatters_to_dict
from rlp.sedes import big_endian_int, Binary, binary
from eth_rlp import HashableRLP
from hexbytes import HexBytes
from eth_account import Account
from eth_account.datastructures import SignedTransaction
from eth_account._utils.legacy_transactions import (
Transaction as SignedEthereumTxData,
UnsignedTransaction as UnsignedEthereumTxData,
LEGACY_TRANSACTION_FORMATTERS as ETHEREUM_FORMATTERS,
TRANSACTION_DEFAULTS,
chain_id_to_v,
)
from eth_account._utils.signing import sign_transaction_hash
from .util import chain_id_to_int, convert_one_to_hex
HARMONY_FORMATTERS = dict(
ETHEREUM_FORMATTERS,
shardID=hexstr_if_str(to_int), # additional fields for Harmony transaction
toShardID=hexstr_if_str(to_int), # which may be cross shard
)
class UnsignedHarmonyTxData( HashableRLP ):
"""
Unsigned Harmony transaction data.
Includes `shardID` and `toShardID`,
which distinguish it from an Ethereum transaction.
"""
fields = (
( "nonce",
big_endian_int ),
( "gasPrice",
big_endian_int ),
( "gas",
big_endian_int ),
( "shardID",
big_endian_int ),
( "toShardID",
big_endian_int ),
( "to",
Binary.fixed_length( 20,
allow_empty = True ) ),
( "value",
big_endian_int ),
( "data",
binary ),
)
class SignedHarmonyTxData( HashableRLP ):
"""
Signed Harmony transaction data.
Includes `shardID` and `toShardID`,
which distinguish it from an Ethereum transaction.
"""
fields = UnsignedHarmonyTxData._meta.fields + (
("v", big_endian_int), # Recovery value + 27
("r", big_endian_int), # First 32 bytes
("s", big_endian_int), # Next 32 bytes
)
# https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/_utils/transactions.py#L55
def encode_transaction( unsigned_transaction, vrs ):
"""serialize and encode an unsigned transaction with v,r,s."""
( v, r, s ) = vrs # pylint: disable=invalid-name
chain_naive_transaction = dissoc(
unsigned_transaction.as_dict(),
"v",
"r",
"s"
)
if isinstance(
unsigned_transaction,
( UnsignedHarmonyTxData,
SignedHarmonyTxData )
):
serializer = SignedHarmonyTxData
else:
serializer = SignedEthereumTxData
signed_transaction = serializer(
v = v,
r = r,
s = s,
**chain_naive_transaction
)
return rlp.encode( signed_transaction )
def serialize_transaction( filled_transaction ):
"""serialize a signed/unsigned transaction."""
# although this will always be present for this module
# keep this check anyway
if "v" in filled_transaction:
# https://github.com/harmony-one/harmony/blob/f8879f5e0288157bf95ae2898a9a27f0c85ff9ad/core/types/transaction_signing.go#L173
if "shardID" in filled_transaction and filled_transaction[
"v" ] < 1666600000:
serializer = SignedHarmonyTxData
else:
serializer = SignedEthereumTxData
else:
if "shardID" in filled_transaction:
serializer = UnsignedHarmonyTxData
else:
serializer = UnsignedEthereumTxData
for field, _ in serializer._meta.fields:
assert field in filled_transaction, f"Could not find {field} in transaction"
return serializer.from_dict(
{
field: filled_transaction[ field ]
for field,
_ in serializer._meta.fields
}
)
# https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/account.py#L650
def sanitize_transaction( transaction_dict, private_key ):
"""remove the originating address from the dict and convert chainId to
int."""
account = Account.from_key( private_key ) # pylint: disable=no-value-for-parameter
sanitized_transaction = (
transaction_dict.copy()
) # do not alter the original dictionary
if "from" in sanitized_transaction:
sanitized_transaction[ "from" ] = convert_one_to_hex(
transaction_dict[ "from" ]
)
if sanitized_transaction[ "from" ] == account.address:
sanitized_transaction = dissoc( sanitized_transaction, "from" )
else:
raise TypeError(
f"from field must match key's {account.address}, "
f"but it was {sanitized_transaction[ 'from' ]}"
)
if "chainId" in sanitized_transaction:
sanitized_transaction[ "chainId" ] = chain_id_to_int(
sanitized_transaction[ "chainId" ]
)
return account, sanitized_transaction
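# Illustrative sketch (not part of the library): sanitize_transaction drops a
# `from` address that matches the key and normalizes chainId. The key/address
# pair below is the well-known localnet test account used in the test notebook.
def _example_sanitize_transaction():
    account, transaction = sanitize_transaction(
        {
            "from": "one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37",
            "chainId": "HmyLocal",  # converted to 2 by chain_id_to_int
            "nonce": 0,
        },
        "3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65",
    )
    assert "from" not in transaction and transaction[ "chainId" ] == 2
    return account, transaction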
def sign_transaction( transaction_dict, private_key ) -> SignedTransaction:
"""Sign a (non-staking) transaction dictionary with the specified private
key.
Parameters
----------
transaction_dict: :obj:`dict` with the following keys
nonce: :obj:`int` Transaction nonce
gasPrice: :obj:`int` Transaction gas price in Atto
gas: :obj:`int` Gas limit for the transaction
to: :obj:`str` Destination address
value: :obj:`int` Amount to be transferred in Atto
data: :obj:`str` Transaction data, used for smart contracts
from: :obj:`str` From address, optional (if passed, must match the
public key address generated from private_key)
chainId: :obj:`int` One of util.chainIds.keys(), optional
If you want to replay your transaction across networks, do not pass it
shardID: :obj:`int` Originating shard ID, optional (needed for cx shard transaction)
toShardID: :obj:`int` Destination shard ID, optional (needed for cx shard transaction)
r: :obj:`int` First 32 bytes of the signature, optional
s: :obj:`int` Next 32 bytes of the signature, optional
v: :obj:`int` Recovery value, optional
private_key: :obj:`str` The private key
Returns
-------
A SignedTransaction object, which is a named tuple
rawTransaction: :obj:`str` Hex bytes of the raw transaction
hash: :obj:`str` Hex bytes of the transaction hash
r: :obj:`int` First 32 bytes of the signature
s: :obj:`int` Next 32 bytes of the signature
v: :obj:`int` Recovery value
Raises
------
TypeError, if the from address specified is not the same
one as derived from the private key
AssertionError, if the fields for the transaction are missing,
or if the chainId supplied is a string that is not a key in
util.chain_id_to_int
API Reference
-------------
https://readthedocs.org/projects/eth-account/downloads/pdf/stable/
"""
account, sanitized_transaction = sanitize_transaction(transaction_dict, private_key)
if "to" in sanitized_transaction and sanitized_transaction[ "to"
] is not None:
sanitized_transaction[ "to" ] = convert_one_to_hex(
sanitized_transaction[ "to" ]
)
# https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/_utils/transactions.py#L39
filled_transaction = pipe(
sanitized_transaction,
dict,
partial( merge,
TRANSACTION_DEFAULTS ),
chain_id_to_v,
apply_formatters_to_dict( HARMONY_FORMATTERS ),
)
unsigned_transaction = serialize_transaction( filled_transaction )
transaction_hash = unsigned_transaction.hash()
# https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/_utils/signing.py#L26
if isinstance(
unsigned_transaction,
( UnsignedEthereumTxData,
UnsignedHarmonyTxData )
):
chain_id = None
else:
chain_id = unsigned_transaction.v
(
v, # pylint: disable=invalid-name
r, # pylint: disable=invalid-name
s, # pylint: disable=invalid-name
) = sign_transaction_hash(
account._key_obj, transaction_hash, chain_id
)
encoded_transaction = encode_transaction(
unsigned_transaction,
vrs = ( v,
r,
s )
)
signed_transaction_hash = keccak( encoded_transaction )
return SignedTransaction(
rawTransaction = HexBytes( encoded_transaction ),
hash = HexBytes( signed_transaction_hash ),
r = r,
s = s,
v = v,
)
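# Example usage (sketch, mirroring the test notebook in this repository): sign a
# localnet transfer and return the raw hex accepted by
# transaction.send_raw_transaction. The key is the well-known localnet test key;
# nonce and value are placeholders.
def _example_sign_transaction():
    transaction_dict = {
        "from": "one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37",
        "to": "one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes",
        "gasPrice": 100 * 10 ** 9,  # 100 gwei in Atto
        "gas": 21000,
        "chainId": 2,  # localnet
        "value": 10 ** 18,  # 1 ONE in Atto
        "nonce": 0,
        "shardID": 0,
        "toShardID": 0,
    }
    private_key = "3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65"
    return sign_transaction( transaction_dict, private_key ).rawTransaction.hex()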

File diff suppressed because it is too large

@ -0,0 +1,520 @@
"""
Sign Harmony staking transactions
"""
import math
from decimal import Decimal
from functools import partial
from toolz import ( pipe, dissoc, merge, identity, )
from hexbytes import HexBytes
import rlp
from eth_account.datastructures import SignedTransaction
from eth_account._utils.signing import sign_transaction_hash
from eth_account._utils.legacy_transactions import chain_id_to_v
from eth_utils.curried import (
hexstr_if_str,
to_bytes,
keccak,
apply_formatters_to_dict,
to_int,
apply_formatters_to_sequence,
apply_formatter_to_array,
)
from .constants import PRECISION, MAX_DECIMAL
from .signing import sanitize_transaction
from .staking_structures import (
FORMATTERS,
Directive,
CreateValidator,
EditValidator,
DelegateOrUndelegate,
CollectRewards,
)
from .util import convert_one_to_hex
# https://github.com/harmony-one/sdk/blob/99a827782fabcd5f91f025af0d8de228956d42b4/packages/harmony-staking/src/stakingTransaction.ts#L335
def _convert_staking_percentage_to_number( value, ):
"""Convert from staking percentage to integer For example, 0.1 becomes
1000000000000000000. Since Python floats are problematic with precision,
this function is used as a workaround.
Parameters
---------
value: :obj:`str` or :obj:`Decimal`
the value to convert
Returns
-------
int, converted as above
Raises
------
AssertionError, if data types are not as expected
ValueError, if the value is malformed (negative, empty, or too precise)
"""
assert isinstance( value, ( str, Decimal ) ), "Only strings or decimals are supported"
if isinstance( value, Decimal ):
value = str( value )
value1 = value
if len( value ) == 0:
raise ValueError( "StakingDecimal string is empty" )
if value[ 0 ] == "-":
raise ValueError( "Negative numbers are not accepted" )
if value[ 0 ] == "+":
value1 = value[ 1 : ]
if len( value1 ) == 0:
raise ValueError( "StakingDecimal string is empty" )
spaced = value1.split( " " )
if len( spaced ) > 1:
raise ValueError( "Bad decimal string" )
splitted = value1.split( "." )
combined_str = splitted[ 0 ]
length = 0  # number of fractional digits; stays 0 when there is no decimal point
if len( splitted ) == 2:
length = len( splitted[ 1 ] )
if length == 0 or len( combined_str ) == 0:
raise ValueError( "Bad StakingDecimal length" )
if splitted[ 1 ][ 0 ] == "-":
raise ValueError( "Bad StakingDecimal string" )
combined_str += splitted[ 1 ]
elif len( splitted ) > 2:
raise ValueError( "Too many periods to be a StakingDecimal string" )
if length > PRECISION:
raise ValueError( "Too much precision, must be less than {PRECISION}" )
zeroes_to_add = PRECISION - length
combined_str += (
"0" * zeroes_to_add
) # This will not have any periods, so it is effectively a large integer
val = int( combined_str )
assert val <= MAX_DECIMAL, "Staking percentage is too large"
return val
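# Worked example (sketch): "0.05" keeps its two decimal digits and is padded
# with PRECISION - 2 zeroes, so the result is 5 * 10 ** ( PRECISION - 2 ),
# i.e. 50000000000000000 when PRECISION is 18.
def _example_convert_staking_percentage():
    assert _convert_staking_percentage_to_number( "0.05" ) == 5 * 10 ** ( PRECISION - 2 )
    assert _convert_staking_percentage_to_number( Decimal( "0.1" ) ) == 10 ** ( PRECISION - 1 )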
def _get_account_and_transaction( transaction_dict, private_key ):
"""Create account from private key and sanitize the transaction
Sanitization involves removal of the 'from' key and conversion of the
chainId key from str to int ( if present )
Parameters
----------
transaction_dict: :obj:`dict`
See sign_staking_transaction
private_key: obj:`str`
Private key for the account
Returns
-------
a tuple containing account :obj:`eth_account.Account`
and sanitize_transaction :obj:`dict`
Raises
------
AssertionError, if chainId is not present in util.chain_id_to_int
TypeError, if the value of 'from' key is not the same as account address
"""
account, sanitized_transaction = sanitize_transaction(
transaction_dict, private_key
) # remove from, convert chain id ( if present ) to integer
sanitized_transaction[ "directive" ] = sanitized_transaction[
"directive" ].value # convert to value, like in TypeScript
return account, sanitized_transaction
# pylint: disable=too-many-locals,protected-access,invalid-name
def _sign_transaction_generic(
account,
sanitized_transaction,
parent_serializer
):
"""Sign a generic staking transaction, given the serializer base class and
account.
Parameters
----------
account: :obj:`eth_account.Account`, the account to use for signing
sanitized_transaction: :obj:`dict`, The sanitized transaction ( chainId checks and no from key )
parent_serializer: :obj: The serializer class from staking_structures
Returns
-------
SignedTransaction object, which can be posted to the chain by using
transaction.send_raw_staking_transaction
Raises
------
Assertion / KeyError, if certain keys are missing from the dict
rlp.exceptions.ObjectSerializationError, if data types are not as expected
"""
# obtain the serializers
if sanitized_transaction.get( "chainId", 0 ) == 0:
unsigned_serializer, signed_serializer = (
parent_serializer.Unsigned( ),
parent_serializer.Signed( ),
) # unsigned, signed
else:
unsigned_serializer, signed_serializer = (
parent_serializer.SignedChainId( ),
parent_serializer.SignedChainId( ),
) # chain_id_to_v adds v/r/s, so the plain Unsigned serializer is not used here
# fill the transaction
# https://github.com/ethereum/eth-account/blob/00e7b10005c5fa7090086fcef37a76296c524e17/eth_account/_utils/transactions.py#L39
filled_transaction = pipe(
sanitized_transaction,
dict,
partial( merge, { "chainId": None } ),
chain_id_to_v, # will move chain id to v and add v/r/s
apply_formatters_to_dict( FORMATTERS ),
)
# get the unsigned transaction
for field, _ in unsigned_serializer._meta.fields:
assert field in filled_transaction, f"Could not find {field} in transaction"
unsigned_transaction = unsigned_serializer.from_dict(
{
f: filled_transaction[ f ]
for f,
_ in unsigned_serializer._meta.fields
}
) # drop extras silently
# sign the unsigned transaction
if "v" in unsigned_transaction.as_dict():
chain_id = unsigned_transaction.v
else:
chain_id = None
transaction_hash = unsigned_transaction.hash()
( v,
r,
s
) = sign_transaction_hash( account._key_obj,
transaction_hash,
chain_id )
chain_naive_transaction = dissoc(
unsigned_transaction.as_dict(),
"v",
"r",
"s"
) # remove extra v/r/s added by chain_id_to_v
# serialize it
# https://github.com/harmony-one/sdk/blob/99a827782fabcd5f91f025af0d8de228956d42b4/packages/harmony-staking/src/stakingTransaction.ts#L207
signed_transaction = signed_serializer(
v=v
+ (
8 if chain_id is None else 0
),
r=r,
s=s, # in the below statement, remove everything not expected by signed_serializer
**{
f: chain_naive_transaction[ f ]
for f, _ in signed_serializer._meta.fields
if f not in "vrs"
},
)
# encode it
encoded_transaction = rlp.encode( signed_transaction )
# hash it
signed_transaction_hash = keccak( encoded_transaction )
# return it
return SignedTransaction(
rawTransaction = HexBytes( encoded_transaction ),
hash = HexBytes( signed_transaction_hash ),
r = r,
s = s,
v = v,
)
def _sign_delegate_or_undelegate( transaction_dict, private_key ):
"""Sign a delegate or undelegate transaction See sign_staking_transaction
for details."""
# preliminary steps
if transaction_dict[ "directive" ] not in [
Directive.Delegate,
Directive.Undelegate
]:
raise TypeError(
"Only Delegate or Undelegate are supported by _sign_delegate_or_undelegate"
)
# first common step
account, sanitized_transaction = _get_account_and_transaction(
transaction_dict, private_key
)
# encode the stakeMsg
sanitized_transaction[ "stakeMsg" ] = apply_formatters_to_sequence(
[
hexstr_if_str( to_bytes ),
hexstr_if_str( to_bytes ),
hexstr_if_str( to_int )
],
[
convert_one_to_hex(
sanitized_transaction.pop( "delegatorAddress" )
),
convert_one_to_hex(
sanitized_transaction.pop( "validatorAddress" )
),
sanitized_transaction.pop( "amount" ),
],
)
return _sign_transaction_generic(
account,
sanitized_transaction,
DelegateOrUndelegate
)
def _sign_collect_rewards( transaction_dict, private_key ):
"""Sign a collect rewards transaction See sign_staking_transaction for
details."""
# preliminary steps
if transaction_dict[ "directive" ] != Directive.CollectRewards:
raise TypeError(
"Only CollectRewards is supported by _sign_collect_rewards"
)
# first common step
account, sanitized_transaction = _get_account_and_transaction(
transaction_dict, private_key
)
# encode the stakeMsg
sanitized_transaction[ "stakeMsg" ] = [
hexstr_if_str( to_bytes )(
convert_one_to_hex(
sanitized_transaction.pop( "delegatorAddress" )
)
)
]
return _sign_transaction_generic(
account,
sanitized_transaction,
CollectRewards
)
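# Sketch of a CollectRewards transaction_dict as accepted by
# sign_staking_transaction below; the address is the localnet test account used
# in the test notebook, and the gas values are only illustrative placeholders.
def _example_collect_rewards_dict():
    return {
        "directive": Directive.CollectRewards,
        "delegatorAddress": "one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37",
        "nonce": 0,
        "gasPrice": 100 * 10 ** 9,  # 100 gwei in Atto
        "gasLimit": 50000,
        "chainId": 2,  # localnet
    }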
def _sign_create_validator( transaction_dict, private_key ):
"""Sign a create validator transaction See sign_staking_transaction for
details."""
# preliminary steps
if transaction_dict[ "directive" ] != Directive.CreateValidator:
raise TypeError(
"Only CreateValidator is supported by _sign_create_or_edit_validator"
)
# first common step
account, sanitized_transaction = _get_account_and_transaction(
transaction_dict, private_key
)
# encode the stakeMsg
description = [
sanitized_transaction.pop( "name" ),
sanitized_transaction.pop( "identity" ),
sanitized_transaction.pop( "website" ),
sanitized_transaction.pop( "security-contact" ),
sanitized_transaction.pop( "details" ),
]
commission = apply_formatter_to_array(
hexstr_if_str( to_int ), # formatter
[
_convert_staking_percentage_to_number( sanitized_transaction.pop( "rate" ) ),
_convert_staking_percentage_to_number(
sanitized_transaction.pop( "max-rate" )
),
_convert_staking_percentage_to_number(
sanitized_transaction.pop( "max-change-rate" )
),
],
)
commission = [ [ element ] for element in commission ]
bls_keys = apply_formatter_to_array(
hexstr_if_str( to_bytes ), # formatter
sanitized_transaction.pop( "bls-public-keys" ),
)
bls_key_sigs = apply_formatter_to_array(
hexstr_if_str( to_bytes ),
sanitized_transaction.pop( "bls-key-sigs" ) # formatter
)
sanitized_transaction[ "stakeMsg" ] = apply_formatters_to_sequence(
[
hexstr_if_str( to_bytes ), # address
identity, # description
identity, # commission rates
hexstr_if_str(
to_int
), # min self delegation ( in ONE ), decimals are silently dropped
hexstr_if_str(
to_int
), # max total delegation ( in ONE ), decimals are silently dropped
identity, # bls public keys
identity, # bls key sigs
hexstr_if_str(
to_int
), # amount ( the Hexlify in the SDK drops the decimals, which is what we will do too )
],
[
convert_one_to_hex( sanitized_transaction.pop( "validatorAddress" ) ),
description,
commission,
math.floor(
sanitized_transaction.pop( "min-self-delegation" )
), # Decimal floors it correctly
math.floor( sanitized_transaction.pop( "max-total-delegation" ) ),
bls_keys,
bls_key_sigs,
math.floor( sanitized_transaction.pop( "amount" ) ),
],
)
return _sign_transaction_generic(
account,
sanitized_transaction,
CreateValidator
)
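# Sketch of a CreateValidator transaction_dict: the keys mirror the docstring of
# sign_staking_transaction plus `bls-key-sigs` and `amount`, which the code above
# also consumes. Values are localnet placeholders taken from the test notebook.
def _example_create_validator_dict():
    return {
        "directive": Directive.CreateValidator,
        "validatorAddress": "one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3",
        "name": "Alice",
        "identity": "alice",
        "website": "alice.harmony.one",
        "security-contact": "Bob",
        "details": "Are you even reading this?",
        "rate": "0.1",
        "max-rate": "0.9",
        "max-change-rate": "0.05",
        "min-self-delegation": 10000 * 10 ** 18,  # in ATTO
        "max-total-delegation": 100000 * 10 ** 18,  # in ATTO
        "bls-public-keys": [
            "0xa20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712"
        ],
        "bls-key-sigs": [
            "0xef2c49a2f31fbbd23c21bc176eaf05cd0bebe6832033075d81fea7cff6f9bc1ab42f3b6895c5493fe645d8379d2eaa1413de55a9d3ce412a4f747cb57d52cc4da4754bfb2583ec9a41fe5dd48287f964f276336699959a5fcef3391dc24df00d"
        ],
        "amount": 10000 * 10 ** 18,  # initial self-delegation in ATTO
        "nonce": 1,
        "gasPrice": 100 * 10 ** 9,
        "gasLimit": 55000000,
        "chainId": 2,
    }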
def _sign_edit_validator( transaction_dict, private_key ):
"""Sign an edit validator transaction See sign_staking_transaction for
details."""
# preliminary steps
if transaction_dict[ "directive" ] != Directive.EditValidator:
raise TypeError(
"Only EditValidator is supported by _sign_create_or_edit_validator"
)
# first common step
account, sanitized_transaction = _get_account_and_transaction(
transaction_dict, private_key
)
# encode the stakeMsg
description = [
sanitized_transaction.pop( "name" ),
sanitized_transaction.pop( "identity" ),
sanitized_transaction.pop( "website" ),
sanitized_transaction.pop( "security-contact" ),
sanitized_transaction.pop( "details" ),
]
sanitized_transaction[ "stakeMsg" ] = apply_formatters_to_sequence(
[
hexstr_if_str( to_bytes ), # address
identity, # description
identity, # new rate ( it's in a list so can't do hexstr_if_str )
hexstr_if_str(
to_int
), # min self delegation ( in ONE ), decimals are silently dropped
hexstr_if_str(
to_int
), # max total delegation ( in ONE ), decimals are silently dropped
hexstr_if_str( to_bytes ), # key to remove
hexstr_if_str( to_bytes ), # key to add
hexstr_if_str( to_bytes ), # key to add sig
],
[
convert_one_to_hex( sanitized_transaction.pop( "validatorAddress" ) ),
description,
[ _convert_staking_percentage_to_number( sanitized_transaction.pop( "rate" ) ) ],
math.floor(
sanitized_transaction.pop( "min-self-delegation" )
), # Decimal floors it correctly
math.floor( sanitized_transaction.pop( "max-total-delegation" ) ),
sanitized_transaction.pop( "bls-key-to-remove" ),
sanitized_transaction.pop( "bls-key-to-add" ),
sanitized_transaction.pop( "bls-key-to-add-sig" ),
],
)
return _sign_transaction_generic(
account,
sanitized_transaction,
EditValidator
)
def sign_staking_transaction( transaction_dict, private_key ):
"""Sign a supplied transaction_dict with the private_key.
Parameters
----------
transaction_dict: :obj:`dict`, a dictionary with the following keys
directive :obj:`staking_structures.Directive`, type of transaction
nonce: :obj:`int`, nonce of transaction
gasPrice: :obj:`int`, gas price for the transaction
gasLimit: :obj:`int`, gas limit for the transaction
chainId: :obj:`int`, chain id for the transaction, optional
see util.chain_id_to_int for options
The following keys depend on the directive:
CollectRewards:
delegatorAddress: :obj:`str`, Address of the delegator
Delegate/Undelegate:
delegatorAddress: :obj:`str`, Address of the delegator
validatorAddress: :obj:`str`, Address of the validator
amount: :obj:`int`, Amount to ( un )delegate in ATTO
CreateValidator:
validatorAddress: :obj:`str`, Address of the validator
name: :obj:`str`, Name of the validator
identity: :obj:`str`, Identity of the validator, must be unique
website: :obj:`str`, Website of the validator
security-contact: :obj:`str`, Security contact
details: :obj:`str` Validator details
rate: :obj:`Decimal` or :obj:`str` Staking commission rate
max-rate: :obj:`Decimal` or :obj:`str` Maximum staking commission rate
max-change-rate: :obj:`Decimal` or :obj:`str` Maximum change in
staking commission rate per epoch
bls-public-keys: :obj:`list` List of strings of BLS public keys
bls-key-sigs: :obj:`list` List of strings of BLS key signatures,
one per BLS public key
min-self-delegation: :obj:`int` or :obj:`Decimal` Validator min
self delegation in ATTO
max-total-delegation: :obj:`int` or :obj:`Decimal` Validator max
total delegation in ATTO
amount: :obj:`int` Initial self-delegation in ATTO
EditValidator:
validatorAddress: :obj:`str`, Address of the validator
name: :obj:`str`, Name of the validator
identity: :obj:`str`, Identity of the validator, must be unique
website: :obj:`str`, Website of the validator
security-contact: :obj:`str`, Security contact
details: :obj:`str` Validator details
rate: :obj:`Decimal` or :obj:`str` Staking commission rate
min-self-delegation: :obj:`int` or :obj:`Decimal` Validator min
self delegation in ATTO
max-total-delegation: :obj:`int` or :obj:`Decimal` Validator max
total delegation in ATTO
bls-key-to-remove: :obj:`str` BLS Public key to remove
bls-key-to-add: :obj:`str` BLS Public key to add
bls-key-to-add-sig: :obj:`str` Signature corresponding to the BLS public key to add
private_key: :obj:`str`, the private key to sign the transaction with
Raises
------
AssertionError, if inputs are not as expected
KeyError, if inputs are missing
ValueError, if specifically staking rates are malformed
rlp.exceptions.ObjectSerializationError, if input data types are not as expected
Returns
-------
SignedTransaction object, whose rawTransaction field can be used to send
the transaction using transaction.send_raw_staking_transaction
API Reference
-------------
https://github.com/harmony-one/sdk/blob/99a827782fabcd5f91f025af0d8de228956d42b4/packages/harmony-staking/src/stakingTransaction.ts
"""
assert isinstance(
transaction_dict, dict
), "Only dictionaries are supported" # OrderedDict is a subclass
# chain_id missing => results in rlp decoding error for GasLimit
assert "chainId" in transaction_dict, "chainId missing"
assert "directive" in transaction_dict, "Staking transaction type not specified"
assert isinstance(
transaction_dict[ "directive" ], Directive
), "Unknown staking transaction type"
if transaction_dict[ "directive" ] == Directive.CollectRewards:
return _sign_collect_rewards( transaction_dict, private_key )
if transaction_dict[ "directive" ] == Directive.Delegate:
return _sign_delegate_or_undelegate( transaction_dict, private_key )
if transaction_dict[ "directive" ] == Directive.Undelegate:
return _sign_delegate_or_undelegate( transaction_dict, private_key )
if transaction_dict[ "directive" ] == Directive.CreateValidator:
return _sign_create_validator( transaction_dict, private_key )
if transaction_dict[ "directive" ] == Directive.EditValidator:
return _sign_edit_validator( transaction_dict, private_key )
raise ValueError( 'Unknown staking transaction type' )
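# Example usage (sketch, mirroring the test notebook in this repository): sign a
# Delegate staking transaction for localnet and return the raw hex accepted by
# transaction.send_raw_staking_transaction. The key is the localnet test key.
def _example_sign_delegate():
    transaction_dict = {
        "directive": Directive.Delegate,
        "delegatorAddress": "one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37",
        "validatorAddress": "one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3",
        "amount": 100 * 10 ** 18,  # 100 ONE in ATTO
        "nonce": 2,
        "gasPrice": 100 * 10 ** 9,  # 100 gwei in Atto
        "gasLimit": 50000,
        "chainId": 2,  # localnet
    }
    private_key = "3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65"
    return sign_staking_transaction( transaction_dict, private_key ).rawTransaction.hex()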

@ -0,0 +1,325 @@
"""
Helper module for signing Harmony staking transactions
"""
# disable most of the Lint here
# pylint: disable=protected-access,no-member,invalid-name,missing-class-docstring,missing-function-docstring
from enum import Enum, auto
from rlp.sedes import big_endian_int, Binary, CountableList, List, Text
from eth_rlp import HashableRLP
from eth_utils.curried import ( to_int, hexstr_if_str, )
# https://github.com/harmony-one/sdk/blob/99a827782fabcd5f91f025af0d8de228956d42b4/packages/harmony-staking/src/stakingTransaction.ts#L120
class Directive( Enum ):
def _generate_next_value_( name, start, count, last_values ): # pylint: disable=no-self-argument
return count
CreateValidator = auto()
EditValidator = auto()
Delegate = auto()
Undelegate = auto()
CollectRewards = auto()
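# Because _generate_next_value_ returns `count`, member values start at 0 and
# follow declaration order, e.g. Directive.CreateValidator.value == 0 and
# Directive.CollectRewards.value == 4.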
FORMATTERS = {
"directive": hexstr_if_str(
to_int
), # delegatorAddress is already formatted before the call
"nonce": hexstr_if_str(to_int),
"gasPrice": hexstr_if_str(to_int),
"gasLimit": hexstr_if_str(to_int),
"chainId": hexstr_if_str(to_int),
}
class CollectRewards:
@staticmethod
def UnsignedChainId():
class UnsignedChainId( HashableRLP ):
fields = (
( "directive",
big_endian_int ),
(
"stakeMsg",
CountableList(
Binary.fixed_length( 20,
allow_empty = True )
)
),
( "nonce",
big_endian_int ),
( "gasPrice",
big_endian_int ),
( "gasLimit",
big_endian_int ),
( "chainId",
big_endian_int ),
)
return UnsignedChainId
@staticmethod
def SignedChainId():
class SignedChainId( HashableRLP ):
fields = CollectRewards.UnsignedChainId()._meta.fields[
:-1
] + ( # drop chainId
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return SignedChainId
@staticmethod
def Unsigned():
class Unsigned( HashableRLP ):
fields = CollectRewards.UnsignedChainId(
)._meta.fields[ :-1 ] # drop chainId
return Unsigned
@staticmethod
def Signed():
class Signed( HashableRLP ):
fields = CollectRewards.Unsigned()._meta.fields[
:-3
] + ( # drop last 3 for raw.pop()
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return Signed
class DelegateOrUndelegate:
@staticmethod
def UnsignedChainId():
class UnsignedChainId( HashableRLP ):
fields = (
( "directive",
big_endian_int ),
(
"stakeMsg",
List(
[
Binary.fixed_length( 20,
allow_empty = True ),
Binary.fixed_length( 20,
allow_empty = True ),
big_endian_int,
],
True,
),
),
( "nonce",
big_endian_int ),
( "gasPrice",
big_endian_int ),
( "gasLimit",
big_endian_int ),
( "chainId",
big_endian_int ),
)
return UnsignedChainId
@staticmethod
def SignedChainId():
class SignedChainId( HashableRLP ):
fields = DelegateOrUndelegate.UnsignedChainId()._meta.fields[
:-1
] + ( # drop chainId
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return SignedChainId
@staticmethod
def Unsigned():
class Unsigned( HashableRLP ):
fields = DelegateOrUndelegate.UnsignedChainId(
)._meta.fields[ :-1 ] # drop chainId
return Unsigned
@staticmethod
def Signed():
class Signed( HashableRLP ):
fields = DelegateOrUndelegate.Unsigned()._meta.fields[
:-3
] + ( # drop last 3 for raw.pop()
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return Signed
class CreateValidator:
@staticmethod
def UnsignedChainId():
class UnsignedChainId( HashableRLP ):
fields = (
("directive", big_endian_int),
(
"stakeMsg",
List(
[ # list with the following members
# validatorAddress
Binary.fixed_length(
20, allow_empty=True
),
# description is Text of 5 elements
List(
[Text()] * 5, True
),
# commission rate is made up of 3 integers in an array
List(
[List([big_endian_int], True)] * 3, True
),
big_endian_int, # min self delegation
big_endian_int, # max total delegation
# bls-public-keys array of unspecified length, each key of 48
CountableList(
Binary.fixed_length(48, allow_empty=True)
),
# bls-key-sigs array of unspecified length, each sig of 96
CountableList(
Binary.fixed_length(96, allow_empty=True)
),
big_endian_int, # amount
],
True,
),
), # strictly these number of elements
("nonce", big_endian_int),
("gasPrice", big_endian_int),
("gasLimit", big_endian_int),
("chainId", big_endian_int),
)
return UnsignedChainId
@staticmethod
def SignedChainId():
class SignedChainId( HashableRLP ):
fields = CreateValidator.UnsignedChainId()._meta.fields[
:-1
] + ( # drop chainId
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return SignedChainId
@staticmethod
def Unsigned():
class Unsigned( HashableRLP ):
fields = CreateValidator.UnsignedChainId(
)._meta.fields[ :-1 ] # drop chainId
return Unsigned
@staticmethod
def Signed():
class Signed( HashableRLP ):
fields = CreateValidator.Unsigned()._meta.fields[
:-3
] + ( # drop last 3 for raw.pop()
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return Signed
class EditValidator:
@staticmethod
def UnsignedChainId():
class UnsignedChainId( HashableRLP ):
fields = (
("directive", big_endian_int),
(
"stakeMsg",
List(
[ # list with the following members
# validatorAddress
Binary.fixed_length(
20, allow_empty=True
),
# description is Text of 5 elements
List(
[Text()] * 5, True
),
# new rate is in a list
List([big_endian_int], True),
big_endian_int, # min self delegation
big_endian_int, # max total delegation
# slot key to remove
Binary.fixed_length(
48, allow_empty=True
),
# slot key to add
Binary.fixed_length(
48, allow_empty=True
),
# slot key to add sig
Binary.fixed_length(
96, allow_empty=True
),
],
True,
),
), # strictly these number of elements
("nonce", big_endian_int),
("gasPrice", big_endian_int),
("gasLimit", big_endian_int),
("chainId", big_endian_int),
)
return UnsignedChainId
@staticmethod
def SignedChainId():
class SignedChainId( HashableRLP ):
fields = EditValidator.UnsignedChainId()._meta.fields[
:-1
] + ( # drop chainId
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return SignedChainId
@staticmethod
def Unsigned():
class Unsigned( HashableRLP ):
fields = EditValidator.UnsignedChainId(
)._meta.fields[ :-1 ] # drop chainId
return Unsigned
@staticmethod
def Signed():
class Signed( HashableRLP ):
fields = EditValidator.Unsigned()._meta.fields[
:-3
] + ( # drop last 3 for raw.pop()
("v", big_endian_int),
("r", big_endian_int),
("s", big_endian_int),
)
return Signed

File diff suppressed because it is too large

@ -1,55 +1,117 @@
"""
Basic pyhmy utils like is_shard_active
ONE address format conversion
Chain id (str) to int conversion
"""
import json
import subprocess
import os
import sys
import datetime
import requests
from eth_utils import to_checksum_address
from .blockchain import (
get_latest_header
)
from .blockchain import get_latest_header
from .rpc.exceptions import (
RPCError,
RequestsError,
RequestsTimeoutError,
)
from .rpc.exceptions import ( RPCError, RequestsError, RequestsTimeoutError, )
datetime_format = "%Y-%m-%d %H:%M:%S.%f"
from .account import is_valid_address
from .bech32.bech32 import bech32_decode, bech32_encode, convertbits
class Typgpy(str):
class Typgpy( str ):
"""Typography constants for pretty printing.
Note that an ENDC is needed to mark the end of a 'highlighted' text
segment.
"""
HEADER = "\033[95m"
OKBLUE = "\033[94m"
OKGREEN = "\033[92m"
WARNING = "\033[93m"
FAIL = "\033[91m"
ENDC = "\033[0m"
BOLD = "\033[1m"
UNDERLINE = "\033[4m"
def chain_id_to_int( chain_id ):
"""
If chain_id is a string, converts it to int.
If chain_id is an int, returns the int.
Else raises TypeError
"""
chain_ids = dict(
Default = 0,
EthMainnet = 1,
Morden = 2,
Ropsten = 3,
Rinkeby = 4,
RootstockMainnet = 30,
RootstockTestnet = 31,
Kovan = 42,
EtcMainnet = 61,
EtcTestnet = 62,
Geth = 1337,
Ganache = 0,
HmyMainnet = 1,
HmyTestnet = 2,
HmyLocal = 2,
HmyPangaea = 3,
)
# do not validate integer chainids, only known strings
if isinstance( chain_id, str ):
assert (
chain_id in chain_ids
), f"Chain {chain_id} unknown, specify an integer chainId"
return chain_ids.get( chain_id )
if isinstance( chain_id, int ):
return chain_id
raise TypeError( "chainId must be str or int" )
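# Examples (sketch): known chain names map to their integer id, integers are
# passed through unchanged, and any other type raises TypeError.
def _example_chain_id_to_int():
    assert chain_id_to_int( "HmyMainnet" ) == 1
    assert chain_id_to_int( 1666600000 ) == 1666600000
    try:
        chain_id_to_int( None )
    except TypeError:
        pass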
def get_gopath():
"""
:returns The go-path, assuming that go is installed.
"""
return subprocess.check_output(["go", "env", "GOPATH"]).decode().strip()
return subprocess.check_output( [ "go", "env", "GOPATH" ] ).decode().strip()
def get_goversion():
"""
:returns The go-version, assuming that go is installed.
"""
return subprocess.check_output(["go", "version"]).decode().strip()
return subprocess.check_output( [ "go", "version" ] ).decode().strip()
def convert_one_to_hex( addr ):
"""Given a one address, convert it to hex checksum address."""
if not is_valid_address( addr ):
return to_checksum_address( addr )
_, data = bech32_decode( addr )
buf = convertbits( data, 5, 8, False )
address = "0x" + "".join( f"{x:02x}" for x in buf )
return str( to_checksum_address( address ) )
def convert_hex_to_one( addr ):
"""Given a hex address, convert it to a one address."""
if is_valid_address( addr ):
return addr
checksum_addr = str( to_checksum_address( addr ) )
data = bytearray.fromhex(
checksum_addr[ 2 : ] if checksum_addr
.startswith( "0x" ) else checksum_addr
)
buf = convertbits( data, 8, 5 )
return str( bech32_encode( "one", buf ) )
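# Round-trip sketch: the two converters invert each other; the address below is
# the localnet test account used elsewhere in this repository.
def _example_address_round_trip():
    one_addr = "one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37"
    hex_addr = convert_one_to_hex( one_addr )  # 0x-prefixed checksum address
    assert convert_hex_to_one( hex_addr ) == one_addr
    return hex_addr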
def is_active_shard(endpoint, delay_tolerance=60):
def is_active_shard( endpoint, delay_tolerance = 60 ):
"""
:param endpoint: The endpoint of the SHARD to check
:param delay_tolerance: The time (in seconds) that the shard timestamp can be behind
@ -57,12 +119,15 @@ def is_active_shard(endpoint, delay_tolerance=60):
"""
try:
curr_time = datetime.datetime.utcnow()
latest_header = get_latest_header(endpoint=endpoint)
time_str = latest_header["timestamp"][:19] + '.0' # Fit time format
timestamp = datetime.datetime.strptime(time_str, "%Y-%m-%d %H:%M:%S.%f").replace(tzinfo=None)
latest_header = get_latest_header( endpoint = endpoint )
time_str = latest_header[ "timestamp" ][ : 19 ] + ".0" # Fit time format
timestamp = datetime.datetime.strptime(
time_str,
"%Y-%m-%d %H:%M:%S.%f"
).replace( tzinfo = None )
time_delta = curr_time - timestamp
return abs(time_delta.seconds) < delay_tolerance
except (RPCError, RequestsError, RequestsTimeoutError):
return abs( time_delta.seconds ) < delay_tolerance
except ( RPCError, RequestsError, RequestsTimeoutError ):
return False
@ -77,27 +142,37 @@ def get_bls_build_variables():
"""
variables = {}
try:
openssl_dir = subprocess.check_output(["which", "openssl"]).decode().strip().split("\n")[0]
except (IndexError, subprocess.CalledProcessError) as e:
raise RuntimeError("`openssl` not found") from e
openssl_dir = (
subprocess.check_output(
[ "which",
"openssl" ]
).decode().strip().split( "\n",
maxsplit = 1 )[ 0 ]
)
except ( IndexError, subprocess.CalledProcessError ) as exception:
raise RuntimeError( "`openssl` not found" ) from exception
hmy_path = f"{get_gopath()}/src/github.com/harmony-one"
bls_dir = f"{hmy_path}/bls"
mcl_dir = f"{hmy_path}/mcl"
assert os.path.exists(bls_dir), f"Harmony BLS repo not found at {bls_dir}"
assert os.path.exists(mcl_dir), f"Harmony MCL repo not found at {mcl_dir}"
if sys.platform.startswith("darwin"):
variables["CGO_CFLAGS"] = f"-I{bls_dir}/include -I{mcl_dir}/include -I{openssl_dir}/include"
variables["CGO_LDFLAGS"] = f"-L{bls_dir}/lib -L{openssl_dir}/lib"
variables["LD_LIBRARY_PATH"] = f"{bls_dir}/lib:{mcl_dir}/lib:{openssl_dir}/lib"
variables["DYLD_FALLBACK_LIBRARY_PATH"] = variables["LD_LIBRARY_PATH"]
assert os.path.exists( bls_dir ), f"Harmony BLS repo not found at {bls_dir}"
assert os.path.exists( mcl_dir ), f"Harmony MCL repo not found at {mcl_dir}"
if sys.platform.startswith( "darwin" ):
variables[
"CGO_CFLAGS"
] = f"-I{bls_dir}/include -I{mcl_dir}/include -I{openssl_dir}/include"
variables[ "CGO_LDFLAGS" ] = f"-L{bls_dir}/lib -L{openssl_dir}/lib"
variables[ "LD_LIBRARY_PATH"
] = f"{bls_dir}/lib:{mcl_dir}/lib:{openssl_dir}/lib"
variables[ "DYLD_FALLBACK_LIBRARY_PATH" ] = variables[ "LD_LIBRARY_PATH"
]
else:
variables["CGO_CFLAGS"] = f"-I{bls_dir}/include -I{mcl_dir}/include"
variables["CGO_LDFLAGS"] = f"-L{bls_dir}/lib"
variables["LD_LIBRARY_PATH"] = f"{bls_dir}/lib:{mcl_dir}/lib"
variables[ "CGO_CFLAGS" ] = f"-I{bls_dir}/include -I{mcl_dir}/include"
variables[ "CGO_LDFLAGS" ] = f"-L{bls_dir}/lib"
variables[ "LD_LIBRARY_PATH" ] = f"{bls_dir}/lib:{mcl_dir}/lib"
return variables
def json_load(string, **kwargs):
def json_load( string, **kwargs ):
"""
:param string: The JSON string to load
:returns A dictionary loaded from a JSON string to a dictionary.
@ -106,7 +181,7 @@ def json_load(string, **kwargs):
Note that this prints the failed input should an error arise.
"""
try:
return json.loads(string, **kwargs)
except Exception as e:
print(f"{Typgpy.FAIL}Could not parse input: '{string}'{Typgpy.ENDC}")
raise e from e
return json.loads( string, **kwargs )
except Exception as exception:
print( f"{Typgpy.FAIL}Could not parse input: '{string}'{Typgpy.ENDC}" )
raise exception

File diff suppressed because it is too large

@ -0,0 +1,44 @@
# pyproject.toml
[build-system]
requires = ["setuptools>=61.0.0", "wheel"]
build-backend = "setuptools.build_meta"
[project]
name = "pyhmy"
version = "0.1.1"
description = "A library for interacting and working with the Harmony blockchain and related codebases"
readme = "README.md"
license = { text = "MIT" }
keywords = [ "Harmony", "blockchain", "protocol", "staking" ]
dependencies = [
"pexpect",
"requests",
"eth-rlp",
"eth-account >= 0.5.5",
"eth-utils",
"hexbytes",
"cytoolz"
]
requires-python = ">=3.0"
[project.optional-dependencies]
dev = [ "black", "autopep8", "yapf", "twine", "build", "docformatter", "bumpver" ]
[tool.bumpver]
current_version = "0.1.1"
version_pattern = "MAJOR.MINOR.PATCH"
commit_message = "chore: bump version {old_version} -> {new_version}"
# git commit --amend -S
commit = true
# git tag ${bumpver show -n | head -1 | awk '{print $3}'} -f -s
tag = true
push = false
[tool.bumpver.file_patterns]
"pyproject.toml" = [
'current_version = "{version}"',
'version = "{version}"'
]
"pyhmy/__init__.py" = ['__version__ = "{version}"']

@ -1,3 +1,2 @@
[pytest]
addopts = -v --showlocals
python_paths = .
addopts = -v --showlocals

@ -1,40 +0,0 @@
import pathlib
from setuptools import setup, find_packages
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(
name='pyhmy',
use_incremental=True,
license='MIT',
description="A library for interacting and working the Harmony blockchain and related codebases.",
long_description=README,
long_description_content_type="text/markdown",
author='Daniel Van Der Maden',
author_email='daniel@harmony.one',
url="http://harmony.one/",
packages=find_packages(),
keywords=['Harmony', 'blockchain', 'protocol'],
install_requires=[
'pexpect',
'requests',
'incremental',
],
setup_requires=[
'incremental',
'pytest',
'pytest-ordering',
'click',
'twisted'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
]
)

@ -0,0 +1,578 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 105,
"id": "feee22ef",
"metadata": {},
"outputs": [],
"source": [
"from pyhmy import signing, staking_signing, numbers, transaction, account, validator as validator_module, staking_structures, contract\n",
"from web3 import Web3"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "55b8db60",
"metadata": {},
"outputs": [],
"source": [
"# we need five transactions in conftest\n",
"# simple transfer (from localnet address)\n",
"# contract creation (from second address)\n",
"# cross shard transfer (from second address)\n",
"# validator creation (from localnet address)\n",
"# delegation (from second address)"
]
},
{
"cell_type": "markdown",
"id": "e104724c",
"metadata": {},
"source": [
"### Simple Transfer"
]
},
{
"cell_type": "code",
"execution_count": 144,
"id": "d7fa35f8",
"metadata": {},
"outputs": [],
"source": [
"pk = \"1f84c95ac16e6a50f08d44c7bde7aff8742212fda6e4321fde48bf83bef266dc\"\n",
"tx = {\n",
" 'from': 'one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3',\n",
" # 3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65\n",
" 'to': 'one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37',\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gas': 21000,\n",
" 'chainId': 2, # localnet\n",
" 'value': int( numbers.convert_one_to_atto( 503 ) ),\n",
" 'nonce': 0,\n",
" 'shardID': 0,\n",
" 'toShardID': 0,\n",
"}\n",
"raw_tx = signing.sign_transaction(tx, pk).rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_transaction(raw_tx)"
]
},
{
"cell_type": "code",
"execution_count": 145,
"id": "ed907d4b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf86f8085174876e8008252088080941f2213a52f7409ff4f103458e6d202e0b3aa805a891b4486fafde57c00008027a0d7c0b20207dcc9dde376822dc3f5625eac6f59a7526111695cdba3e29553ca17a05d4ca9a421ae16f89cbf6848186eaea7a800da732446dff9952e7c1e91d414e3\n",
"0xc26be5776aa57438bccf196671a2d34f3f22c9c983c0f844c62b2fb90403aa43\n"
]
}
],
"source": [
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "markdown",
"id": "1bbee37b",
"metadata": {},
"source": [
"### Contract Creation"
]
},
{
"cell_type": "code",
"execution_count": 147,
"id": "b143507b",
"metadata": {},
"outputs": [],
"source": [
"pk = '3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65'\n",
"data = \"0x6080604052348015600f57600080fd5b50607780601d6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f0033\"\n",
"tx = {\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gas': 100000,\n",
" 'chainId': 2,\n",
" 'nonce': 0,\n",
" 'shardID': 0,\n",
" 'toShardID': 0,\n",
" 'data': data,\n",
"}\n",
"raw_tx = signing.sign_transaction(tx, pk).rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_transaction(raw_tx)"
]
},
{
"cell_type": "code",
"execution_count": 148,
"id": "53dbcbff",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf8e88085174876e800830186a080808080b8946080604052348015600f57600080fd5b50607780601d6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f003327a08bf26ee0120c296b17af507f62606abdb5c5f09a65642c3d30b349b8bfbb3d69a03ec7be51c615bcbf2f1d63f6eaa56cf8d7be81671717f90239619830a81ebc9f\n",
"0xa605852dd2fa39ed42e101c17aaca9d344d352ba9b24b14b9af94ec9cb58b31f\n"
]
}
],
"source": [
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "code",
"execution_count": 130,
"id": "6e66392b",
"metadata": {},
"outputs": [],
"source": [
"contract_address = transaction.get_transaction_receipt( tx_hash ).get( 'contractAddress' )\n",
"deployed = contract.get_code( contract_address, 'latest' )"
]
},
{
"cell_type": "code",
"execution_count": 131,
"id": "ead2f9d4",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0x6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f0033\n",
"0x6080604052348015600f57600080fd5b50607780601d6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f0033\n"
]
}
],
"source": [
"print( deployed )\n",
"print( data )"
]
},
{
"cell_type": "code",
"execution_count": 132,
"id": "453a34d6",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3300f080003436c6f63746dc447adeb4d82da06660bc16489e70d315a93fde5dd4f95a1387620d8e0af3af0221228537660796462aef3f091002063009180915092518100615080406b5df080006b575d2064163dc63943608c10e065300067582060163400605b5df08000675f0065108432504060806x0\n",
"3300f080003436c6f63746dc447adeb4d82da06660bc16489e70d315a93fde5dd4f95a1387620d8e0af3af0221228537660796462aef3f091002063009180915092518100615080406b5df080006b575d2064163dc63943608c10e065300067582060163400605b5df08000675f0065108432504060806ef3f0006930006d10608770605b5df08000675f0065108432504060806x0\n"
]
}
],
"source": [
"print( \"\".join( [ deployed[ len( deployed ) - ( i + 1 ) ] for i in range( len( deployed ) ) ] ) )\n",
"print( \"\".join( [ data[ len( data ) - ( i + 1 ) ] for i in range( len( data ) ) ] ) )"
]
},
{
"cell_type": "code",
"execution_count": 133,
"id": "d251d1bf",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 133,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\"0x6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f0033\" == deployed"
]
},
{
"cell_type": "markdown",
"id": "812e033c",
"metadata": {},
"source": [
"### Cross Shard Transfer"
]
},
{
"cell_type": "code",
"execution_count": 149,
"id": "d7c70614",
"metadata": {},
"outputs": [],
"source": [
"pk = '3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65'\n",
"tx = {\n",
" 'from': 'one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37',\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gas': 21000,\n",
" 'chainId': 2,\n",
" 'nonce': 1,\n",
" 'shardID': 0,\n",
" 'toShardID': 1,\n",
" 'to': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'value': Web3.toWei( 100, 'gwei' ),\n",
"}\n",
"raw_tx = signing.sign_transaction(tx, pk).rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_transaction(raw_tx)"
]
},
{
"cell_type": "code",
"execution_count": 150,
"id": "f20990f1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf86b0185174876e800825208800194c9c6d47ee5f2e3e08d7367ad1a1373ba9dd1724185174876e8008027a02501c517220e9499f14e97c20b0a88cd3b7ba80637bba43ed295422e69a3f300a079b8e1213c9506184aed6ac2eb0b2cb00594c3f9fcdd6c088937ce17fe47107c\n",
"0xf73ba634cb96fc0e3e2c9d3b4c91379e223741be4a5aa56e6d6caf49c1ae75cf\n"
]
}
],
"source": [
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "code",
"execution_count": 153,
"id": "66f024b9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0"
]
},
"execution_count": 153,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"account.get_balance( 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes', 'http://localhost:9502' )"
]
},
{
"cell_type": "markdown",
"id": "2b2446df",
"metadata": {},
"source": [
"### Validator Creation"
]
},
{
"cell_type": "code",
"execution_count": 154,
"id": "c3513c37",
"metadata": {},
"outputs": [],
"source": [
"pk = \"1f84c95ac16e6a50f08d44c7bde7aff8742212fda6e4321fde48bf83bef266dc\"\n",
"address = \"one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3\"\n",
"info = {\n",
" \"name\": \"Alice\",\n",
" \"identity\": \"alice\",\n",
" \"website\": \"alice.harmony.one\",\n",
" \"security-contact\": \"Bob\",\n",
" \"details\": \"Are you even reading this?\",\n",
" \"min-self-delegation\": int( numbers.convert_one_to_atto( 10000 ) ),\n",
" \"max-total-delegation\": int( numbers.convert_one_to_atto( 100000 ) ),\n",
" \"rate\": \"0.1\",\n",
" \"max-rate\": \"0.9\",\n",
" \"max-change-rate\": \"0.05\",\n",
" \"bls-public-keys\": [\n",
" # private key is b1f2a5029f5f43c8c933a61ce936ced030b2c9379f8e2478fc888fa670cdbc89b8cd1ebc29b5b00a81d3152bb3aaa3a337404f50bee5e434430ca3693a94a1c102a765cf3b0887b8b0bcf5317d33f4bec60a97feae2498a39ab7a1c2\n",
" # blspass.txt is empty\n",
" \"0xa20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712\",\n",
" ],\n",
" \"amount\": int( numbers.convert_one_to_atto( 10000 ) ),\n",
" \"bls-key-sigs\": [\n",
" \"0xef2c49a2f31fbbd23c21bc176eaf05cd0bebe6832033075d81fea7cff6f9bc1ab42f3b6895c5493fe645d8379d2eaa1413de55a9d3ce412a4f747cb57d52cc4da4754bfb2583ec9a41fe5dd48287f964f276336699959a5fcef3391dc24df00d\",\n",
" ]\n",
"}\n",
"validator = validator_module.Validator( address )\n",
"validator.load( info )\n",
"raw_tx = validator.sign_create_validator_transaction(\n",
" 1,\n",
" Web3.toWei( 100, 'gwei' ),\n",
" 55000000, # gas limit\n",
" pk,\n",
" 2 # chain id\n",
").rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_staking_transaction(\n",
" raw_tx,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 155,
"id": "9b12f75f",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf9017c80f9012994a5241513da9f4463f1d4874b548dfbac29d91f34f83d85416c69636585616c69636591616c6963652e6861726d6f6e792e6f6e6583426f629a41726520796f75206576656e2072656164696e6720746869733fddc988016345785d8a0000c9880c7d713b49da0000c887b1a2bc2ec500008a021e19e0c9bab24000008a152d02c7e14af6800000f1b0a20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712f862b860ef2c49a2f31fbbd23c21bc176eaf05cd0bebe6832033075d81fea7cff6f9bc1ab42f3b6895c5493fe645d8379d2eaa1413de55a9d3ce412a4f747cb57d52cc4da4754bfb2583ec9a41fe5dd48287f964f276336699959a5fcef3391dc24df00d8a021e19e0c9bab24000000185174876e8008403473bc028a08c1146305eaef981aa24c2f17c8519664d10c99ee42acedbc258749930d31a7ca031dadf114ee6ab9bd09933208094c65037b66c796bcfc57a70158106b37357b0\n",
"0x400e9831d358f5daccd153cad5bf53650a0d413bd8682ec0ffad55367d162968\n"
]
}
],
"source": [
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "markdown",
"id": "4c2ff645",
"metadata": {},
"source": [
"### Delegation"
]
},
{
"cell_type": "code",
"execution_count": 156,
"id": "458d81b8",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf88302f4941f2213a52f7409ff4f103458e6d202e0b3aa805a94a5241513da9f4463f1d4874b548dfbac29d91f3489056bc75e2d631000000285174876e80082c35028a02c5e953062dcdfa2de9723639b63bab45705eb6dfbfe7f44536ed266c3c7ca20a0742964e646338e7431874f70715565d99c01c762324355c69db34a9ed9de81d7\n",
"0xc8177ace2049d9f4eb4a45fd6bd6b16f693573d036322c36774cc00d05a3e24f\n"
]
}
],
"source": [
"pk = \"3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65\"\n",
"tx = {\n",
" 'directive': staking_structures.Directive.Delegate,\n",
" 'delegatorAddress': 'one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37',\n",
" 'validatorAddress': 'one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3',\n",
" 'amount': Web3.toWei( 100, 'ether' ),\n",
" 'nonce': 2,\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gasLimit': 50000,\n",
" 'chainId': 2,\n",
"}\n",
"raw_tx = staking_signing.sign_staking_transaction(\n",
" tx,\n",
" pk,\n",
").rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_staking_transaction(\n",
" raw_tx,\n",
")\n",
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "markdown",
"id": "8efa5536",
"metadata": {},
"source": [
"### test_transaction.py - transfer 105 ONE to another address"
]
},
{
"cell_type": "code",
"execution_count": 157,
"id": "c3295fee",
"metadata": {},
"outputs": [],
"source": [
"pk = '3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65'\n",
"tx = {\n",
" 'from': 'one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37',\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gas': 21000,\n",
" 'chainId': 2,\n",
" 'nonce': 3,\n",
" 'shardID': 0,\n",
" 'toShardID': 0,\n",
" 'to': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'value': Web3.toWei( 105, 'ether' ),\n",
"}\n",
"raw_tx = signing.sign_transaction(tx, pk).rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_transaction(raw_tx)"
]
},
{
"cell_type": "code",
"execution_count": 158,
"id": "af515c7e",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf86f0385174876e800825208808094c9c6d47ee5f2e3e08d7367ad1a1373ba9dd172418905b12aefafa80400008027a07a4952b90bf38723a9197179a8e6d2e9b3a86fd6da4e66a9cf09fdc59783f757a053910798b311245525bd77d6119332458c2855102e4fb9e564f6a3b710d18bb0\n",
"0x7ccd80f8513f76ec58b357c7a82a12a95e025d88f1444e953f90e3d86e222571\n"
]
}
],
"source": [
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "markdown",
"id": "bb546b3e",
"metadata": {},
"source": [
"### test_transaction.py - staking transaction"
]
},
{
"cell_type": "code",
"execution_count": 168,
"id": "c14e2d6d",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0xf88302f494c9c6d47ee5f2e3e08d7367ad1a1373ba9dd1724194a5241513da9f4463f1d4874b548dfbac29d91f3489056bc75e2d631000008085174876e80082c35027a0808ea7d27adf3b1f561e8da4676814084bb75ac541b616bece87c6446e6cc54ea02f19f0b14240354bd42ad60b0c7189873c0be87044e13072b0981a837ca76f64\n",
"0xe7d07ef6d9fca595a14ceb0ca917bece7bedb15efe662300e9334a32ac1da629\n"
]
}
],
"source": [
"pk = \"ff9ef6b00a61672b4b7bedd5ac653439b56ac8ee808c99a1bd871cf51b7d60eb\"\n",
"tx = {\n",
" 'directive': staking_structures.Directive.Delegate,\n",
" 'delegatorAddress': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'validatorAddress': 'one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3',\n",
" 'amount': Web3.toWei( 100, 'ether' ),\n",
" 'nonce': 0,\n",
" 'gasPrice': Web3.toWei( 100, 'gwei' ),\n",
" 'gasLimit': 50000,\n",
" 'chainId': 2,\n",
"}\n",
"raw_tx = staking_signing.sign_staking_transaction(\n",
" tx,\n",
" pk,\n",
").rawTransaction.hex()\n",
"tx_hash = transaction.send_raw_staking_transaction(\n",
" raw_tx,\n",
")\n",
"print( raw_tx )\n",
"print( tx_hash )"
]
},
{
"cell_type": "code",
"execution_count": 162,
"id": "ebf296aa",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'blockHash': '0xf55f1fb3c9be76fb74370e8a7d8580327797d2d6082040074783207a171e2de6',\n",
" 'blockNumber': 34,\n",
" 'from': 'one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37',\n",
" 'hash': '0xf73ba634cb96fc0e3e2c9d3b4c91379e223741be4a5aa56e6d6caf49c1ae75cf',\n",
" 'shardID': 0,\n",
" 'to': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'toShardID': 1,\n",
" 'value': 100000000000}"
]
},
"execution_count": 162,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"transaction.get_cx_receipt_by_hash( '0xf73ba634cb96fc0e3e2c9d3b4c91379e223741be4a5aa56e6d6caf49c1ae75cf', 'http://localhost:9502' )"
]
},
{
"cell_type": "code",
"execution_count": 166,
"id": "ff0229ce",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{'blockHash': '0x0000000000000000000000000000000000000000000000000000000000000000',\n",
" 'blockNumber': None,\n",
" 'from': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'gas': 50000,\n",
" 'gasPrice': 100000000000,\n",
" 'hash': '0x279935278d20d778cbe4fdfa5d51be9eb1eb184053dc9a7cb88ad3365df73060',\n",
" 'msg': {'amount': 100000000000000000000,\n",
" 'delegatorAddress': 'one1e8rdglh97t37prtnv7k35ymnh2wazujpzsmzes',\n",
" 'validatorAddress': 'one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3'},\n",
" 'nonce': 2,\n",
" 'r': '0x8660a63c10af06f2fb3f24b92cf61d4f319044a1f1931c4f4e54ce986ff563c',\n",
" 's': '0x597785559c4283d3ece2df37cbf37077966487a2a2dc0f4cdbbf75a8f20bc1a8',\n",
" 'timestamp': 0,\n",
" 'transactionIndex': 0,\n",
" 'type': 'Delegate',\n",
" 'v': '0x27'}"
]
},
"execution_count": 166,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"transaction.get_staking_transaction_by_hash( \"0x279935278d20d778cbe4fdfa5d51be9eb1eb184053dc9a7cb88ad3365df73060\" )"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.5"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

@ -0,0 +1,9 @@
from pyhmy.bech32 import bech32
def test_encode():
bech32.encode( "one", 5, [ 121, 161 ] )
def test_decode():
bech32.decode( "one", "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9" )

@ -10,61 +10,58 @@ TEMP_DIR = "/tmp/pyhmy-testing/test-cli"
BINARY_FILE_PATH = f"{TEMP_DIR}/bin/cli_test_binary"
@pytest.fixture(scope="session", autouse=True)
@pytest.fixture( scope = "session", autouse = True )
def setup():
shutil.rmtree(TEMP_DIR, ignore_errors=True)
os.makedirs(TEMP_DIR, exist_ok=True)
shutil.rmtree( TEMP_DIR, ignore_errors = True )
os.makedirs( TEMP_DIR, exist_ok = True )
@pytest.mark.run(order=0)
def test_download_cli():
env = cli.download(BINARY_FILE_PATH, replace=False, verbose=False)
cli.environment.update(env)
assert os.path.exists(BINARY_FILE_PATH)
env = cli.download( BINARY_FILE_PATH, replace = False, verbose = False )
cli.environment.update( env )
assert os.path.exists( BINARY_FILE_PATH )
@pytest.mark.run(order=1)
def test_is_valid():
bad_file_path = os.path.realpath(f"{TEMP_DIR}/test_is_valid/bad_hmy")
shutil.rmtree(Path(bad_file_path).parent, ignore_errors=True)
os.makedirs(Path(bad_file_path).parent, exist_ok=True)
Path(bad_file_path).touch()
assert os.path.exists(BINARY_FILE_PATH), "harmony cli did not download"
assert os.path.exists(bad_file_path), "did not create bad binary"
assert cli.is_valid_binary(BINARY_FILE_PATH)
assert not cli.is_valid_binary(bad_file_path)
bad_file_path = os.path.realpath( f"{TEMP_DIR}/test_is_valid/bad_hmy" )
shutil.rmtree( Path( bad_file_path ).parent, ignore_errors = True )
os.makedirs( Path( bad_file_path ).parent, exist_ok = True )
Path( bad_file_path ).touch()
assert os.path.exists( BINARY_FILE_PATH ), "harmony cli did not download"
assert os.path.exists( bad_file_path ), "did not create bad binary"
assert cli.is_valid_binary( BINARY_FILE_PATH )
assert not cli.is_valid_binary( bad_file_path )
@pytest.mark.run(order=2)
def test_bad_bin_set():
bad_file_path = os.path.realpath(f"{TEMP_DIR}/test_bad_bin_set/hmy")
shutil.rmtree(Path(bad_file_path).parent, ignore_errors=True)
os.makedirs(Path(bad_file_path).parent, exist_ok=True)
Path(bad_file_path).touch()
is_set = cli.set_binary(bad_file_path)
bad_file_path = os.path.realpath( f"{TEMP_DIR}/test_bad_bin_set/hmy" )
shutil.rmtree( Path( bad_file_path ).parent, ignore_errors = True )
os.makedirs( Path( bad_file_path ).parent, exist_ok = True )
Path( bad_file_path ).touch()
is_set = cli.set_binary( bad_file_path )
assert not is_set
assert cli.get_binary_path() != bad_file_path
@pytest.mark.run(order=3)
def test_bin_set():
cli.set_binary(BINARY_FILE_PATH)
cli.set_binary( BINARY_FILE_PATH )
cli_binary_path = cli.get_binary_path()
assert os.path.realpath(cli_binary_path) == os.path.realpath(BINARY_FILE_PATH)
assert os.path.realpath( cli_binary_path
) == os.path.realpath( BINARY_FILE_PATH )
def test_update_keystore():
cli.single_call("hmy keys add test1")
cli.single_call( "hmy keys add test1" )
addrs = cli.get_accounts_keystore()
assert "test1" in addrs.keys()
check_addr = addrs["test1"]
accounts_list = cli.get_accounts(check_addr)
check_acc = accounts_list[0]
check_addr = addrs[ "test1" ]
accounts_list = cli.get_accounts( check_addr )
check_acc = accounts_list[ 0 ]
assert check_acc == "test1"
raw_cli_keys_list_print = cli.single_call("hmy keys list", timeout=2)
raw_cli_keys_list_print = cli.single_call( "hmy keys list", timeout = 2 )
assert check_addr in raw_cli_keys_list_print
assert check_acc in raw_cli_keys_list_print
assert addrs[check_acc] == check_addr
cli.remove_address(check_addr)
assert addrs[ check_acc ] == check_addr
cli.remove_address( check_addr )
assert check_addr not in addrs.values()
assert "test1" not in addrs.keys()

@ -6,19 +6,19 @@ from pyhmy import logging
def test_basic_logger():
if os.path.exists(f"{os.getcwd()}/logs/pytest.log"):
os.remove(f"{os.getcwd()}/logs/pytest.log")
logger = logging.ControlledLogger("pytest", "logs/")
assert os.path.exists(f"{os.getcwd()}/logs/pytest.log")
logger.info("test info")
logger.debug("test debug")
logger.error("test error")
logger.warning("test warning")
with open(f"{os.getcwd()}/logs/pytest.log", 'r') as f:
if os.path.exists( f"{os.getcwd()}/logs/pytest.log" ):
os.remove( f"{os.getcwd()}/logs/pytest.log" )
logger = logging.ControlledLogger( "pytest", "logs/" )
assert os.path.exists( f"{os.getcwd()}/logs/pytest.log" )
logger.info( "test info" )
logger.debug( "test debug" )
logger.error( "test error" )
logger.warning( "test warning" )
with open( f"{os.getcwd()}/logs/pytest.log", "r" ) as f:
log_file_contents = f.readlines()
assert not log_file_contents
logger.write()
with open(f"{os.getcwd()}/logs/pytest.log", 'r') as f:
with open( f"{os.getcwd()}/logs/pytest.log", "r" ) as f:
log_file_contents = f.readlines()
for line in log_file_contents:
if "INFO" in line:

@ -1,36 +1,31 @@
from decimal import Decimal
import pytest
from pyhmy import numbers
from pyhmy import (
numbers
)
@pytest.mark.run(order=1)
def test_convert_atto_to_one():
a = numbers.convert_atto_to_one(1e18)
assert Decimal(1) == a
a = numbers.convert_atto_to_one( 1e18 )
assert Decimal( 1 ) == a
b = numbers.convert_atto_to_one( 1e18 + 0.6 )
assert Decimal( 1 ) == b
b = numbers.convert_atto_to_one(1e18 + 0.6)
assert Decimal(1) == b
c = numbers.convert_atto_to_one( "1" + ( "0" * 18 ) )
assert Decimal( 1 ) == c
c = numbers.convert_atto_to_one('1' + ('0' * 18))
assert Decimal(1) == c
d = numbers.convert_atto_to_one( Decimal( 1e18 ) )
assert Decimal( 1 ) == d
d = numbers.convert_atto_to_one(Decimal(1e18))
assert Decimal(1) == d
@pytest.mark.run(order=2)
def test_convert_one_to_atto():
a = numbers.convert_one_to_atto(1e-18)
assert Decimal(1) == a
a = numbers.convert_one_to_atto( 1e-18 )
assert Decimal( 1 ) == a
b = numbers.convert_one_to_atto(1.5)
assert Decimal(1.5e18) == b
b = numbers.convert_one_to_atto( 1.5 )
assert Decimal( 1.5e18 ) == b
c = numbers.convert_one_to_atto('1')
assert Decimal(1e18) == c
c = numbers.convert_one_to_atto( "1" )
assert Decimal( 1e18 ) == c
d = numbers.convert_one_to_atto(Decimal(1))
assert Decimal(1e18) == d
d = numbers.convert_one_to_atto( Decimal( 1 ) )
assert Decimal( 1e18 ) == d
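# For reference: the conversions exercised above are a fixed-point scaling by
# 10**18 (1 ONE == 10**18 atto). A minimal sketch of that arithmetic, assuming
# pyhmy.numbers scales through Decimal; the helper names below are
# illustrative and are not part of the library:
from decimal import Decimal as _Decimal

_ONE_IN_ATTO = _Decimal( 10 ) ** 18

def _atto_to_one_sketch( amount ):
    # hypothetical re-implementation for illustration only
    return _Decimal( amount ) / _ONE_IN_ATTO

def _one_to_atto_sketch( amount ):
    return _Decimal( amount ) * _ONE_IN_ATTO

assert _atto_to_one_sketch( 1e18 ) == _Decimal( 1 )
assert _one_to_atto_sketch( "1" ) == _Decimal( 1e18 )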

@@ -4,17 +4,14 @@ import socket
import pytest
import requests
from pyhmy.rpc import (
exceptions,
request
)
from pyhmy.rpc import exceptions, request
@pytest.fixture(scope="session", autouse=True)
@pytest.fixture( scope = "session", autouse = True )
def setup():
endpoint = 'http://localhost:9500'
endpoint = "http://localhost:9500"
timeout = 30
method = 'hmy_getNodeMetadata'
method = "hmyv2_getNodeMetadata"
params = []
payload = {
"id": "1",
@@ -23,52 +20,64 @@ def setup():
"params": params
}
headers = {
'Content-Type': 'application/json'
"Content-Type": "application/json"
}
try:
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
except Exception as e:
pytest.skip("can not connect to local blockchain", allow_module_level=True)
pytest.skip(
"can not connect to local blockchain",
allow_module_level = True
)
@pytest.mark.run(order=1)
def test_request_connection_error():
# Find available port
s = socket.socket()
s.bind(('localhost', 0))
port = s.getsockname()[1]
s.bind( ( "localhost", 0 ) )
port = s.getsockname()[ 1 ]
s.close()
if port == 0:
pytest.skip("could not find available port")
bad_endpoint = f'http://localhost:{port}'
pytest.skip( "could not find available port" )
bad_endpoint = f"http://localhost:{port}"
bad_request = None
try:
bad_request = request.rpc_request('hmy_getNodeMetadata', endpoint=bad_endpoint)
bad_request = request.rpc_request(
"hmyv2_getNodeMetadata",
endpoint = bad_endpoint
)
except Exception as e:
assert isinstance(e, exceptions.RequestsError)
assert isinstance( e, exceptions.RequestsError )
assert bad_request is None
@pytest.mark.run(order=2)
def test_request_rpc_error():
error_request = None
try:
error_request = request.rpc_request('hmy_getBalance')
except (exceptions.RequestsTimeoutError, exceptions.RequestsError) as err:
pytest.skip("can not connect to local blockchain", allow_module_level=True)
error_request = request.rpc_request( "hmyv2_getBalance" )
except ( exceptions.RequestsTimeoutError, exceptions.RequestsError ) as err:
pytest.skip(
"can not connect to local blockchain",
allow_module_level = True
)
except Exception as e:
assert isinstance(e, exceptions.RPCError)
assert isinstance( e, exceptions.RPCError )
assert error_request is None
@pytest.mark.run(order=3)
def test_rpc_request():
endpoint = 'http://localhost:9500'
endpoint = "http://localhost:9500"
timeout = 30
method = 'hmy_getNodeMetadata'
method = "hmyv2_getNodeMetadata"
params = []
payload = {
"id": "1",
@@ -77,29 +86,35 @@ def test_rpc_request():
"params": params
}
headers = {
'Content-Type': 'application/json'
"Content-Type": "application/json"
}
response = None
try:
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
except:
pytest.skip("can not connect to local blockchain")
pytest.skip( "can not connect to local blockchain" )
assert response is not None
resp = None
try:
resp = json.loads(response.content)
resp = json.loads( response.content )
except json.decoder.JSONDecodeError as err:
pytest.skip('unable to decode response')
pytest.skip( "unable to decode response" )
assert resp is not None
rpc_response = None
try:
rpc_response = request.rpc_request(method, params, endpoint, timeout)
rpc_response = request.rpc_request( method, params, endpoint, timeout )
except exceptions.RPCError as e:
assert 'error' in resp
assert "error" in resp
if rpc_response is not None:
assert rpc_response == resp
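# For reference, request.rpc_request wraps the same JSON-RPC 2.0 exchange that
# this test performs by hand. A minimal sketch of that exchange follows; the
# helper name is illustrative and is not part of pyhmy.rpc:
def _raw_rpc_call_sketch( method, params, endpoint = "http://localhost:9500", timeout = 30 ):
    payload = { "id": "1", "jsonrpc": "2.0", "method": method, "params": params }
    headers = { "Content-Type": "application/json" }
    # plain HTTP POST of the JSON payload, exactly as in setup() above
    response = requests.request(
        "POST",
        endpoint,
        headers = headers,
        data = json.dumps( payload ),
        timeout = timeout,
        allow_redirects = True,
    )
    return json.loads( response.content )
# e.g. _raw_rpc_call_sketch( "hmyv2_getNodeMetadata", [] ) should match the dict
# returned by request.rpc_request for the same method, per test_rpc_request above.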

@@ -1,58 +1,74 @@
import json
import time
import random
import pytest
import requests
test_validator_address = 'one18tvf56zqjkjnak686lwutcp5mqfnvee35xjnhc'
transfer_raw_transaction = '0xf86f80843b9aca008252080180943ad89a684095a53edb47d7ddc5e034d8133667318a152d02c7e14af68000008027a0ec6c8ad0f70b3c826fa77574c6815a8f73936fafb7b2701a7082ad7d278c95a9a0429f9f166b1c1d385a4ec8f8b86604c26e427c2b0a1c85d9cf4ec6bbd0719508'
tx_hash = '0x1fa20537ea97f162279743139197ecf0eac863278ac1c8ada9a6be5d1e31e633'
create_validator_raw_transaction = '0xf9015680f90105943ad89a684095a53edb47d7ddc5e034d813366731d984746573748474657374847465737484746573748474657374ddc988016345785d8a0000c9880c7d713b49da0000c887b1a2bc2ec500008a022385a827e8155000008b084595161401484a000000f1b0282554f2478661b4844a05a9deb1837aac83931029cb282872f0dcd7239297c499c02ea8da8746d2f08ca2b037e89891f862b86003557e18435c201ecc10b1664d1aea5b4ec59dbfe237233b953dbd9021b86bc9770e116ed3c413fe0334d89562568a10e133d828611f29fee8cdab9719919bbcc1f1bf812c73b9ccd0f89b4f0b9ca7e27e66d58bbb06fcf51c295b1d076cfc878a0228f16f86157860000080843b9aca008351220027a018385211a150ca032c3526cef0aba6a75f99a18cb73f547f67bab746be0c7a64a028be921002c6eb949b3932afd010dfe1de2459ec7fe84403b9d9d8892394a78c'
staking_tx_hash = '0x57ec011aabdeb078a4816502224022f291fa8b07c82bbae8476f514a1d71c730'
# private keys
# 1f84c95ac16e6a50f08d44c7bde7aff8742212fda6e4321fde48bf83bef266dc / one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3 (genesis)
# 3c86ac59f6b038f584be1c08fced78d7c71bb55d5655f81714f3cddc82144c65 / one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37 (transferred 503)
endpoint = 'http://localhost:9500'
endpoint_shard_one = 'http://localhost:9501'
endpoint = "http://localhost:9500"
timeout = 30
headers = {
'Content-Type': 'application/json'
"Content-Type": "application/json"
}
txs = [
# same shard 503 ONE transfer from one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3 to one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37 (0 nonce)
"0xf86f8085174876e8008252088080941f2213a52f7409ff4f103458e6d202e0b3aa805a891b4486fafde57c00008027a0d7c0b20207dcc9dde376822dc3f5625eac6f59a7526111695cdba3e29553ca17a05d4ca9a421ae16f89cbf6848186eaea7a800da732446dff9952e7c1e91d414e3",
# contract creation by one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37 (0 nonce)
"0xf8e88085174876e800830186a080808080b8946080604052348015600f57600080fd5b50607780601d6000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f003327a08bf26ee0120c296b17af507f62606abdb5c5f09a65642c3d30b349b8bfbb3d69a03ec7be51c615bcbf2f1d63f6eaa56cf8d7be81671717f90239619830a81ebc9f",
# cross shard transfer by one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37 (1 nonce)
"0xf86b0185174876e800825208800194c9c6d47ee5f2e3e08d7367ad1a1373ba9dd1724185174876e8008027a02501c517220e9499f14e97c20b0a88cd3b7ba80637bba43ed295422e69a3f300a079b8e1213c9506184aed6ac2eb0b2cb00594c3f9fcdd6c088937ce17fe47107c",
]
stxs = [
# creation of one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3 as validator (1 nonce)
"0xf9017c80f9012994a5241513da9f4463f1d4874b548dfbac29d91f34f83d85416c69636585616c69636591616c6963652e6861726d6f6e792e6f6e6583426f629a41726520796f75206576656e2072656164696e6720746869733fddc988016345785d8a0000c9880c7d713b49da0000c887b1a2bc2ec500008a021e19e0c9bab24000008a152d02c7e14af6800000f1b0a20e70089664a874b00251c5e85d35a73871531306f3af43e02138339d294e6bb9c4eb82162199c6a852afeaa8d68712f862b860ef2c49a2f31fbbd23c21bc176eaf05cd0bebe6832033075d81fea7cff6f9bc1ab42f3b6895c5493fe645d8379d2eaa1413de55a9d3ce412a4f747cb57d52cc4da4754bfb2583ec9a41fe5dd48287f964f276336699959a5fcef3391dc24df00d8a021e19e0c9bab24000000185174876e8008403473bc028a08c1146305eaef981aa24c2f17c8519664d10c99ee42acedbc258749930d31a7ca031dadf114ee6ab9bd09933208094c65037b66c796bcfc57a70158106b37357b0",
# delegation by one1ru3p8ff0wsyl7ncsx3vwd5szuze64qz60upg37 to one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3 (2 nonce)
"0xf88302f4941f2213a52f7409ff4f103458e6d202e0b3aa805a94a5241513da9f4463f1d4874b548dfbac29d91f3489056bc75e2d631000000285174876e80082c35028a02c5e953062dcdfa2de9723639b63bab45705eb6dfbfe7f44536ed266c3c7ca20a0742964e646338e7431874f70715565d99c01c762324355c69db34a9ed9de81d7",
]
tx_hashes = [
"0xc26be5776aa57438bccf196671a2d34f3f22c9c983c0f844c62b2fb90403aa43",
"0xa605852dd2fa39ed42e101c17aaca9d344d352ba9b24b14b9af94ec9cb58b31f",
"0xf73ba634cb96fc0e3e2c9d3b4c91379e223741be4a5aa56e6d6caf49c1ae75cf",
]
stx_hashes = [
"0x400e9831d358f5daccd153cad5bf53650a0d413bd8682ec0ffad55367d162968",
"0xc8177ace2049d9f4eb4a45fd6bd6b16f693573d036322c36774cc00d05a3e24f",
]
assert len( txs ) == len( tx_hashes ), "Mismatch in tx and tx_hash count"
assert len( stxs ) == len( stx_hashes ), "Mismatch in stx and stx_hash count"
@pytest.fixture(scope="session", autouse=True)
@pytest.fixture( scope = "session", autouse = True )
def setup_blockchain():
# return
metadata = _check_connection()
_check_staking_epoch(metadata)
tx_data = _check_funding_transaction()
if not tx_data['result']:
_send_funding_transaction()
time.sleep(20) # Sleep to let cross shard transaction finalize
tx_data = _check_funding_transaction()
if 'error' in tx_data:
pytest.skip(f"Error in hmy_getTransactionByHash reply: {tx_data['error']}", allow_module_level=True)
if not tx_data['result']:
pytest.skip(f"Funding transaction failed: {tx_hash}", allow_module_level=True)
_check_staking_epoch( metadata )
stx_data = _check_staking_transaction()
for i in range( len( txs ) ):
tx = txs[ i ]
tx_hash = tx_hashes[ i ]
_send_transaction( tx, endpoint )
if not _wait_for_transaction_confirmed( tx_hash, endpoint ):
pytest.skip(
"Could not confirm initial transaction #{} on chain"
.format( i ),
allow_module_level = True,
)
if not stx_data['result']:
_send_staking_transaction()
time.sleep(30) # Sleep to let transaction finalize
stx_data = _check_staking_transaction()
if 'error' in stx_data:
pytest.skip(f"Error in hmy_getStakingTransactionByHash reply: {stx_data['error']}", allow_module_level=True)
if not stx_data['result']:
pytest.skip(f"Staking transaction failed: {staking_tx_hash}", allow_module_level=True)
# TODO: Build data object to return data instead of hard coded values in the test files
try:
return int(stx_data['result']['blockNumber'], 16)
except (TypeError, KeyError) as e:
pytest.skip(f"Unexpected reply for hmy_getStakingTransactionByHash: {stx_data['result']}", allow_module_level=True)
for i in range( len( stxs ) ):
stx = stxs[ i ]
stx_hash = stx_hashes[ i ]
_send_staking_transaction( stx, endpoint )
if not _wait_for_staking_transaction_confirmed( stx_hash, endpoint ):
pytest.skip(
"Could not confirm initial staking transaction #{} on chain"
.format( i ),
allow_module_level = True,
)
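# Note on ordering (per the nonce annotations on txs/stxs above): each raw
# transaction is pre-signed with a fixed per-sender nonce, so the fixture sends
# and confirms the plain transactions first and the staking transactions
# afterwards. Submitting them out of nonce order would likely leave the later
# transactions stuck as pending on the localnet.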
def _check_connection():
@@ -60,101 +76,206 @@ def _check_connection():
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_getNodeMetadata',
"params": []
"method": "hmyv2_getNodeMetadata",
"params": [],
}
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
metadata = json.loads(response.content)
if 'error' in metadata:
pytest.skip(f"Error in hmy_getNodeMetadata reply: {metadata['error']}", allow_module_level=True)
if 'chain-config' not in metadata['result']:
pytest.skip("Chain config not found in hmy_getNodeMetadata reply", allow_module_level=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
metadata = json.loads( response.content )
if "error" in metadata:
pytest.skip(
f"Error in hmyv2_getNodeMetadata reply: {metadata['error']}",
allow_module_level = True,
)
if "chain-config" not in metadata[ "result" ]:
pytest.skip(
"Chain config not found in hmyv2_getNodeMetadata reply",
allow_module_level = True,
)
return metadata
except Exception as e:
pytest.skip('Can not connect to local blockchain or bad hmy_getNodeMetadata reply', allow_module_level=True)
pytest.skip(
"Can not connect to local blockchain or bad hmyv2_getNodeMetadata reply",
allow_module_level = True,
)
def _check_staking_epoch(metadata):
def _check_staking_epoch( metadata ):
latest_header = None
try:
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_latestHeader',
"params": []
"method": "hmyv2_latestHeader",
"params": [],
}
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
latest_header = json.loads(response.content)
if 'error' in latest_header:
pytest.skip(f"Error in hmy_latestHeader reply: {latest_header['error']}", allow_module_level=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
latest_header = json.loads( response.content )
if "error" in latest_header:
pytest.skip(
f"Error in hmyv2_latestHeader reply: {latest_header['error']}",
allow_module_level = True,
)
except Exception as e:
pytest.skip('Failed to get hmy_latestHeader reply', allow_module_level=True)
pytest.skip(
"Failed to get hmyv2_latestHeader reply",
allow_module_level = True
)
if metadata and latest_header:
staking_epoch = metadata['result']['chain-config']['staking-epoch']
current_epoch = latest_header['result']['epoch']
staking_epoch = metadata[ "result" ][ "chain-config" ][ "staking-epoch"
]
current_epoch = latest_header[ "result" ][ "epoch" ]
if staking_epoch > current_epoch:
pytest.skip(f'Not staking epoch: current {current_epoch}, staking {staking_epoch}', allow_module_level=True)
pytest.skip(
f"Not staking epoch: current {current_epoch}, staking {staking_epoch}",
allow_module_level = True,
)
def _send_funding_transaction():
def _send_transaction( raw_tx, endpoint ):
try:
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_sendRawTransaction',
"params": [transfer_raw_transaction]
"method": "hmyv2_sendRawTransaction",
"params": [ raw_tx ],
}
response = requests.request('POST', endpoint_shard_one, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
tx = json.loads(response.content)
if 'error' in tx:
pytest.skip(f"Error in hmy_sendRawTransaction reply: {tx['error']}", allow_module_level=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
tx = json.loads( response.content )
if "error" in tx:
pytest.skip(
f"Error in hmyv2_sendRawTransaction reply: {tx['error']}",
allow_module_level = True,
)
except Exception as e:
pytest.skip('Failed to get hmy_sendRawTransaction reply', allow_module_level=True)
pytest.skip(
"Failed to get hmyv2_sendRawTransaction reply",
allow_module_level = True
)
def _check_funding_transaction():
def _check_transaction( tx_hash, endpoint ):
try:
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_getTransactionByHash',
"params": [tx_hash]
"method": "hmyv2_getTransactionByHash",
"params": [ tx_hash ],
}
response = requests.request('POST', endpoint_shard_one, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
tx_data = json.loads(response.content)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
tx_data = json.loads( response.content )
return tx_data
except Exception as e:
pytest.skip('Failed to get hmy_getTransactionByHash reply', allow_module_level=True)
pytest.skip(
"Failed to get hmyv2_getTransactionByHash reply",
allow_module_level = True
)
def _wait_for_transaction_confirmed( tx_hash, endpoint, timeout = 30 ):
start_time = time.time()
while ( time.time() - start_time ) <= timeout:
tx_data = _check_transaction( tx_hash, endpoint )
if tx_data is not None:
block_hash = tx_data[ "result" ].get( "blockHash", "0x00" )
unique_chars = "".join( set( list( block_hash[ 2 : ] ) ) )
if unique_chars != "0":
return True
time.sleep( random.uniform( 0.2, 0.5 ) )
return False
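# Confirmation heuristic used above: a transaction is treated as confirmed once
# its reported blockHash contains any non-zero hex digit; an all-zero hash (the
# "0x00" fallback) means the transaction is still pending. For example:
#   "0x0000...0000" -> the set of digits after "0x" is {"0"} -> still pending
#   "0x1fa2...e633" -> contains non-zero digits -> confirmed
# The loop simply polls _check_transaction with short random sleeps until the
# timeout expires.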
def _send_staking_transaction():
def _send_staking_transaction( raw_tx, endpoint = endpoint ):
try:
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_sendRawStakingTransaction',
"params": [create_validator_raw_transaction]
"method": "hmyv2_sendRawStakingTransaction",
"params": [ raw_tx ],
}
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
staking_tx = json.loads(response.content)
if 'error' in staking_tx:
pytest.skip(f"Error in hmy_sendRawStakingTransaction reply: {staking_tx['error']}", allow_module_level=True)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
staking_tx = json.loads( response.content )
if "error" in staking_tx:
pytest.skip(
f"Error in hmyv2_sendRawStakingTransaction reply: {staking_tx['error']}",
allow_module_level = True,
)
except Exception as e:
pytest.skip('Failed to get hmy_sendRawStakingTransaction reply', allow_module_level=True)
pytest.skip(
"Failed to get hmyv2_sendRawStakingTransaction reply",
allow_module_level = True,
)
def _check_staking_transaction():
def _check_staking_transaction( stx_hash, endpoint = endpoint ):
try:
payload = {
"id": "1",
"jsonrpc": "2.0",
"method": 'hmy_getStakingTransactionByHash',
"params": [staking_tx_hash]
"method": "hmyv2_getStakingTransactionByHash",
"params": [ stx_hash ],
}
response = requests.request('POST', endpoint, headers=headers,
data=json.dumps(payload), timeout=timeout, allow_redirects=True)
stx_data = json.loads(response.content)
response = requests.request(
"POST",
endpoint,
headers = headers,
data = json.dumps( payload ),
timeout = timeout,
allow_redirects = True,
)
stx_data = json.loads( response.content )
return stx_data
except Exception as e:
pytest.skip('Failed to get hmy_getStakingTransactionByHash reply', allow_module_level=True)
pytest.skip(
"Failed to get hmyv2_getStakingTransactionByHash reply",
allow_module_level = True,
)
def _wait_for_staking_transaction_confirmed( tx_hash, endpoint, timeout = 30 ):
answer = False
start_time = time.time()
while ( time.time() - start_time ) <= timeout:
tx_data = _check_staking_transaction( tx_hash, endpoint )
if tx_data is not None:
block_hash = tx_data[ "result" ].get( "blockHash", "0x00" )
unique_chars = "".join( set( list( block_hash[ 2 : ] ) ) )
if unique_chars != "0":
answer = True
time.sleep( random.uniform( 0.2, 0.5 ) )
return answer

@@ -1,84 +1,155 @@
import pytest
import requests
from pyhmy import (
account
)
from pyhmy import account
from pyhmy.rpc import (
exceptions
)
from pyhmy.rpc import exceptions
explorer_endpoint = 'http://localhost:9599'
endpoint_shard_one = 'http://localhost:9501'
local_test_address = 'one1zksj3evekayy90xt4psrz8h6j2v3hla4qwz4ur'
test_validator_address = 'one18tvf56zqjkjnak686lwutcp5mqfnvee35xjnhc'
explorer_endpoint = "http://localhost:9700"
endpoint_shard_one = "http://localhost:9502"
local_test_address = "one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3"
test_validator_address = local_test_address
genesis_block_number = 0
test_block_number = 1
fake_shard = "http://example.com"
def _test_account_rpc(fn, *args, **kwargs):
if not callable(fn):
pytest.fail(f'Invalid function: {fn}')
def _test_account_rpc( fn, *args, **kwargs ):
if not callable( fn ):
pytest.fail( f"Invalid function: {fn}" )
try:
response = fn(*args, **kwargs)
response = fn( *args, **kwargs )
except Exception as e:
if isinstance(e, exceptions.RPCError) and 'does not exist/is not available' in str(e):
pytest.skip(f'{str(e)}')
pytest.fail(f'Unexpected error: {e.__class__} {e}')
if isinstance( e,
exceptions.RPCError
) and "does not exist/is not available" in str( e ):
pytest.skip( f"{str(e)}" )
pytest.fail( f"Unexpected error: {e.__class__} {e}" )
return response
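# _test_account_rpc is a thin wrapper used by every test below: it calls the
# given pyhmy.account function, skips the test when the node replies that the
# requested data "does not exist/is not available" (common on a fresh
# localnet), and fails on any other unexpected exception.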
@pytest.mark.run(order=1)
def test_get_balance(setup_blockchain):
balance = _test_account_rpc(account.get_balance, local_test_address)
assert isinstance(balance, int)
def test_get_balance( setup_blockchain ):
balance = _test_account_rpc( account.get_balance, local_test_address )
assert isinstance( balance, int )
assert balance > 0
@pytest.mark.run(order=2)
def test_get_balance_by_block(setup_blockchain):
balance = _test_account_rpc(account.get_balance_by_block, local_test_address, genesis_block_number)
assert isinstance(balance, int)
def test_get_balance_by_block( setup_blockchain ):
balance = _test_account_rpc(
account.get_balance_by_block,
local_test_address,
genesis_block_number
)
assert isinstance( balance, int )
assert balance > 0
@pytest.mark.run(order=3)
def test_get_true_nonce(setup_blockchain):
true_nonce = _test_account_rpc(account.get_account_nonce, local_test_address, true_nonce=True, endpoint=endpoint_shard_one)
assert isinstance(true_nonce, int)
assert true_nonce > 0
@pytest.mark.run(order=4)
def test_get_pending_nonce(setup_blockchain):
pending_nonce = _test_account_rpc(account.get_account_nonce, local_test_address, endpoint=endpoint_shard_one)
assert isinstance(pending_nonce, int)
assert pending_nonce > 0
@pytest.mark.run(order=5)
def test_get_transaction_history(setup_blockchain):
tx_history = _test_account_rpc(account.get_transaction_history, local_test_address, endpoint=explorer_endpoint)
assert isinstance(tx_history, list)
assert len(tx_history) >= 0
@pytest.mark.run(order=6)
def test_get_staking_transaction_history(setup_blockchain):
staking_tx_history = _test_account_rpc(account.get_staking_transaction_history, test_validator_address, endpoint=explorer_endpoint)
assert isinstance(staking_tx_history, list)
assert len(staking_tx_history) > 0
@pytest.mark.run(order=7)
def test_get_balance_on_all_shards(setup_blockchain):
balances = _test_account_rpc(account.get_balance_on_all_shards, local_test_address)
assert isinstance(balances, list)
assert len(balances) == 2
@pytest.mark.run(order=8)
def test_get_total_balance(setup_blockchain):
total_balance = _test_account_rpc(account.get_total_balance, local_test_address)
assert isinstance(total_balance, int)
def test_get_account_nonce( setup_blockchain ):
true_nonce = _test_account_rpc(
account.get_account_nonce,
local_test_address,
test_block_number,
endpoint = endpoint_shard_one,
)
assert isinstance( true_nonce, int )
def test_get_transaction_history( setup_blockchain ):
tx_history = _test_account_rpc(
account.get_transaction_history,
local_test_address,
endpoint = explorer_endpoint
)
assert isinstance( tx_history, list )
assert len( tx_history ) >= 0
def test_get_staking_transaction_history( setup_blockchain ):
staking_tx_history = _test_account_rpc(
account.get_staking_transaction_history,
test_validator_address,
endpoint = explorer_endpoint,
)
assert isinstance( staking_tx_history, list )
assert len( staking_tx_history ) > 0
def test_get_balance_on_all_shards( setup_blockchain ):
balances = _test_account_rpc(
account.get_balance_on_all_shards,
local_test_address
)
assert isinstance( balances, list )
assert len( balances ) == 2
def test_get_total_balance( setup_blockchain ):
total_balance = _test_account_rpc(
account.get_total_balance,
local_test_address
)
assert isinstance( total_balance, int )
assert total_balance > 0
@pytest.mark.run(order=0)
def test_is_valid_address():
assert account.is_valid_address('one1zksj3evekayy90xt4psrz8h6j2v3hla4qwz4ur')
assert not account.is_valid_address('one1wje75aedczmj4dwjs0812xcg7vx0dy231cajk0')
assert account.is_valid_address(
"one1zksj3evekayy90xt4psrz8h6j2v3hla4qwz4ur"
)
assert not account.is_valid_address(
"one1wje75aedczmj4dwjs0812xcg7vx0dy231cajk0"
)
def test_get_transaction_count( setup_blockchain ):
tx_count = _test_account_rpc(
account.get_transaction_count,
local_test_address,
"latest",
explorer_endpoint
)
assert isinstance( tx_count, int )
assert tx_count > 0
def test_get_transactions_count( setup_blockchain ):
tx_count = _test_account_rpc(
account.get_transactions_count,
local_test_address,
"ALL",
explorer_endpoint
)
assert isinstance( tx_count, int )
def test_get_staking_transactions_count( setup_blockchain ):
tx_count = _test_account_rpc(
account.get_staking_transactions_count,
local_test_address,
"ALL",
explorer_endpoint,
)
assert isinstance( tx_count, int )
def test_errors():
with pytest.raises( exceptions.RPCError ):
account.get_balance( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_balance_by_block( "", 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_account_nonce( "", 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_transaction_count( "", 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_transactions_count( "", 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_transactions_count( "", "ALL", fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_transaction_history( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_staking_transaction_history( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_balance_on_all_shards( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
account.get_total_balance( "", endpoint = fake_shard )

@@ -1,156 +1,369 @@
import pytest
import requests
from pyhmy import (
blockchain
)
from pyhmy.rpc import (
exceptions
)
from pyhmy import blockchain
from pyhmy.rpc import exceptions
test_epoch_number = 0
genesis_block_number = 0
test_block_number = 1
test_block_hash = None
fake_shard = "http://example.com"
address = "one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3"
def _test_blockchain_rpc(fn, *args, **kwargs):
if not callable(fn):
pytest.fail(f'Invalid function: {fn}')
def _test_blockchain_rpc( fn, *args, **kwargs ):
if not callable( fn ):
pytest.fail( f"Invalid function: {fn}" )
try:
response = fn(*args, **kwargs)
response = fn( *args, **kwargs )
except Exception as e:
if isinstance(e, exceptions.RPCError) and 'does not exist/is not available' in str(e):
pytest.skip(f'{str(e)}')
pytest.fail(f'Unexpected error: {e.__class__} {e}')
if isinstance( e,
exceptions.RPCError
) and "does not exist/is not available" in str( e ):
pytest.skip( f"{str(e)}" )
pytest.fail( f"Unexpected error: {e.__class__} {e}" )
return response
@pytest.mark.run(order=1)
def test_get_node_metadata(setup_blockchain):
metadata = _test_blockchain_rpc(blockchain.get_node_metadata)
assert isinstance(metadata, dict)
@pytest.mark.run(order=2)
def test_get_sharding_structure(setup_blockchain):
sharding_structure = _test_blockchain_rpc(blockchain.get_sharding_structure)
assert isinstance(sharding_structure, list)
assert len(sharding_structure) > 0
@pytest.mark.run(order=3)
def test_get_leader_address(setup_blockchain):
leader = _test_blockchain_rpc(blockchain.get_leader_address)
assert isinstance(leader, str)
assert 'one1' in leader
@pytest.mark.run(order=4)
def test_get_block_number(setup_blockchain):
current_block_number = _test_blockchain_rpc(blockchain.get_block_number)
assert isinstance(current_block_number, int)
@pytest.mark.run(order=5)
def test_get_current_epoch(setup_blockchain):
current_epoch = _test_blockchain_rpc(blockchain.get_current_epoch)
assert isinstance(current_epoch, int)
@pytest.mark.run(order=6)
def tset_get_gas_price(setup_blockchain):
gas = _test_blockchain_rpc(blockchain.get_gas_price)
assert isinstance(gas, int)
@pytest.mark.run(order=7)
def test_get_num_peers(setup_blockchain):
peers = _test_blockchain_rpc(blockchain.get_num_peers)
assert isinstance(peers, int)
@pytest.mark.run(order=8)
def test_get_latest_header(setup_blockchain):
header = _test_blockchain_rpc(blockchain.get_latest_header)
assert isinstance(header, dict)
@pytest.mark.run(order=9)
def test_get_latest_headers(setup_blockchain):
header_pair = _test_blockchain_rpc(blockchain.get_latest_headers)
assert isinstance(header_pair, dict)
@pytest.mark.run(order=10)
def test_get_block_by_number(setup_blockchain):
def test_get_node_metadata( setup_blockchain ):
metadata = _test_blockchain_rpc( blockchain.get_node_metadata )
assert isinstance( metadata, dict )
def test_get_sharding_structure( setup_blockchain ):
sharding_structure = _test_blockchain_rpc(
blockchain.get_sharding_structure
)
assert isinstance( sharding_structure, list )
assert len( sharding_structure ) > 0
def test_get_leader_address( setup_blockchain ):
leader = _test_blockchain_rpc( blockchain.get_leader_address )
assert isinstance( leader, str )
assert "one1" in leader
def test_get_block_number( setup_blockchain ):
current_block_number = _test_blockchain_rpc( blockchain.get_block_number )
assert isinstance( current_block_number, int )
def test_get_current_epoch( setup_blockchain ):
current_epoch = _test_blockchain_rpc( blockchain.get_current_epoch )
assert isinstance( current_epoch, int )
def tset_get_gas_price( setup_blockchain ):
gas = _test_blockchain_rpc( blockchain.get_gas_price )
assert isinstance( gas, int )
def test_get_num_peers( setup_blockchain ):
peers = _test_blockchain_rpc( blockchain.get_num_peers )
assert isinstance( peers, int )
def test_get_latest_header( setup_blockchain ):
header = _test_blockchain_rpc( blockchain.get_latest_header )
assert isinstance( header, dict )
def test_get_latest_chain_headers( setup_blockchain ):
header_pair = _test_blockchain_rpc( blockchain.get_latest_chain_headers )
assert isinstance( header_pair, dict )
def test_get_block_by_number( setup_blockchain ):
global test_block_hash
block = _test_blockchain_rpc(blockchain.get_block_by_number, test_block_number)
assert isinstance(block, dict)
assert 'hash' in block.keys()
test_block_hash = block['hash']
block = _test_blockchain_rpc(
blockchain.get_block_by_number,
test_block_number
)
assert isinstance( block, dict )
assert "hash" in block.keys()
test_block_hash = block[ "hash" ]
@pytest.mark.run(order=11)
def test_get_block_by_hash(setup_blockchain):
def test_get_block_by_hash( setup_blockchain ):
if not test_block_hash:
pytest.skip('Failed to get reference block hash')
block = _test_blockchain_rpc(blockchain.get_block_by_hash, test_block_hash)
assert isinstance(block, dict)
pytest.skip( "Failed to get reference block hash" )
block = _test_blockchain_rpc(
blockchain.get_block_by_hash,
test_block_hash
)
assert isinstance( block, dict )
def test_get_block_transaction_count_by_number( setup_blockchain ):
tx_count = _test_blockchain_rpc(
blockchain.get_block_transaction_count_by_number,
test_block_number
)
assert isinstance( tx_count, int )
@pytest.mark.run(order=12)
def test_get_block_transaction_count_by_number(setup_blockchain):
tx_count = _test_blockchain_rpc(blockchain.get_block_transaction_count_by_number, test_block_number)
assert isinstance(tx_count, int)
@pytest.mark.run(order=13)
def test_get_block_transaction_count_by_hash(setup_blockchain):
def test_get_block_transaction_count_by_hash( setup_blockchain ):
if not test_block_hash:
pytest.skip('Failed to get reference block hash')
tx_count = _test_blockchain_rpc(blockchain.get_block_transaction_count_by_hash, test_block_hash)
assert isinstance(tx_count, int)
@pytest.mark.run(order=14)
def test_get_blocks(setup_blockchain):
blocks = _test_blockchain_rpc(blockchain.get_blocks, genesis_block_number, test_block_number)
assert isinstance(blocks, list)
assert len(blocks) == (test_block_number - genesis_block_number + 1)
@pytest.mark.run(order=15)
def test_get_block_signers(setup_blockchain):
block_signers = _test_blockchain_rpc(blockchain.get_block_signers, test_block_number)
assert isinstance(block_signers, list)
assert len(block_signers) > 0
@pytest.mark.run(order=16)
def test_get_validators(setup_blockchain):
validators = _test_blockchain_rpc(blockchain.get_validators, test_epoch_number)
assert isinstance(validators, dict)
assert 'validators' in validators.keys()
assert len(validators['validators']) > 0
@pytest.mark.run(order=17)
def test_get_shard(setup_blockchain):
shard = _test_blockchain_rpc(blockchain.get_shard)
assert isinstance(shard, int)
pytest.skip( "Failed to get reference block hash" )
tx_count = _test_blockchain_rpc(
blockchain.get_block_transaction_count_by_hash,
test_block_hash
)
assert isinstance( tx_count, int )
def test_get_blocks( setup_blockchain ):
blocks = _test_blockchain_rpc(
blockchain.get_blocks,
genesis_block_number,
test_block_number
)
assert isinstance( blocks, list )
assert len( blocks ) == ( test_block_number - genesis_block_number + 1 )
def test_get_block_signers( setup_blockchain ):
block_signers = _test_blockchain_rpc(
blockchain.get_block_signers,
test_block_number
)
assert isinstance( block_signers, list )
assert len( block_signers ) > 0
def test_get_validators( setup_blockchain ):
validators = _test_blockchain_rpc(
blockchain.get_validators,
test_epoch_number
)
assert isinstance( validators, dict )
assert "validators" in validators.keys()
assert len( validators[ "validators" ] ) > 0
def test_get_shard( setup_blockchain ):
shard = _test_blockchain_rpc( blockchain.get_shard )
assert isinstance( shard, int )
assert shard == 0
@pytest.mark.run(order=18)
def test_get_staking_epoch(setup_blockchain):
staking_epoch = _test_blockchain_rpc(blockchain.get_staking_epoch)
assert isinstance(staking_epoch, int)
@pytest.mark.run(order=19)
def test_get_prestaking_epoch(setup_blockchain):
prestaking_epoch = _test_blockchain_rpc(blockchain.get_prestaking_epoch)
assert isinstance(prestaking_epoch, int)
def test_get_staking_epoch( setup_blockchain ):
staking_epoch = _test_blockchain_rpc( blockchain.get_staking_epoch )
assert isinstance( staking_epoch, int )
def test_get_prestaking_epoch( setup_blockchain ):
prestaking_epoch = _test_blockchain_rpc( blockchain.get_prestaking_epoch )
assert isinstance( prestaking_epoch, int )
@pytest.mark.run(order=20)
def test_get_bad_blocks(setup_blockchain):
def test_get_bad_blocks( setup_blockchain ):
# TODO: Remove skip when RPC is fixed
pytest.skip("Known error with hmy_getCurrentBadBlocks")
bad_blocks = _test_blockchain_rpc(blockchain.get_bad_blocks)
assert isinstance(bad_blocks, list)
@pytest.mark.run(order=21)
def test_get_validator_keys(setup_blockchain):
keys = _test_blockchain_rpc(blockchain.get_validator_keys, test_epoch_number)
assert isinstance(keys, list)
assert len(keys) > 0
@pytest.mark.run(order=22)
def test_get_block_signer_keys(setup_blockchain):
keys = _test_blockchain_rpc(blockchain.get_block_signer_keys, test_block_number)
assert isinstance(keys, list)
assert len(keys) > 0
pytest.skip( "Known error with hmyv2_getCurrentBadBlocks" )
bad_blocks = _test_blockchain_rpc( blockchain.get_bad_blocks )
assert isinstance( bad_blocks, list )
def test_get_validator_keys( setup_blockchain ):
keys = _test_blockchain_rpc(
blockchain.get_validator_keys,
test_epoch_number
)
assert isinstance( keys, list )
assert len( keys ) > 0
def test_get_block_signers_keys( setup_blockchain ):
keys = _test_blockchain_rpc(
blockchain.get_block_signers_keys,
test_block_number
)
assert isinstance( keys, list )
assert len( keys ) > 0
def test_chain_id( setup_blockchain ):
chain_id = _test_blockchain_rpc( blockchain.chain_id )
assert isinstance( chain_id, int )
def test_get_peer_info( setup_blockchain ):
peer_info = _test_blockchain_rpc( blockchain.get_peer_info )
assert isinstance( peer_info, dict )
def test_protocol_version( setup_blockchain ):
protocol_version = _test_blockchain_rpc( blockchain.protocol_version )
assert isinstance( protocol_version, int )
def test_is_last_block( setup_blockchain ):
is_last_block = _test_blockchain_rpc( blockchain.is_last_block, 0 )
assert isinstance( is_last_block, bool )
assert not is_last_block
def test_epoch_last_block( setup_blockchain ):
epoch_last_block = _test_blockchain_rpc( blockchain.epoch_last_block, 0 )
assert isinstance( epoch_last_block, int )
def test_get_circulating_supply( setup_blockchain ):
circulating_supply = _test_blockchain_rpc(
blockchain.get_circulating_supply
)
assert isinstance( circulating_supply, str )
def test_get_total_supply( setup_blockchain ):
total_supply = _test_blockchain_rpc( blockchain.get_total_supply )
assert isinstance( total_supply, str ) or total_supply is None
def test_get_last_cross_links( setup_blockchain ):
last_cross_links = _test_blockchain_rpc( blockchain.get_last_cross_links )
assert isinstance( last_cross_links, list )
def test_get_gas_price( setup_blockchain ):
gas_price = _test_blockchain_rpc( blockchain.get_gas_price )
assert isinstance( gas_price, int )
def test_get_version( setup_blockchain ):
version = _test_blockchain_rpc( blockchain.get_version )
assert isinstance( version, int )
def test_get_header_by_number( setup_blockchain ):
header_pair = _test_blockchain_rpc( blockchain.get_header_by_number, 0 )
assert isinstance( header_pair, dict )
def test_get_block_staking_transaction_count_by_number( setup_blockchain ):
tx_count = _test_blockchain_rpc(
blockchain.get_block_staking_transaction_count_by_number,
test_block_number
)
assert isinstance( tx_count, int )
def test_get_block_staking_transaction_count_by_hash( setup_blockchain ):
if not test_block_hash:
pytest.skip( "Failed to get reference block hash" )
tx_count = _test_blockchain_rpc(
blockchain.get_block_staking_transaction_count_by_hash,
test_block_hash
)
assert isinstance( tx_count, int )
def test_is_block_signer( setup_blockchain ):
is_signer = _test_blockchain_rpc(
blockchain.is_block_signer,
test_block_number,
address
)
assert isinstance( is_signer, bool )
def test_get_signed_blocks( setup_blockchain ):
signed_blocks = _test_blockchain_rpc(
blockchain.get_signed_blocks,
address
)
assert isinstance( signed_blocks, int )
def test_in_sync( setup_blockchain ):
in_sync = _test_blockchain_rpc( blockchain.in_sync )
assert isinstance( in_sync, bool )
def test_beacon_in_sync( setup_blockchain ):
beacon_in_sync = _test_blockchain_rpc( blockchain.beacon_in_sync )
assert isinstance( beacon_in_sync, bool )
def test_errors():
with pytest.raises( exceptions.RPCError ):
blockchain.chain_id( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_node_metadata( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_peer_info( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.protocol_version( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_shard( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_staking_epoch( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_prestaking_epoch( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_sharding_structure( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_leader_address( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.is_last_block( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.epoch_last_block( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_circulating_supply( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_total_supply( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_number( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_current_epoch( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_last_cross_links( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_gas_price( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_num_peers( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_version( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_latest_header( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_header_by_number( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_latest_chain_headers( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_by_number( 0, endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_by_hash( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_transaction_count_by_number( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_transaction_count_by_hash( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_staking_transaction_count_by_number(
0,
fake_shard
)
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_staking_transaction_count_by_hash( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_blocks( 0, 1, endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_signers( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_block_signers_keys( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.is_block_signer( 0, "", fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_signed_blocks( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_validators( 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.get_validator_keys( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.in_sync( fake_shard )
with pytest.raises( exceptions.RPCError ):
blockchain.beacon_in_sync( fake_shard )

@@ -0,0 +1,86 @@
import pytest
from pyhmy import contract
from pyhmy.rpc import exceptions
explorer_endpoint = "http://localhost:9599"
contract_tx_hash = "0xa605852dd2fa39ed42e101c17aaca9d344d352ba9b24b14b9af94ec9cb58b31f"
# deployedBytecode from json file
contract_code = "0x6080604052348015600f57600080fd5b506004361060285760003560e01c80634936cd3614602d575b600080fd5b604080516001815290519081900360200190f3fea2646970667358221220fa3fa0e8d0267831a59f4dd5edf39a513d07e98461cb06660ad28d4beda744cd64736f6c634300080f0033"
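# (The creation transaction carries the full init bytecode, while get_code
# returns only the runtime, i.e. deployed, bytecode; that is why this constant
# is the deployedBytecode rather than the bytecode field of the compiled
# artifact.)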
contract_address = None
fake_shard = "http://example.com"
def _test_contract_rpc( fn, *args, **kwargs ):
if not callable( fn ):
pytest.fail( f"Invalid function: {fn}" )
try:
response = fn( *args, **kwargs )
except Exception as e:
if isinstance( e,
exceptions.RPCError
) and "does not exist/is not available" in str( e ):
pytest.skip( f"{str(e)}" )
elif isinstance( e,
exceptions.RPCError
) and "estimateGas returned" in str( e ):
pytest.skip( f"{str(e)}" )
pytest.fail( f"Unexpected error: {e.__class__} {e}" )
return response
def test_get_contract_address_from_hash( setup_blockchain ):
global contract_address
contract_address = _test_contract_rpc(
contract.get_contract_address_from_hash,
contract_tx_hash
)
assert isinstance( contract_address, str )
def test_call( setup_blockchain ):
if not contract_address:
pytest.skip( "Contract address not loaded yet" )
called = _test_contract_rpc( contract.call, contract_address, "latest" )
assert isinstance( called, str ) and called.startswith( "0x" )
def test_estimate_gas( setup_blockchain ):
if not contract_address:
pytest.skip( "Contract address not loaded yet" )
gas = _test_contract_rpc( contract.estimate_gas, contract_address )
assert isinstance( gas, int )
def test_get_code( setup_blockchain ):
if not contract_address:
pytest.skip( "Contract address not loaded yet" )
code = _test_contract_rpc( contract.get_code, contract_address, "latest" )
assert code == contract_code
def test_get_storage_at( setup_blockchain ):
if not contract_address:
pytest.skip( "Contract address not loaded yet" )
storage = _test_contract_rpc(
contract.get_storage_at,
contract_address,
"0x0",
"latest"
)
assert isinstance( storage, str ) and storage.startswith( "0x" )
def test_errors():
with pytest.raises( exceptions.RPCError ):
contract.get_contract_address_from_hash( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
contract.call( "", "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
contract.estimate_gas( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
contract.get_code( "", "latest", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
contract.get_storage_at( "", 1, "latest", endpoint = fake_shard )

@@ -0,0 +1,120 @@
from pyhmy import signing
"""
Test signature source (node.js)
import { Transaction, RLPSign, TxStatus } from '@harmony-js/transaction';
import { HttpProvider, Messenger } from '@harmony-js/network';
import { ChainType, ChainID } from '@harmony-js/utils';
const provider = new HttpProvider('http://localhost:9500');
let privateKey = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48'
let hmyMessenger = new Messenger(provider, ChainType.Ethereum, ChainID.Default);
let transaction: Transaction = new Transaction(
{
gasLimit: 100,
gasPrice: 1,
to: "one1z3u3d9expexf5u03sjzvn7vhkvywtye9nqmmlu",
value: 5,
nonce: 2,
},
hmyMessenger,
TxStatus.INTIALIZED,
);
console.log('Unsigned transaction')
let payload = transaction.txPayload
console.log(payload)
let signed = RLPSign(transaction, privateKey);
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_eth_transaction():
transaction_dict = {
"nonce": 2,
"gasPrice": 1,
"gas": 100, # signing.py uses Ether, which by default calls it gas
"to": "0x14791697260e4c9a71f18484c9f997b308e59325",
"value": 5,
}
signed_tx = signing.sign_transaction(
transaction_dict,
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
)
assert (
signed_tx.rawTransaction.hex() ==
"0xf85d0201649414791697260e4c9a71f18484c9f997b308e5932505801ca0b364f4296bfd3231889d1b9ac94c68abbcb8ee6a6c7a5fa412ac82b5b7b0d5d1a02233864842ab28ee4f99c207940a867b0f8534ca362836190792816b48dde3b1"
)
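# Hedged sanity check (not part of the original test): the transaction above is
# plain Ethereum RLP with no shardID/toShardID fields, so the signer can be
# recovered with eth-account directly. A sketch, assuming the eth-account API
# that signing.sign_transaction builds on; the function name is illustrative:
def _recover_eth_sender_sketch():
    from eth_account import Account
    key = "4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48"
    signed = signing.sign_transaction(
        {
            "nonce": 2,
            "gasPrice": 1,
            "gas": 100,
            "to": "0x14791697260e4c9a71f18484c9f997b308e59325",
            "value": 5,
        },
        key,
    )
    # recover_transaction returns the checksummed sender address
    assert Account.recover_transaction( signed.rawTransaction ) == Account.from_key( key ).address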
"""
Test signature source (node.js)
import { Transaction, RLPSign, TxStatus } from '@harmony-js/transaction';
import { HttpProvider, Messenger } from '@harmony-js/network';
import { ChainType, ChainID } from '@harmony-js/utils';
const provider = new HttpProvider('http://localhost:9500');
let privateKey = '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48'
let hmyMessenger = new Messenger(provider, ChainType.Harmony, ChainID.HmyMainnet);
let transaction: Transaction = new Transaction(
{
gasLimit: 100,
gasPrice: 1,
to: "one1z3u3d9expexf5u03sjzvn7vhkvywtye9nqmmlu",
value: 5,
nonce: 2,
shardID: 0,
toShardID: 1
},
hmyMessenger,
TxStatus.INTIALIZED,
);
console.log('Unsigned transaction')
let payload = transaction.txPayload
console.log(payload)
let signed = RLPSign(transaction, privateKey);
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_hmy_transaction():
transaction_dict = {
"nonce": 2,
"gasPrice": 1,
"gas": 100, # signing.py uses Ether, which by default calls it gas
"to": "0x14791697260e4c9a71f18484c9f997b308e59325",
"value": 5,
"shardID": 0,
"toShardID": 1,
"chainId": "HmyMainnet",
}
signed_tx = signing.sign_transaction(
transaction_dict,
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
)
assert (
signed_tx.rawTransaction.hex() ==
"0xf85f02016480019414791697260e4c9a71f18484c9f997b308e59325058026a02a203357ca6d7cdec981ad3d3692ad2c9e24536a9b6e7b486ce2f94f28c7563ea010d38cd0312a153af0aa7d8cd986040c36118bba373cb94e3e86fd4aedce904d"
)
def test_hmy_eth_compatible_transaction():
transaction_dict = {
"chainId": 1666600000,
"gas": 21000,
"gasPrice": 100000000000,
"nonce": 0,
"shardID": 0,
"to": "0x19e7e376e7c213b7e7e7e46cc70a5dd086daff2a",
"toShardID": 1,
"value": 1000000000000000000
}
signed_tx = signing.sign_transaction(
transaction_dict,
"0x1111111111111111111111111111111111111111111111111111111111111111",
)
assert (
signed_tx.rawTransaction.hex() ==
"0xf8728085174876e80082520880019419e7e376e7c213b7e7e7e46cc70a5dd086daff2a880de0b6b3a76400008084c6ac98a3a0322cca082c3ca0a1d9ad5fffb4dc0e09ade49b4b0e3b0c9dfa5f6288bc7363d6a05604874964abaaf364e8b10108e8bfed5561c341aa5e4abb92b2c6f4c009ef4c"
)
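# The recovery value v in the raw transaction above follows EIP-155:
# v = chainId * 2 + 35 (or + 36). For chainId 1666600000 that gives
# 3333200035 / 3333200036, i.e. 0xc6ac98a3 / 0xc6ac98a4; 0xc6ac98a3 is exactly
# the 4-byte value that appears before the r component in the hex above.
assert 1666600000 * 2 + 35 == 0xC6AC98A3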

@@ -1,93 +1,248 @@
import pytest
import requests
from pyhmy import (
staking
)
from pyhmy import staking
from pyhmy.rpc import (
exceptions
)
from pyhmy.rpc import exceptions
explorer_endpoint = "http://localhost:9700"
test_validator_address = "one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3"
fake_shard = "http://example.com"
explorer_endpoint = 'http://localhost:9599'
test_validator_address = 'one18tvf56zqjkjnak686lwutcp5mqfnvee35xjnhc'
def _test_staking_rpc(fn, *args, **kwargs):
if not callable(fn):
pytest.fail(f'Invalid function: {fn}')
def _test_staking_rpc( fn, *args, **kwargs ):
if not callable( fn ):
pytest.fail( f"Invalid function: {fn}" )
try:
response = fn(*args, **kwargs)
response = fn( *args, **kwargs )
except Exception as e:
if isinstance(e, exceptions.RPCError) and 'does not exist/is not available' in str(e):
pytest.skip(f'{str(e)}')
pytest.fail(f'Unexpected error: {e.__class__} {e}')
if isinstance( e,
exceptions.RPCError
) and "does not exist/is not available" in str( e ):
pytest.skip( f"{str(e)}" )
pytest.fail( f"Unexpected error: {e.__class__} {e}" )
return response
@pytest.mark.run(order=1)
def test_get_all_validator_addresses(setup_blockchain):
validator_addresses = _test_staking_rpc(staking.get_all_validator_addresses)
assert isinstance(validator_addresses, list)
assert len(validator_addresses) > 0
def test_get_all_validator_addresses( setup_blockchain ):
validator_addresses = _test_staking_rpc(
staking.get_all_validator_addresses
)
assert isinstance( validator_addresses, list )
assert len( validator_addresses ) > 0
assert test_validator_address in validator_addresses
@pytest.mark.run(order=2)
def test_get_validator_information(setup_blockchain):
info = _test_staking_rpc(staking.get_validator_information, test_validator_address)
assert isinstance(info, dict)
@pytest.mark.run(order=3)
def test_get_all_validator_information(setup_blockchain):
all_validator_information = _test_staking_rpc(staking.get_all_validator_information)
assert isinstance(all_validator_information, list)
assert len(all_validator_information) > 0
@pytest.mark.run(order=4)
def test_get_delegations_by_delegator(setup_blockchain):
delegations = _test_staking_rpc(staking.get_delegations_by_delegator, test_validator_address)
assert isinstance(delegations, list)
assert len(delegations) > 0
@pytest.mark.run(order=5)
def test_get_delegations_by_validator(setup_blockchain):
delegations = _test_staking_rpc(staking.get_delegations_by_validator, test_validator_address)
assert isinstance(delegations, list)
assert len(delegations) > 0
@pytest.mark.run(order=6)
def test_get_current_utility_metrics(setup_blockchain):
metrics = _test_staking_rpc(staking.get_current_utility_metrics)
assert isinstance(metrics, dict)
@pytest.mark.run(order=7)
def test_get_staking_network_info(setup_blockchain):
info = _test_staking_rpc(staking.get_staking_network_info)
assert isinstance(info, dict)
@pytest.mark.run(order=8)
def test_get_super_committees(setup_blockchain):
committee = _test_staking_rpc(staking.get_super_committees)
assert isinstance(committee, dict)
@pytest.mark.run(order=9)
def test_get_raw_median_stake_snapshot(setup_blockchain):
median_stake = _test_staking_rpc(staking.get_raw_median_stake_snapshot)
assert isinstance(median_stake, dict)
@pytest.mark.run(order=10)
def test_get_validator_information_by_block(setup_blockchain):
# Apparently validator information not created until block after create-validator transaction is accepted, so +1 block
info = _test_staking_rpc(staking.get_validator_information_by_block, test_validator_address, setup_blockchain + 1, endpoint=explorer_endpoint)
assert isinstance(info, dict)
@pytest.mark.run(order=11)
def test_get_validator_information_by_block(setup_blockchain):
def test_get_validator_information( setup_blockchain ):
info = _test_staking_rpc(
staking.get_validator_information,
test_validator_address
)
assert isinstance( info, dict )
def test_get_all_validator_information( setup_blockchain ):
all_validator_information = _test_staking_rpc(
staking.get_all_validator_information
)
assert isinstance( all_validator_information, list )
assert len( all_validator_information ) > 0
def test_get_delegations_by_delegator( setup_blockchain ):
delegations = _test_staking_rpc(
staking.get_delegations_by_delegator,
test_validator_address
)
assert isinstance( delegations, list )
assert len( delegations ) > 0
def test_get_delegations_by_validator( setup_blockchain ):
delegations = _test_staking_rpc(
staking.get_delegations_by_validator,
test_validator_address
)
assert isinstance( delegations, list )
assert len( delegations ) > 0
def test_get_current_utility_metrics( setup_blockchain ):
metrics = _test_staking_rpc( staking.get_current_utility_metrics )
assert isinstance( metrics, dict )
def test_get_staking_network_info( setup_blockchain ):
info = _test_staking_rpc( staking.get_staking_network_info )
assert isinstance( info, dict )
def test_get_super_committees( setup_blockchain ):
committee = _test_staking_rpc( staking.get_super_committees )
assert isinstance( committee, dict )
def test_get_raw_median_stake_snapshot( setup_blockchain ):
median_stake = _test_staking_rpc( staking.get_raw_median_stake_snapshot )
assert isinstance( median_stake, dict )
def test_get_validator_information_by_block( setup_blockchain ):
# Apparently validator information not created until block after create-validator transaction is accepted, so +1 block
info = _test_staking_rpc(staking.get_all_validator_information_by_block, setup_blockchain + 1, endpoint=explorer_endpoint)
assert isinstance(info, list)
info = _test_staking_rpc(
staking.get_validator_information_by_block_number,
test_validator_address,
"latest",
endpoint = explorer_endpoint,
)
assert isinstance( info, dict )
@pytest.mark.run(order=12)
def test_get_delegations_by_delegator_by_block(setup_blockchain):
delegations = _test_staking_rpc(staking.get_delegations_by_delegator_by_block, test_validator_address, setup_blockchain + 1, endpoint=explorer_endpoint)
assert isinstance(delegations, list)
def test_get_all_validator_information_by_block( setup_blockchain ):
# Apparently validator information not created until block after create-validator transaction is accepted, so +1 block
info = _test_staking_rpc(
staking.get_all_validator_information_by_block_number,
"latest",
endpoint = explorer_endpoint,
)
assert isinstance( info, list )
def test_get_delegations_by_delegator_by_block( setup_blockchain ):
delegations = _test_staking_rpc(
staking.get_delegations_by_delegator_by_block_number,
test_validator_address,
"latest",
endpoint = explorer_endpoint,
)
assert isinstance( delegations, list )
def test_get_elected_validator_addresses( setup_blockchain ):
validator_addresses = _test_staking_rpc(
staking.get_elected_validator_addresses
)
assert isinstance( validator_addresses, list )
assert len( validator_addresses ) > 0
def test_get_validators( setup_blockchain ):
validators = _test_staking_rpc( staking.get_validators, 2 )
assert isinstance( validators, dict )
assert len( validators[ "validators" ] ) > 0
def test_get_validator_keys( setup_blockchain ):
validators = _test_staking_rpc( staking.get_validator_keys, 2 )
assert isinstance( validators, list )
def test_get_validator_self_delegation( setup_blockchain ):
self_delegation = _test_staking_rpc(
staking.get_validator_self_delegation,
test_validator_address
)
assert isinstance( self_delegation, int )
assert self_delegation > 0
def test_get_validator_total_delegation( setup_blockchain ):
total_delegation = _test_staking_rpc(
staking.get_validator_total_delegation,
test_validator_address
)
assert isinstance( total_delegation, int )
assert total_delegation > 0
def test_get_all_delegation_information( setup_blockchain ):
delegation_information = _test_staking_rpc(
staking.get_all_delegation_information,
0
)
assert isinstance( delegation_information, list )
assert len( delegation_information ) > 0
def test_get_delegation_by_delegator_and_validator( setup_blockchain ):
delegation_information = _test_staking_rpc(
staking.get_delegation_by_delegator_and_validator,
test_validator_address,
test_validator_address,
)
assert isinstance( delegation_information, dict )
def test_get_available_redelegation_balance( setup_blockchain ):
redelegation_balance = _test_staking_rpc(
staking.get_available_redelegation_balance,
test_validator_address
)
assert isinstance( redelegation_balance, int )
assert redelegation_balance == 0
def test_get_total_staking( setup_blockchain ):
total_staking = _test_staking_rpc( staking.get_total_staking )
assert isinstance( total_staking, int )
if (
staking.get_validator_information(
test_validator_address,
explorer_endpoint
)[ "active-status" ] == "active"
):
assert total_staking > 0
def test_errors():
with pytest.raises( exceptions.RPCError ):
staking.get_all_validator_addresses( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validator_information( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_elected_validator_addresses( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validators( 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validator_keys( 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validator_information_by_block_number( "", 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_all_validator_information( 0, fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validator_self_delegation( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_validator_total_delegation( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_all_validator_information_by_block_number(
1,
1,
fake_shard
)
with pytest.raises( exceptions.RPCError ):
staking.get_all_delegation_information( 1, fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_delegations_by_delegator( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_delegations_by_delegator_by_block_number(
"",
1,
fake_shard
)
with pytest.raises( exceptions.RPCError ):
staking.get_delegation_by_delegator_and_validator( "", "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_available_redelegation_balance( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_delegations_by_validator( "", fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_current_utility_metrics( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_staking_network_info( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_super_committees( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_total_staking( fake_shard )
with pytest.raises( exceptions.RPCError ):
staking.get_raw_median_stake_snapshot( fake_shard )

@@ -0,0 +1,112 @@
from pyhmy import staking_signing, staking_structures
from pyhmy.numbers import convert_one_to_atto
# other transactions (create/edit validator) are in test_validator.py
# test_undelegate is the same as test_delegate (except the directive), so it has been omitted; a sketch follows test_delegate below
# staking transactions without a chain id have been omitted as well, since the node does not accept them anyway
"""
let stakingTx
let stakeMsg3: CollectRewards = new CollectRewards(
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9'
)
stakingTx = new StakingTransaction(
Directive.DirectiveCollectRewards,
stakeMsg3,
2, // nonce
numberToHex(new Unit('1').asOne().toWei()), // gasPrice
100, // gasLimit
null, // chainId
);
const signed = stakingTx.rlpSign('4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
console.log( 'Signed transaction' )
console.log(signed)
"""
# def test_collect_rewards_no_chain_id():
# transaction_dict = {
# 'directive': staking_structures.Directive.CollectRewards,
# 'delegatorAddress': 'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
# 'nonce': 2,
# 'gasPrice': int(convert_one_to_atto(1)),
# 'gasLimit': 100,
# }
# signed_tx = staking_signing.sign_staking_transaction(transaction_dict, '4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
# assert signed_tx.rawTransaction.hex() == '0xf85a04d594ebcd16e8c1d8f493ba04e99a56474122d81a9c5823a0490e4ceb747563ba40da3e0db8a65133cf6f6ae4c48a24866cd6aa1f0d6c2414a06dbd51a67b35b5685e7b7420cba26e63b0e7d3c696fc6cb69d48e54fcad280e9'
"""
let stakingTx
let stakeMsg3: CollectRewards = new CollectRewards(
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9'
)
stakingTx = new StakingTransaction(
Directive.DirectiveCollectRewards,
stakeMsg3,
2, // nonce
numberToHex(new Unit('1').asOne().toWei()), // gasPrice
100, // gasLimit
1, // chainId
);
const signed = stakingTx.rlpSign('4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_collect_rewards_chain_id():
transaction_dict = {
"directive": staking_structures.Directive.CollectRewards,
"delegatorAddress": "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9",
"nonce": 2,
"gasPrice": int(convert_one_to_atto(1)),
"gasLimit": 100,
"chainId": 1, # with chainId for coverage
}
signed_tx = staking_signing.sign_staking_transaction(
transaction_dict,
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
)
assert (
signed_tx.rawTransaction.hex() ==
"0xf86504d594ebcd16e8c1d8f493ba04e99a56474122d81a9c5802880de0b6b3a76400006425a055d6c3c0d8e7a1e75152db361a2ed47f5ab54f6f19b0d8e549953dbdf13ba647a076e1367dfca38eae3bd0e8da296335acabbaeb87dc17e47ebe4942db29334099"
)
"""
let stakingTx
let stakeMsg4: Delegate = new Delegate(
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
5
)
stakingTx = new StakingTransaction(
Directive.DirectiveDelegate,
stakeMsg4,
2, // nonce
numberToHex(new Unit('1').asOne().toWei()), // gasPrice
100, // gasLimit
2, // chainId
);
const signed = stakingTx.rlpSign('4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_delegate():
transaction_dict = {
"directive": staking_structures.Directive.Delegate,
"delegatorAddress": "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9",
"validatorAddress": "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9",
"amount": 5,
"nonce": 2,
"gasPrice": int( convert_one_to_atto( 1 ) ),
"gasLimit": 100,
"chainId": 2,
}
signed_tx = staking_signing.sign_staking_transaction(
transaction_dict,
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
)
assert (
signed_tx.rawTransaction.hex() ==
"0xf87b02eb94ebcd16e8c1d8f493ba04e99a56474122d81a9c5894ebcd16e8c1d8f493ba04e99a56474122d81a9c580502880de0b6b3a76400006428a0c856fd483a989ca4db4b5257f6996729527828fb21ec13cc65f0bffe6c015ab1a05e9d3c92742e8cb7450bebdfb7ad277ccbfc9fa0719db0b12a715a0a173cadd6"
)
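# As noted at the top of this file, test_undelegate would mirror test_delegate
# with only the directive changed, so it is not part of the suite. The sketch
# below (defined but never called) shows the shape such a test could take; no
# expected raw-transaction hex is asserted, since none was generated here.
def _example_undelegate_signing():
    transaction_dict = {
        "directive": staking_structures.Directive.Undelegate,
        "delegatorAddress": "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9",
        "validatorAddress": "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9",
        "amount": 5,
        "nonce": 2,
        "gasPrice": int( convert_one_to_atto( 1 ) ),
        "gasLimit": 100,
        "chainId": 2,
    }
    signed_tx = staking_signing.sign_staking_transaction(
        transaction_dict,
        "4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
    )
    return signed_tx.rawTransaction.hex()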

@@ -1,144 +1,266 @@
import pytest
import requests
from pyhmy import (
transaction
)
from pyhmy import transaction
from pyhmy.rpc import (
exceptions
)
from pyhmy.rpc import exceptions
endpoint = "http://localhost:9500"
endpoint_shard_one = "http://localhost:9502"
fake_shard = "http://example.com"
localhost_shard_one = 'http://localhost:9501'
tx_hash = '0x1fa20537ea97f162279743139197ecf0eac863278ac1c8ada9a6be5d1e31e633'
# previously sent txs to get and check
tx_hash = "0xc26be5776aa57438bccf196671a2d34f3f22c9c983c0f844c62b2fb90403aa43"
tx_block_num = None
tx_block_hash = None
cx_hash = '0x1fa20537ea97f162279743139197ecf0eac863278ac1c8ada9a6be5d1e31e633'
stx_hash = '0x57ec011aabdeb078a4816502224022f291fa8b07c82bbae8476f514a1d71c730'
tx_index = None
cx_hash = "0xf73ba634cb96fc0e3e2c9d3b4c91379e223741be4a5aa56e6d6caf49c1ae75cf"
stx_hash = "0xc8177ace2049d9f4eb4a45fd6bd6b16f693573d036322c36774cc00d05a3e24f"
stx_block_num = None
stx_block_hash = None
test_index = 0
stx_index = None
# new txs to send and check
raw_tx = "0xf86f0385174876e800825208808094c9c6d47ee5f2e3e08d7367ad1a1373ba9dd172418905b12aefafa80400008027a07a4952b90bf38723a9197179a8e6d2e9b3a86fd6da4e66a9cf09fdc59783f757a053910798b311245525bd77d6119332458c2855102e4fb9e564f6a3b710d18bb0"
raw_tx_hash = "0x7ccd80f8513f76ec58b357c7a82a12a95e025d88f1444e953f90e3d86e222571"
raw_stx = "0xf88302f494c9c6d47ee5f2e3e08d7367ad1a1373ba9dd1724194a5241513da9f4463f1d4874b548dfbac29d91f3489056bc75e2d631000008085174876e80082c35027a0808ea7d27adf3b1f561e8da4676814084bb75ac541b616bece87c6446e6cc54ea02f19f0b14240354bd42ad60b0c7189873c0be87044e13072b0981a837ca76f64"
raw_stx_hash = "0xe7d07ef6d9fca595a14ceb0ca917bece7bedb15efe662300e9334a32ac1da629"
raw_tx = '0xf86f80843b9aca008252080180943ad89a684095a53edb47d7ddc5e034d8133667318a152d02c7e14af68000008027a0ec6c8ad0f70b3c826fa77574c6815a8f73936fafb7b2701a7082ad7d278c95a9a0429f9f166b1c1d385a4ec8f8b86604c26e427c2b0a1c85d9cf4ec6bbd0719508'
raw_stx = '0xf9015680f90105943ad89a684095a53edb47d7ddc5e034d813366731d984746573748474657374847465737484746573748474657374ddc988016345785d8a0000c9880c7d713b49da0000c887b1a2bc2ec500008a022385a827e8155000008b084595161401484a000000f1b0282554f2478661b4844a05a9deb1837aac83931029cb282872f0dcd7239297c499c02ea8da8746d2f08ca2b037e89891f862b86003557e18435c201ecc10b1664d1aea5b4ec59dbfe237233b953dbd9021b86bc9770e116ed3c413fe0334d89562568a10e133d828611f29fee8cdab9719919bbcc1f1bf812c73b9ccd0f89b4f0b9ca7e27e66d58bbb06fcf51c295b1d076cfc878a0228f16f86157860000080843b9aca008351220027a018385211a150ca032c3526cef0aba6a75f99a18cb73f547f67bab746be0c7a64a028be921002c6eb949b3932afd010dfe1de2459ec7fe84403b9d9d8892394a78c'
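# The raw payloads above were signed offline. A hedged sketch (defined but not
# called) of how a comparable payload could be produced with pyhmy's signing
# helper; the field names, recipient, and throwaway private key below are
# illustrative assumptions, not the values behind `raw_tx`.
def _example_sign_raw_transaction():
    from pyhmy import signing
    example_tx = {
        "chainId": 2,  # localnet chain id
        "nonce": 3,
        "gasPrice": int( 1e11 ),
        "gas": 21000,
        "to": "0xc9c6d47ee5f2e3e08d7367ad1a1373ba9dd17241",
        "value": int( 1e18 ),
        "shardID": 0,
        "toShardID": 0,
    }
    signed = signing.sign_transaction( example_tx, "0x" + "11" * 32 )
    return signed.rawTransaction.hex()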
def _test_transaction_rpc(fn, *args, **kwargs):
if not callable(fn):
pytest.fail(f'Invalid function: {fn}')
def _test_transaction_rpc( fn, *args, **kwargs ):
if not callable( fn ):
pytest.fail( f"Invalid function: {fn}" )
try:
response = fn(*args, **kwargs)
response = fn( *args, **kwargs )
except Exception as e:
if isinstance(e, exceptions.RPCError) and 'does not exist/is not available' in str(e):
pytest.skip(f'{str(e)}')
pytest.fail(f'Unexpected error: {e.__class__} {e}')
if isinstance( e,
exceptions.RPCError
) and "does not exist/is not available" in str( e ):
pytest.skip( f"{str(e)}" )
pytest.fail( f"Unexpected error: {e.__class__} {e}" )
return response
@pytest.mark.run(order=1)
def test_get_pending_transactions(setup_blockchain):
pool = _test_transaction_rpc(transaction.get_pending_transactions)
assert isinstance(pool, list)
@pytest.mark.run(order=2)
def test_get_transaction_by_hash(setup_blockchain):
tx = _test_transaction_rpc(transaction.get_transaction_by_hash, tx_hash, endpoint=localhost_shard_one)
def test_get_pending_transactions( setup_blockchain ):
pool = _test_transaction_rpc( transaction.get_pending_transactions )
assert isinstance( pool, list )
def test_get_transaction_by_hash( setup_blockchain ):
tx = _test_transaction_rpc(
transaction.get_transaction_by_hash,
tx_hash,
endpoint = endpoint
)
assert tx
assert isinstance(tx, dict)
assert 'blockNumber' in tx.keys()
assert 'blockHash' in tx.keys()
assert isinstance( tx, dict )
assert "blockNumber" in tx.keys()
assert "blockHash" in tx.keys()
global tx_block_num
tx_block_num = int(tx['blockNumber'], 0)
tx_block_num = int( tx[ "blockNumber" ] )
global tx_block_hash
tx_block_hash = tx['blockHash']
tx_block_hash = tx[ "blockHash" ]
global tx_index
tx_index = int( tx[ "transactionIndex" ] )
@pytest.mark.run(order=3)
def test_get_transaction_by_block_hash_and_index(setup_blockchain):
def test_get_transaction_by_block_hash_and_index( setup_blockchain ):
if not tx_block_hash:
pytest.skip('Failed to get reference block hash')
tx = _test_transaction_rpc(transaction.get_transaction_by_block_hash_and_index,
tx_block_hash, test_index, endpoint=localhost_shard_one)
pytest.skip( "Failed to get reference block hash" )
tx = _test_transaction_rpc(
transaction.get_transaction_by_block_hash_and_index,
tx_block_hash,
tx_index,
endpoint = endpoint,
)
assert tx
assert isinstance(tx, dict)
assert isinstance( tx, dict )
@pytest.mark.run(order=4)
def test_get_transaction_by_block_number_and_index(setup_blockchain):
def test_get_transaction_by_block_number_and_index( setup_blockchain ):
if not tx_block_num:
pytest.skip('Failed to get reference block num')
tx = _test_transaction_rpc(transaction.get_transaction_by_block_number_and_index, tx_block_num, test_index,
endpoint=localhost_shard_one)
pytest.skip( "Failed to get reference block num" )
tx = _test_transaction_rpc(
transaction.get_transaction_by_block_number_and_index,
tx_block_num,
tx_index,
endpoint = endpoint,
)
assert tx
assert isinstance(tx, dict)
assert isinstance( tx, dict )
@pytest.mark.run(order=5)
def test_get_transaction_receipt(setup_blockchain):
tx_receipt = _test_transaction_rpc(transaction.get_transaction_receipt, tx_hash, endpoint=localhost_shard_one)
def test_get_transaction_receipt( setup_blockchain ):
tx_receipt = _test_transaction_rpc(
transaction.get_transaction_receipt,
tx_hash,
endpoint = endpoint
)
assert tx_receipt
assert isinstance(tx_receipt, dict)
@pytest.mark.run(order=6)
def test_get_transaction_error_sink(setup_blockchain):
errors = _test_transaction_rpc(transaction.get_transaction_error_sink)
assert isinstance(errors, list)
@pytest.mark.run(order=7)
def test_send_raw_transaction(setup_blockchain):
test_tx_hash = _test_transaction_rpc(transaction.send_raw_transaction, raw_tx)
assert isinstance(test_tx_hash, str)
assert test_tx_hash == tx_hash
@pytest.mark.run(order=8)
def test_get_pending_cx_receipts(setup_blockchain):
pending = _test_transaction_rpc(transaction.get_pending_cx_receipts)
assert isinstance(pending, list)
@pytest.mark.run(order=9)
def test_get_cx_receipt_by_hash(setup_blockchain):
cx = _test_transaction_rpc(transaction.get_cx_receipt_by_hash, cx_hash)
assert isinstance( tx_receipt, dict )
def test_get_transaction_error_sink( setup_blockchain ):
errors = _test_transaction_rpc( transaction.get_transaction_error_sink )
assert isinstance( errors, list )
def test_send_and_confirm_raw_transaction( setup_blockchain ):
# Note: this test is not idempotent, since the localnet rejects transactions that were previously finalized; see the sketch after this test
test_tx = _test_transaction_rpc(
transaction.send_and_confirm_raw_transaction,
raw_tx
)
assert isinstance( test_tx, dict )
assert test_tx[ "hash" ] == raw_tx_hash
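# Hedged sketch (defined but not invoked) of one way to make the send step
# idempotent: fall back to looking up the already-confirmed transaction when
# the node rejects a resend. The "already" substring check is an assumption
# about the node's error text and may need adjusting.
def _send_raw_transaction_idempotent( signed_payload, expected_hash ):
    try:
        return transaction.send_and_confirm_raw_transaction( signed_payload )
    except exceptions.RPCError as error:
        if "already" in str( error ).lower():
            return transaction.get_transaction_by_hash( expected_hash )
        raise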
def test_get_pending_cx_receipts( setup_blockchain ):
pending = _test_transaction_rpc( transaction.get_pending_cx_receipts )
assert isinstance( pending, list )
def test_get_cx_receipt_by_hash( setup_blockchain ):
cx = _test_transaction_rpc(
transaction.get_cx_receipt_by_hash,
cx_hash,
endpoint_shard_one
)
assert cx
assert isinstance(cx, dict)
assert isinstance( cx, dict )
@pytest.mark.run(order=10)
def test_resend_cx_receipt(setup_blockchain):
sent = _test_transaction_rpc(transaction.resend_cx_receipt, cx_hash)
assert isinstance(sent, bool)
assert not sent
def test_resend_cx_receipt( setup_blockchain ):
sent = _test_transaction_rpc( transaction.resend_cx_receipt, cx_hash )
assert isinstance( sent, bool )
assert sent
@pytest.mark.run(order=11)
def test_get_staking_transaction_by_hash(setup_blockchain):
staking_tx = _test_transaction_rpc(transaction.get_staking_transaction_by_hash, stx_hash)
def test_get_staking_transaction_by_hash( setup_blockchain ):
staking_tx = _test_transaction_rpc(
transaction.get_staking_transaction_by_hash,
stx_hash
)
assert staking_tx
assert isinstance(staking_tx, dict)
assert 'blockNumber' in staking_tx.keys()
assert 'blockHash' in staking_tx.keys()
assert isinstance( staking_tx, dict )
assert "blockNumber" in staking_tx.keys()
assert "blockHash" in staking_tx.keys()
global stx_block_num
stx_block_num = int(staking_tx['blockNumber'], 0)
stx_block_num = int( staking_tx[ "blockNumber" ] )
global stx_block_hash
stx_block_hash = staking_tx['blockHash']
stx_block_hash = staking_tx[ "blockHash" ]
global stx_index
stx_index = int( staking_tx[ "transactionIndex" ] )
@pytest.mark.run(order=12)
def test_get_transaction_by_block_hash_and_index(setup_blockchain):
def test_get_staking_transaction_by_block_hash_and_index( setup_blockchain ):
if not stx_block_hash:
pytest.skip('Failed to get reference block hash')
stx = _test_transaction_rpc(transaction.get_staking_transaction_by_block_hash_and_index, stx_block_hash, test_index)
pytest.skip( "Failed to get reference block hash" )
stx = _test_transaction_rpc(
transaction.get_staking_transaction_by_block_hash_and_index,
stx_block_hash,
stx_index,
)
assert stx
assert isinstance(stx, dict)
assert isinstance( stx, dict )
@pytest.mark.run(order=13)
def test_get_transaction_by_block_number_and_index(setup_blockchain):
def test_get_staking_transaction_by_block_number_and_index( setup_blockchain ):
if not stx_block_num:
pytest.skip('Failed to get reference block num')
stx = _test_transaction_rpc(transaction.get_staking_transaction_by_block_number_and_index, stx_block_num, test_index)
pytest.skip( "Failed to get reference block num" )
stx = _test_transaction_rpc(
transaction.get_staking_transaction_by_block_number_and_index,
stx_block_num,
stx_index,
)
assert stx
assert isinstance(stx, dict)
@pytest.mark.run(order=14)
def test_get_staking_transaction_error_sink(setup_blockchain):
errors = _test_transaction_rpc(transaction.get_staking_transaction_error_sink)
assert isinstance(errors, list)
@pytest.mark.run(order=15)
def test_send_raw_staking_transaction(setup_blockchain):
test_stx_hash = _test_transaction_rpc(transaction.send_raw_staking_transaction, raw_stx)
assert isinstance(test_stx_hash, str)
assert test_stx_hash == stx_hash
assert isinstance( stx, dict )
def test_get_staking_transaction_error_sink( setup_blockchain ):
errors = _test_transaction_rpc(
transaction.get_staking_transaction_error_sink
)
assert isinstance( errors, list )
def test_send_raw_staking_transaction( setup_blockchain ):
test_stx = _test_transaction_rpc(
transaction.send_and_confirm_raw_staking_transaction,
raw_stx,
endpoint = endpoint
)
assert isinstance( test_stx, dict )
assert test_stx[ "hash" ] == raw_stx_hash
def test_get_pool_stats( setup_blockchain ):
test_pool_stats = _test_transaction_rpc(
transaction.get_pool_stats,
endpoint = endpoint
)
assert isinstance( test_pool_stats, dict )
def test_get_pending_staking_transactions( setup_blockchain ):
pending_staking_transactions = _test_transaction_rpc(
transaction.get_pending_staking_transactions,
endpoint = endpoint
)
assert isinstance( pending_staking_transactions, list )
def test_errors():
with pytest.raises( exceptions.RPCError ):
transaction.get_pending_transactions( fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_transaction_error_sink( fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_pool_stats( fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_transaction_by_hash( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_transaction_by_block_hash_and_index(
"",
1,
endpoint = fake_shard
)
with pytest.raises( exceptions.RPCError ):
transaction.get_transaction_by_block_number_and_index(
1,
1,
endpoint = fake_shard
)
with pytest.raises( exceptions.RPCError ):
transaction.get_transaction_receipt( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.send_raw_transaction( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_pending_cx_receipts( fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_cx_receipt_by_hash( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.resend_cx_receipt( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_staking_transaction_by_hash( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_staking_transaction_by_block_hash_and_index(
"",
1,
endpoint = fake_shard
)
with pytest.raises( exceptions.RPCError ):
transaction.get_staking_transaction_by_block_number_and_index(
1,
1,
endpoint = fake_shard
)
with pytest.raises( exceptions.RPCError ):
transaction.get_staking_transaction_error_sink( endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.send_raw_staking_transaction( "", endpoint = fake_shard )
with pytest.raises( exceptions.RPCError ):
transaction.get_pending_staking_transactions( endpoint = fake_shard )

@@ -0,0 +1,225 @@
import pytest
from decimal import Decimal
from pyhmy import validator
from pyhmy.numbers import convert_one_to_atto
from pyhmy.exceptions import InvalidValidatorError
test_epoch_number = 0
genesis_block_number = 0
test_block_number = 1
test_validator_object = None
test_validator_loaded = False
def test_instantiate_validator( setup_blockchain ):
global test_validator_object
test_validator_object = validator.Validator(
"one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9"
)
assert isinstance( test_validator_object, validator.Validator )
def test_load_validator( setup_blockchain ):
if not test_validator_object:
pytest.skip( "Validator not instantiated yet" )
info = {
"name": "Alice",
"identity": "alice",
"website": "alice.harmony.one",
"details": "Don't mess with me!!!",
"security-contact": "Bob",
"min-self-delegation": convert_one_to_atto( 10000 ),
"amount": convert_one_to_atto( 10001 ),
"max-rate": "0.9",
"max-change-rate": "0.05",
"rate": "0.01",
"bls-public-keys": [
"0x30b2c38b1316da91e068ac3bd8751c0901ef6c02a1d58bc712104918302c6ed03d5894671d0c816dad2b4d303320f202"
],
"bls-key-sigs": [
"0x68f800b6adf657b674903e04708060912b893b7c7b500788808247550ab3e186e56a44ebf3ca488f8ed1a42f6cef3a04bd5d2b2b7eb5a767848d3135b362e668ce6bba42c7b9d5666d8e3a83be707b5708e722c58939fe9b07c170f3b7062414"
],
"max-total-delegation": convert_one_to_atto( 40000 ),
}
test_validator_object.load( info )
global test_validator_loaded
test_validator_loaded = True
"""
TypeScript signature source (outdated, since the JS SDK has not been updated for SlotKeySigs)
For now, I have checked that the transaction below works against localnet
---
const description: Description = new Description('Alice', 'alice', 'alice.harmony.one', 'Bob', "Don't mess with me!!!")
const commissionRates: CommissionRate = new CommissionRate(new Decimal('0.01'), new Decimal('0.9'), new Decimal('0.05'))
const stakeMsg: CreateValidator = new CreateValidator(
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
description,
commissionRates,
numberToHex(new Unit('10000').asOne().toWei()), // minSelfDelegation
numberToHex(new Unit('40000').asOne().toWei()), // maxTotalDelegation
[ '0xb9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608611' ],
numberToHex(new Unit('10001').asOne().toWei()) // amount
)
const stakingTx: StakingTransaction = new StakingTransaction(
Directive.DirectiveCreateValidator,
stakeMsg,
2, // nonce
numberToHex(new Unit('1').asOne().toWei()), // gasPrice
100, // gasLimit
null, // chainId
);
const signed = stakingTx.rlpSign('4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_create_validator_sign( setup_blockchain ):
if not ( test_validator_object and test_validator_loaded ):
pytest.skip( "Validator not instantiated or loaded yet" )
signed_hash = test_validator_object.sign_create_validator_transaction(
2,
int( convert_one_to_atto( 1 ) ),
100,
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
2,
).rawTransaction.hex()
assert (
signed_hash ==
"0xf9017580f9012394ebcd16e8c1d8f493ba04e99a56474122d81a9c58f83885416c69636585616c69636591616c6963652e6861726d6f6e792e6f6e6583426f6295446f6e2774206d6573732077697468206d65212121dcc8872386f26fc10000c9880c7d713b49da0000c887b1a2bc2ec500008a021e19e0c9bab24000008a0878678326eac9000000f1b030b2c38b1316da91e068ac3bd8751c0901ef6c02a1d58bc712104918302c6ed03d5894671d0c816dad2b4d303320f202f862b86068f800b6adf657b674903e04708060912b893b7c7b500788808247550ab3e186e56a44ebf3ca488f8ed1a42f6cef3a04bd5d2b2b7eb5a767848d3135b362e668ce6bba42c7b9d5666d8e3a83be707b5708e722c58939fe9b07c170f3b70624148a021e27c1806e59a4000002880de0b6b3a76400006428a0c6c7e62f02331df0afd4699ec514a2fc4548c920d77ad74d98caeec8c924c09aa02b27b999a724b1d341d6bbb0e877611d0047542cb7e380f9a6a272d204b450cd"
)
"""
Signature matched from TypeScript (outdated, since the JS SDK has not been updated for SlotKeyToAddSig)
For now, I have checked that the transaction below works against localnet
---
import {
CreateValidator,
EditValidator,
Delegate,
Undelegate,
CollectRewards,
Directive,
Description,
CommissionRate,
Decimal,
StakingTransaction,
} from '@harmony-js/staking'
const { numberToHex, Unit } = require('@harmony-js/utils');
const description: Description = new Description('Alice', 'alice', 'alice.harmony.one', 'Bob', "Don't mess with me!!!")
const commissionRates: CommissionRate = new CommissionRate(new Decimal('0.01'), new Decimal('0.9'), new Decimal('0.05'))
const stakeMsg: EditValidator = new EditValidator(
'one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9',
description,
new Decimal('0.06'),
numberToHex(new Unit('10000').asOne().toWei()), // minSelfDelegation
numberToHex(new Unit('40000').asOne().toWei()), // maxTotalDelegation
'0xb9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608611', // remove key
'0xb9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608612' // add key
)
const stakingTx: StakingTransaction = new StakingTransaction(
Directive.DirectiveEditValidator,
stakeMsg,
2, // nonce
numberToHex(new Unit('1').asOne().toWei()), // gasPrice
100, // gasLimit
2, // chainId
);
const signed = stakingTx.rlpSign('4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48')
console.log( 'Signed transaction' )
console.log(signed)
"""
def test_edit_validator_sign( setup_blockchain ):
if not ( test_validator_object and test_validator_loaded ):
pytest.skip( "Validator not instantiated or loaded yet" )
signed_hash = test_validator_object.sign_edit_validator_transaction(
2,
int(convert_one_to_atto(1)),
100,
"0.06",
"0xb9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608612", # remove key
"0xb9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608611", # add key
"0x68f800b6adf657b674903e04708060912b893b7c7b500788808247550ab3e186e56a44ebf3ca488f8ed1a42f6cef3a04bd5d2b2b7eb5a767848d3135b362e668ce6bba42c7b9d5666d8e3a83be707b5708e722c58939fe9b07c170f3b7062414", # add key sig
"4edef2c24995d15b0e25cbd152fb0e2c05d3b79b9c2afd134e6f59f91bf99e48",
2,
).rawTransaction.hex()
assert (
signed_hash ==
"0xf9018401f9013294ebcd16e8c1d8f493ba04e99a56474122d81a9c58f83885416c69636585616c69636591616c6963652e6861726d6f6e792e6f6e6583426f6295446f6e2774206d6573732077697468206d65212121c887d529ae9e8600008a021e19e0c9bab24000008a0878678326eac9000000b0b9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608612b0b9486167ab9087ab818dc4ce026edb5bf216863364c32e42df2af03c5ced1ad181e7d12f0e6dd5307a73b62247608611b86068f800b6adf657b674903e04708060912b893b7c7b500788808247550ab3e186e56a44ebf3ca488f8ed1a42f6cef3a04bd5d2b2b7eb5a767848d3135b362e668ce6bba42c7b9d5666d8e3a83be707b5708e722c58939fe9b07c170f3b706241402880de0b6b3a76400006427a0ecdae4a29d051f4f83dd54004858fbf0f7820e169b8e1846245835ceb686ee12a04b2336eb5830e30720137b2de539518fd5655467fef140ab31fde881a19f256a"
)
def test_invalid_validator( setup_blockchain ):
if not ( test_validator_object and test_validator_loaded ):
pytest.skip( "Validator not instantiated or loaded yet" )
with pytest.raises( InvalidValidatorError ):
info = {
"name": "Alice",
}
test_validator_object.load( info )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_name( "a" * 141 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_identity( "a" * 141 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_website( "a" * 141 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_security_contact( "a" * 141 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_details( "a" * 281 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_min_self_delegation( 1 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_max_total_delegation( 1 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_amount( 1 )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_max_rate( "2.0" )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_max_change_rate( "-2.0" )
with pytest.raises( InvalidValidatorError ):
test_validator_object.set_rate( "-2.0" )
def test_validator_getters( setup_blockchain ):
if not ( test_validator_object and test_validator_loaded ):
pytest.skip( "Validator not instantiated or loaded yet" )
assert (
test_validator_object.get_address() ==
"one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9"
)
assert test_validator_object.add_bls_key( "5" )
assert test_validator_object.remove_bls_key( "5" )
assert test_validator_object.get_name() == "Alice"
assert test_validator_object.get_identity() == "alice"
assert test_validator_object.get_website() == "alice.harmony.one"
assert test_validator_object.get_security_contact() == "Bob"
assert test_validator_object.get_details() == "Don't mess with me!!!"
assert isinstance(
test_validator_object.get_min_self_delegation(),
Decimal
)
assert isinstance(
test_validator_object.get_max_total_delegation(),
Decimal
)
assert isinstance( test_validator_object.get_amount(), Decimal )
assert isinstance( test_validator_object.get_max_rate(), Decimal )
assert isinstance( test_validator_object.get_max_change_rate(), Decimal )
assert isinstance( test_validator_object.get_rate(), Decimal )
assert len( test_validator_object.get_bls_keys() ) > 0
def test_validator_load_from_blockchain( setup_blockchain ):
test_validator_object2 = validator.Validator(
"one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3"
)
test_validator_object2.load_from_blockchain()
assert (
test_validator_object2.get_address() ==
"one155jp2y76nazx8uw5sa94fr0m4s5aj8e5xm6fu3"
)

@@ -12,23 +12,64 @@ from pyhmy import util
TEMP_DIR = "/tmp/pyhmy-testing/test-util"
@pytest.fixture(scope="session", autouse=True)
@pytest.fixture( scope = "session", autouse = True )
def setup():
shutil.rmtree(TEMP_DIR, ignore_errors=True)
os.makedirs(TEMP_DIR, exist_ok=True)
shutil.rmtree( TEMP_DIR, ignore_errors = True )
os.makedirs( TEMP_DIR, exist_ok = True )
def test_json_load():
dec = util.json_load('1.1', parse_float=decimal.Decimal)
assert isinstance(dec, decimal.Decimal)
assert float(dec) == 1.1
dec = util.json_load( "1.1", parse_float = decimal.Decimal )
assert isinstance( dec, decimal.Decimal )
assert float( dec ) == 1.1
ref_dict = {
'test': 'val',
'arr': [
1,
2,
3
]
"test": "val",
"arr": [ 1,
2,
3 ]
}
loaded_dict = util.json_load(json.dumps(ref_dict))
assert str(ref_dict) == str(loaded_dict)
loaded_dict = util.json_load( json.dumps( ref_dict ) )
assert str( ref_dict ) == str( loaded_dict )
def test_chain_id_to_int():
assert util.chain_id_to_int( 2 ) == 2
assert util.chain_id_to_int( "HmyMainnet" ) == 1
def test_get_gopath():
assert isinstance( util.get_gopath(), str )
def test_get_goversion():
assert isinstance( util.get_goversion(), str )
def test_convert_one_to_hex():
assert (
util.convert_one_to_hex( "0xebcd16e8c1d8f493ba04e99a56474122d81a9c58" )
== "0xeBCD16e8c1D8f493bA04E99a56474122D81A9c58"
)
assert (
util.convert_one_to_hex( "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9" )
== "0xeBCD16e8c1D8f493bA04E99a56474122D81A9c58"
)
def test_convert_hex_to_one():
assert (
util.convert_hex_to_one( "0xebcd16e8c1d8f493ba04e99a56474122d81a9c58" )
== "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9"
)
assert (
util.convert_hex_to_one( "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9" )
== "one1a0x3d6xpmr6f8wsyaxd9v36pytvp48zckswvv9"
)
def test_get_bls_build_variables():
assert isinstance( util.get_bls_build_variables(), dict )
def test_is_active_shard():
assert isinstance( util.is_active_shard( "" ), bool )
