Skip to content

Commit dec39be

Browse files
authored
Merge pull request #180 from opentensor/release/1.5.2
Release/1.5.2
2 parents ed422b6 + 6bb8302 commit dec39be

File tree

16 files changed

+544
-123
lines changed

16 files changed

+544
-123
lines changed

.github/workflows/e2e-tests.yml

Lines changed: 173 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,173 @@
1+
name: E2E Tests
2+
3+
concurrency:
4+
group: e2e-${{ github.event.pull_request.number || github.ref }}
5+
cancel-in-progress: true
6+
7+
on:
8+
pull_request:
9+
branches:
10+
- '**'
11+
types: [ opened, synchronize, reopened, ready_for_review ]
12+
13+
workflow_dispatch:
14+
inputs:
15+
verbose:
16+
description: "Output more information when triggered manually"
17+
required: false
18+
default: ""
19+
20+
env:
21+
CARGO_TERM_COLOR: always
22+
VERBOSE: ${{ github.event.inputs.verbose }}
23+
24+
# job to run tests in parallel
25+
jobs:
26+
# Looking for e2e tests
27+
find-tests:
28+
runs-on: ubuntu-latest
29+
if: ${{ github.event.pull_request.draft == false }}
30+
outputs:
31+
test-files: ${{ steps.get-tests.outputs.test-files }}
32+
steps:
33+
- name: Check-out repository under $GITHUB_WORKSPACE
34+
uses: actions/checkout@v4
35+
36+
- name: Find test files
37+
id: get-tests
38+
run: |
39+
test_files=$(find tests/e2e_tests -name "test*.py" | jq -R -s -c 'split("\n") | map(select(. != ""))')
40+
# keep it here for future debug
41+
# test_files=$(find tests/e2e_tests -type f -name "test*.py" | grep -E 'test_(hotkeys|staking)\.py$' | jq -R -s -c 'split("\n") | map(select(. != ""))')
42+
echo "Found test files: $test_files"
43+
echo "test-files=$test_files" >> "$GITHUB_OUTPUT"
44+
shell: bash
45+
46+
# Pull docker image
47+
pull-docker-image:
48+
runs-on: ubuntu-latest
49+
outputs:
50+
image-name: ${{ steps.set-image.outputs.image }}
51+
steps:
52+
- name: Set Docker image tag based on label or branch
53+
id: set-image
54+
run: |
55+
echo "Event: $GITHUB_EVENT_NAME"
56+
echo "Branch: $GITHUB_REF_NAME"
57+
58+
echo "Reading labels ..."
59+
if [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then
60+
labels=$(jq -r '.pull_request.labels[].name' "$GITHUB_EVENT_PATH")
61+
else
62+
labels=""
63+
fi
64+
65+
image=""
66+
67+
for label in $labels; do
68+
echo "Found label: $label"
69+
case "$label" in
70+
"subtensor-localnet:main")
71+
image="ghcr.io/opentensor/subtensor-localnet:main"
72+
break
73+
;;
74+
"subtensor-localnet:testnet")
75+
image="ghcr.io/opentensor/subtensor-localnet:testnet"
76+
break
77+
;;
78+
"subtensor-localnet:devnet")
79+
image="ghcr.io/opentensor/subtensor-localnet:devnet"
80+
break
81+
;;
82+
esac
83+
done
84+
85+
if [[ -z "$image" ]]; then
86+
# fallback to default based on branch
87+
if [[ "${GITHUB_REF_NAME}" == "master" ]]; then
88+
image="ghcr.io/opentensor/subtensor-localnet:main"
89+
else
90+
image="ghcr.io/opentensor/subtensor-localnet:devnet-ready"
91+
fi
92+
fi
93+
94+
echo "✅ Final selected image: $image"
95+
echo "image=$image" >> "$GITHUB_OUTPUT"
96+
97+
- name: Log in to GitHub Container Registry
98+
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin
99+
100+
- name: Pull Docker Image
101+
run: docker pull ${{ steps.set-image.outputs.image }}
102+
103+
- name: Save Docker Image to Cache
104+
run: docker save -o subtensor-localnet.tar ${{ steps.set-image.outputs.image }}
105+
106+
- name: Upload Docker Image as Artifact
107+
uses: actions/upload-artifact@v4
108+
with:
109+
name: subtensor-localnet
110+
path: subtensor-localnet.tar
111+
112+
# Job to run tests in parallel
113+
run-fast-blocks-e2e-test:
114+
name: "FB: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}"
115+
needs:
116+
- find-tests
117+
- pull-docker-image
118+
runs-on: ubuntu-latest
119+
timeout-minutes: 45
120+
strategy:
121+
fail-fast: false # Allow other matrix jobs to run even if this job fails
122+
max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner)
123+
matrix:
124+
os:
125+
- ubuntu-latest
126+
test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }}
127+
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
128+
steps:
129+
- name: Check-out repository
130+
uses: actions/checkout@v4
131+
132+
- name: Set up Python ${{ matrix.python-version }}
133+
uses: actions/setup-python@v5
134+
with:
135+
python-version: ${{ matrix.python-version }}
136+
137+
- name: Install uv
138+
uses: astral-sh/setup-uv@v4
139+
140+
- name: install dependencies
141+
run: |
142+
uv sync --extra dev --dev
143+
144+
- name: Download Cached Docker Image
145+
uses: actions/download-artifact@v4
146+
with:
147+
name: subtensor-localnet
148+
149+
- name: Load Docker Image
150+
run: docker load -i subtensor-localnet.tar
151+
152+
- name: Run tests with retry
153+
env:
154+
LOCALNET_IMAGE_NAME: ${{ needs.pull-docker-image.outputs.image-name }}
155+
run: |
156+
for i in 1 2 3; do
157+
echo "::group::🔁 Test attempt $i"
158+
if uv run pytest ${{ matrix.test-file }} -s; then
159+
echo "✅ Tests passed on attempt $i"
160+
echo "::endgroup::"
161+
exit 0
162+
else
163+
echo "❌ Tests failed on attempt $i"
164+
echo "::endgroup::"
165+
if [ "$i" -lt 3 ]; then
166+
echo "Retrying..."
167+
sleep 5
168+
fi
169+
fi
170+
done
171+
172+
echo "Tests failed after 3 attempts"
173+
exit 1

.github/workflows/run-async-substrate-interface-tests.yml

Lines changed: 0 additions & 81 deletions
This file was deleted.
Lines changed: 59 additions & 0 deletions (NOTE: the filename header for this newly added workflow file is missing from this capture — presumably a second workflow under .github/workflows/; confirm against the original commit)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
name: Unit and integration tests checker
2+
permissions:
3+
contents: read
4+
5+
on:
6+
pull_request:
7+
types: [opened, synchronize, reopened, edited]
8+
9+
jobs:
10+
unit-and-integration-tests:
11+
if: github.event.pull_request.draft == false
12+
runs-on: ubuntu-latest
13+
14+
strategy:
15+
fail-fast: false
16+
max-parallel: 5
17+
matrix:
18+
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
19+
20+
steps:
21+
- name: Checkout repository
22+
uses: actions/checkout@v4
23+
24+
- name: Set up Python
25+
uses: actions/setup-python@v5
26+
with:
27+
python-version: "3.11"
28+
29+
- name: Cache venv
30+
id: cache
31+
uses: actions/cache@v4
32+
with:
33+
path: venv
34+
key: v2-${{ runner.os }}-${{ hashFiles('pyproject.toml') }}
35+
36+
- name: Install deps
37+
if: ${{ steps.cache.outputs.cache-hit != 'true' }}
38+
run: |
39+
python -m venv venv
40+
source venv/bin/activate
41+
python -m pip install --upgrade pip
42+
python -m pip install uv
43+
python -m uv sync --extra dev --active
44+
45+
- name: Unit tests
46+
timeout-minutes: 20
47+
env:
48+
PYTHONUNBUFFERED: "1"
49+
run: |
50+
source venv/bin/activate
51+
python -m uv run pytest -n 2 tests/unit_tests/ --reruns 3
52+
53+
- name: Integration tests
54+
timeout-minutes: 20
55+
env:
56+
PYTHONUNBUFFERED: "1"
57+
run: |
58+
source venv/bin/activate
59+
python -m uv run pytest -n 2 tests/integration_tests/ --reruns 3

CHANGELOG.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,14 @@
11
# Changelog
2+
## 1.5.2 /2025-09-08
3+
* Improve test workflows by @basfroman in https://github.com/opentensor/async-substrate-interface/pull/173
4+
* Adds env var support for setting cache size by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/174
5+
* Set env vars as str in unit test by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/177
6+
* DiskCachedAsyncSubstrateInterface: use aiosqlite by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/176
7+
* Additional Debug Logging by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/178
8+
* None type edge case catch by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/184
9+
10+
11+
**Full Changelog**: https://github.com/opentensor/async-substrate-interface/compare/v1.5.1...v1.5.2
212

313
## 1.5.1 /2025-08-05
414
* query multiple/decoding fix by @thewhaleking in https://github.com/opentensor/async-substrate-interface/pull/168

README.md

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,49 @@ async def main():
5454
asyncio.run(main())
5555
```
5656

57+
### Caching
58+
There are a few different cache types used in this library to improve the performance overall. The one with which
59+
you are probably familiar is the typical `functools.lru_cache` used in `sync_substrate.SubstrateInterface`.
60+
61+
By default, it uses a max cache size of 512 for smaller returns, and 16 for larger ones. These cache sizes are
62+
user-configurable using the respective env vars, `SUBSTRATE_CACHE_METHOD_SIZE` and `SUBSTRATE_RUNTIME_CACHE_SIZE`.
63+
64+
They are applied only on methods whose results cannot change — such as the block hash for a given block number
65+
(small, 512 default), or the runtime for a given runtime version (large, 16 default).
66+
67+
Additionally, in `AsyncSubstrateInterface`, because of its asynchronous nature, we developed our own asyncio-friendly
68+
LRU caches. The primary one is the `CachedFetcher` which wraps the same methods as `functools.lru_cache` does in
69+
`SubstrateInterface`, but the key difference here is that each request is assigned a future that is returned when the
70+
initial request completes. So, if you were to do:
71+
72+
```python
73+
bn = 5000
74+
bh1, bh2 = await asyncio.gather(
75+
asi.get_block_hash(bn),
76+
asi.get_block_hash(bn)
77+
)
78+
```
79+
it would actually only make one single network call, and return the result to both requests. Like `SubstrateInterface`,
80+
it also takes the `SUBSTRATE_CACHE_METHOD_SIZE` and `SUBSTRATE_RUNTIME_CACHE_SIZE` vars to set cache size.
81+
82+
The third and final caching mechanism we use is `async_substrate_interface.async_substrate.DiskCachedAsyncSubstrateInterface`,
83+
which functions the same as the normal `AsyncSubstrateInterface`, but that also saves this cache to the disk, so the cache
84+
is preserved between runs. This is well-suited to a fairly common use-case (such as `btcli`). As you may call different networks
85+
with entirely different results, this cache is keyed by the URI supplied at instantiation of the `DiskCachedAsyncSubstrateInterface`
86+
object, so `DiskCachedAsyncSubstrateInterface(network_1)` and `DiskCachedAsyncSubstrateInterface(network_2)` will not share
87+
the same on-disk cache.
88+
89+
As with the other two caches, this also takes `SUBSTRATE_CACHE_METHOD_SIZE` and `SUBSTRATE_RUNTIME_CACHE_SIZE` env vars.
90+
91+
92+
### ENV VARS
93+
The following environment variables are used within async-substrate-interface
94+
- NO_CACHE (default 0): if set to 1, when using the DiskCachedAsyncSubstrateInterface class, no persistent on-disk cache will be stored, instead using only in-memory cache.
95+
- CACHE_LOCATION (default `~/.cache/async-substrate-interface`): this determines the location for the cache file, if using DiskCachedAsyncSubstrateInterface
96+
- SUBSTRATE_CACHE_METHOD_SIZE (default 512): the cache size (either in-memory or on-disk) of the smaller return-size methods (see the Caching section for more info)
97+
- SUBSTRATE_RUNTIME_CACHE_SIZE (default 16): the cache size (either in-memory or on-disk) of the larger return-size methods (see the Caching section for more info)
98+
99+
57100
## Contributing
58101

59102
Contributions are welcome! Please open an issue or submit a pull request to the `staging` branch.

0 commit comments

Comments
 (0)