Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions envs/miner-ihp/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
services:
miner:
image: ghcr.io/backend-developers-ltd/infinitehash-subnet-prod@sha256:4ba538db88ecf36f58a6aa5ea0304a28b1b9d3b060def34877b77d6900637fd9
image: ghcr.io/backend-developers-ltd/infinitehash-subnet-prod@sha256:677308a08ffdd881d01cf2b0a2a361ec24df118d2e5fda9f8f229341141bbf24
pull_policy: always
init: true
restart: unless-stopped
Expand Down Expand Up @@ -47,7 +47,7 @@ services:
<<: *logging

ihp-proxy:
image: backenddevelopersltd/infinitehash-proxy-server:v1-latest
image: backenddevelopersltd/infinitehash-proxy-server@sha256:8ee04e0a60cc95c716b7c581af98bc1a03af051a506cc30a7ab1ed7a497b5ee6
restart: unless-stopped
stop_grace_period: 15s
env_file:
Expand All @@ -68,7 +68,7 @@ services:
<<: *logging

ihp-api:
image: backenddevelopersltd/infinitehash-proxy-api:v1-latest
image: backenddevelopersltd/infinitehash-proxy-api@sha256:f741cb064966f8dfd4d4b5bf3d5fcd9fb081cafaa387d3d95946c2e3116329bd
restart: unless-stopped
env_file:
- ./proxy/.env
Expand Down
2 changes: 1 addition & 1 deletion envs/miner/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
services:
miner:
image: ghcr.io/backend-developers-ltd/infinitehash-subnet-prod@sha256:6dab0713c45901f589ba531567d84555a12a53aaf79d277639997e19916475ae
image: ghcr.io/backend-developers-ltd/infinitehash-subnet-prod@sha256:677308a08ffdd881d01cf2b0a2a361ec24df118d2e5fda9f8f229341141bbf24
pull_policy: always
init: true
restart: unless-stopped
Expand Down
3 changes: 2 additions & 1 deletion installer/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,8 @@ In `braiins` mode, the installer provisions Braiins Farm Proxy (`farm-proxy` and
- The miner container uses `APS_MINER_SUBNET_POOL_NAME` (default: `central-proxy`) to select which pool gets absolute `target_hashrate`.
- The selected pool name must exist in `proxy/pools.toml`; otherwise no subnet target update is applied.
- During installation, if `proxy/pools.toml` does not exist yet, the script asks for backup/private pool host/port, optional backup `worker_id` override, and for `central-proxy` asks whether to use suggested identity format or set `worker_id` manually (manual input may be empty for no override), then writes provided values to `pools.backup`/`[[pools.main]]`.
- Installer default sets `[extranonce].extranonce2_size = 2`.
- Installer also sets `health_check_worker_id = "sn89auction.check"` for both `pools.backup` and `[[pools.main]]`.
- Installer default sets `[extranonce].extranonce2_size = 6`.
- After updating `proxy/pools.toml`, APS miner touches reload sentinel `APS_MINER_IHP_RELOAD_SENTINEL` (default: `/root/src/proxy/.reload-ihp`); sidecar `ihp-proxy-reloader` then runs `kill -HUP 1` in `ihp-proxy` PID namespace.

`braiins` mode:
Expand Down
19 changes: 17 additions & 2 deletions installer/miner_install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,7 @@ write_default_ihp_pools() {
name = "private-backup"
host = "${backup_pool_host}"
port = ${backup_pool_port}
health_check_worker_id = "sn89auction.check"
EOL

if [ -n "${backup_pool_worker_id}" ]; then
Expand All @@ -75,6 +76,7 @@ name = "central-proxy"
host = "stratum.infinitehash.xyz"
port = 9332
weight = 1
health_check_worker_id = "sn89auction.check"
EOL

if [ -n "${main_pool_worker_id}" ]; then
Expand All @@ -86,7 +88,7 @@ EOL
cat >> "${destination}" <<EOL

[extranonce]
extranonce2_size = 2
extranonce2_size = 6

[routing]
rebalance_interval_seconds = 10
Expand All @@ -96,7 +98,20 @@ rebalance_threshold_percent = 10
min_reassign_interval_seconds = 45
pool_connect_timeout_seconds = 10
pool_read_timeout_seconds = 300
pool_unhealthy_cooldown_seconds = 10
# How often to run pool health checks (seconds)
pool_health_check_interval_seconds = 10

# Maximum seconds for subscribe/authorize health-check flow (seconds)
# Includes waiting for initial notify and difficulty messages
pool_health_authorization_timeout_seconds = 10

# Optional: minimum initial mining difficulty accepted during pool health checks
# Uncomment to reject pools offering difficulty below this value
#pool_health_min_initial_difficulty = 1000

# Optional: maximum initial mining difficulty accepted during pool health checks
# Uncomment to reject pools offering difficulty above this value
#pool_health_max_initial_difficulty = 100000000
worker_assignment_stale_threshold_seconds = 30
disconnected_worker_retention_seconds = 900

Expand Down
84 changes: 69 additions & 15 deletions tools/update_compose_digest.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
Examples:
python tools/update_compose_digest.py validator
python tools/update_compose_digest.py miner --environment staging
python tools/update_compose_digest.py ihp
python tools/update_compose_digest.py all --environment prod
"""

Expand All @@ -19,6 +20,10 @@

REPOSITORY_PREFIX = "ghcr.io/backend-developers-ltd/infinitehash-subnet"
TAG = "v0-latest"
IHP_IMAGE_TAGS = {
"ihp-proxy": "backenddevelopersltd/infinitehash-proxy-server:v1-latest",
"ihp-api": "backenddevelopersltd/infinitehash-proxy-api:v1-latest",
}


def run(cmd: list[str]) -> str:
Expand All @@ -27,8 +32,25 @@ def run(cmd: list[str]) -> str:
return result.stdout


def fetch_digest_buildx(image: str) -> str | None:
    """Return top-level image/index digest using docker buildx imagetools inspect.

    Returns None when buildx is unavailable or the inspect call fails, so the
    caller can fall back to `docker manifest inspect`.
    """
    try:
        inspect_output = run(["docker", "buildx", "imagetools", "inspect", image])
    except subprocess.CalledProcessError:
        # buildx missing/misconfigured or image not found — signal "no result".
        return None

    # buildx prints the top-level digest on its own line, e.g. "Digest: sha256:...".
    digest_match = re.search(r"(?m)^Digest:\s+(\S+)\s*$", inspect_output)
    return digest_match.group(1) if digest_match else None


def fetch_digest(image: str) -> str:
"""Return manifest digest for the given image tag."""
digest = fetch_digest_buildx(image)
if digest:
return digest

try:
output = run(["docker", "manifest", "inspect", "--verbose", image])
except subprocess.CalledProcessError as exc: # pragma: no cover - bubble context
Expand All @@ -39,10 +61,21 @@ def fetch_digest(image: str) -> str:
except json.JSONDecodeError as exc: # pragma: no cover - malformed
raise SystemExit(f"Cannot parse docker manifest output: {exc}") from exc

digest = data.get("Descriptor", {}).get("digest")
if not digest:
raise SystemExit("Unable to find digest in docker manifest output.")
return digest
if isinstance(data, dict):
digest = data.get("Descriptor", {}).get("digest")
if digest:
return digest
elif isinstance(data, list):
digests = [item.get("Descriptor", {}).get("digest") for item in data if isinstance(item, dict)]
digests = [digest for digest in digests if digest]
if len(digests) == 1:
return digests[0]
raise SystemExit(
"Unable to determine a single digest from docker manifest output. "
"Install/enable docker buildx or use an image with a single manifest."
)

raise SystemExit("Unable to find digest in docker manifest output.")


def update_compose(compose_path: Path, env: str, digest: str) -> int:
Expand All @@ -55,11 +88,21 @@ def update_compose(compose_path: Path, env: str, digest: str) -> int:
return count


def update_compose_image(compose_path: Path, image_repository: str, digest: str) -> int:
    """Replace image references for a specific repository with a pinned digest.

    Rewrites every `<image_repository>:<tag>` or `<image_repository>@<digest>`
    reference in the compose file to `<image_repository>@<digest>`. The file is
    written back only when at least one reference matched.

    Returns the number of references replaced (0 when none matched).
    """
    # Require an existing `:tag` or `@digest` suffix so bare mentions of the
    # repository string (e.g. in comments) are left untouched.
    reference_pattern = re.compile(rf"({re.escape(image_repository)})(?:[:@][^\s]+)")
    original_text = compose_path.read_text()
    updated_text, replaced = reference_pattern.subn(rf"\1@{digest}", original_text)
    if replaced:
        compose_path.write_text(updated_text)
    return replaced


def main() -> None:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"target",
choices=["validator", "miner", "all"],
choices=["validator", "miner", "ihp", "all"],
help="which docker-compose file(s) to update",
)
parser.add_argument(
Expand Down Expand Up @@ -92,23 +135,34 @@ def main() -> None:
)
args = parser.parse_args()

image_tag = f"{REPOSITORY_PREFIX}-{args.environment}:{TAG}"
digest = fetch_digest(image_tag)

targets: list[tuple[str, Path]] = []
targets: list[tuple[str, Path, str, str]] = []
if args.target in ("validator", "all"):
targets.append(("validator", args.compose_path))
image_tag = f"{REPOSITORY_PREFIX}-{args.environment}:{TAG}"
targets.append(("validator", args.compose_path, image_tag, "app"))
if args.target in ("miner", "all"):
targets.append(("miner-braiins", args.miner_compose_path))
targets.append(("miner-ihp", args.miner_ihp_compose_path))
image_tag = f"{REPOSITORY_PREFIX}-{args.environment}:{TAG}"
targets.append(("miner-braiins", args.miner_compose_path, image_tag, "app"))
targets.append(("miner-ihp", args.miner_ihp_compose_path, image_tag, "app"))
if args.target in ("ihp", "all"):
for label, image_tag in IHP_IMAGE_TAGS.items():
targets.append((label, args.miner_ihp_compose_path, image_tag, "direct"))

total_replacements = 0
for label, compose_path in targets:
digest_cache: dict[str, str] = {}
for label, compose_path, image_tag, kind in targets:
if not compose_path.exists():
raise SystemExit(f"docker-compose file not found for {label}: {compose_path}")
replacements = update_compose(compose_path, args.environment, digest)
digest = digest_cache.get(image_tag)
if digest is None:
digest = fetch_digest(image_tag)
digest_cache[image_tag] = digest
if kind == "app":
replacements = update_compose(compose_path, args.environment, digest)
else:
image_repository = image_tag.rsplit(":", 1)[0]
replacements = update_compose_image(compose_path, image_repository, digest)
if replacements == 0:
print(f"No image references for environment '{args.environment}' found in {compose_path}")
print(f"No image references for {image_tag} found in {compose_path}")
else:
print(f"{label.capitalize()}: pinned {replacements} reference(s) to {image_tag}@{digest}")
total_replacements += replacements
Expand Down
Loading