Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ The result: a sustainable incentive layer that channels resources toward buildin
# Quick start
git clone https://github.com/entrius/gittensor.git
cd gittensor
cp env.example .env
cp .env.example .env
# Edit .env with proper values
nano .env

Expand All @@ -60,7 +60,7 @@ See full guide **[here](https://docs.gittensor.io/miner.html)**
# Quick start
git clone https://github.com/entrius/gittensor.git
cd gittensor
cp env.example .env
cp .env.example .env
# Edit .env with proper values
nano .env

Expand All @@ -83,7 +83,7 @@ _NOTE: don’t be afraid to provide recommendations for your favorite open sourc

- Programming Language Weights

A list of major file types/extensions, mostly related to programming languages, but also plenty of markdown, documentation, and other common files are included. Each extension has a weight for scoring. If the extension has a language full name then it code in those languages will be evaluated using token-based scoring.
A list of major file types/extensions, mostly related to programming languages, but also plenty of markdown, documentation, and other common files are included. Each extension has a weight for scoring. If the extension has a language full name then code in those languages will be evaluated using token-based scoring.

_NOTE: this list will also be dynamic. Additions and weight changes will occur as the subnet matures._

Expand Down
2 changes: 2 additions & 0 deletions docker-compose.miner.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ services:
image: entrius/gittensor:latest
container_name: gt-miner
restart: unless-stopped
network_mode: "host"
entrypoint: /app/scripts/miner-entrypoint.sh
env_file:
- .env
Expand All @@ -11,5 +12,6 @@ services:
volumes:
# 'ro' = readonly
- ${WALLET_PATH}:/root/.bittensor/wallets:ro
command: ["--subtensor.chain_endpoint", "${SUBTENSOR_CHAIN_ENDPOINT:-ws://172.17.0.1:9944}"]
labels:
- "com.centurylinklabs.watchtower.enable=true"
4 changes: 2 additions & 2 deletions gittensor/classes.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ class PullRequest:
base_score: float = 0.0
issue_multiplier: float = 1.0
open_pr_spam_multiplier: float = 1.0
repository_uniqueness_multiplier: float = 1.0
pioneer_multiplier: float = 1.0
time_decay_multiplier: float = 1.0
credibility_multiplier: float = 1.0
raw_credibility: float = 1.0 # Before applying ^k scalar
Expand Down Expand Up @@ -194,7 +194,7 @@ def calculate_final_earned_score(self) -> float:
'repo': self.repo_weight_multiplier,
'issue': self.issue_multiplier,
'spam': self.open_pr_spam_multiplier,
'unique': self.repository_uniqueness_multiplier,
'pioneer': self.pioneer_multiplier,
'decay': self.time_decay_multiplier,
'cred': self.credibility_multiplier,
}
Expand Down
3 changes: 2 additions & 1 deletion gittensor/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,8 @@
DEFAULT_MAX_CONTRIBUTION_SCORE_FOR_FULL_BONUS = 2000

# Boosts
UNIQUE_PR_BOOST = 0.74
PIONEER_PR_BOOST = 5.0 # Big reward for the first miner to merge a PR on a repo inactive for 90 days.
PIONEER_LOOKBACK_DAYS = 90 # Repos with no merged PRs in this window are considered untouched.
MAX_CODE_DENSITY_MULTIPLIER = 3.0

# Issue boosts
Expand Down
87 changes: 47 additions & 40 deletions gittensor/utils/github_api_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -801,64 +801,71 @@ def load_miners_prs(
page_info: Dict = pr_data.get('pageInfo', {})

for pr_raw in prs:
repository_full_name = parse_repo_name(pr_raw['repository'])
pr_state = pr_raw['state']

# Stop querying once we hit PRs older than the tier incentive start date
pr_creation_time = datetime.fromisoformat(pr_raw['createdAt'].rstrip('Z')).replace(tzinfo=timezone.utc)
try:
repository_full_name = parse_repo_name(pr_raw['repository'])
pr_state = pr_raw['state']

if pr_creation_time < TIER_BASED_INCENTIVE_MECHANISM_START_DATE:
bt.logging.info(
f'Reached PR #{pr_raw["number"]} in {repository_full_name} created at {pr_creation_time}, '
f'before tier incentive start date ({TIER_BASED_INCENTIVE_MECHANISM_START_DATE}). '
f'Stopping PR fetch.'
# Stop querying once we hit PRs older than the tier incentive start date
pr_creation_time = datetime.fromisoformat(pr_raw['createdAt'].rstrip('Z')).replace(
tzinfo=timezone.utc
)
return

if repository_full_name not in master_repositories:
bt.logging.info(f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - ineligible repo')
continue
if pr_creation_time < TIER_BASED_INCENTIVE_MECHANISM_START_DATE:
bt.logging.info(
f'Reached PR #{pr_raw["number"]} in {repository_full_name} created at {pr_creation_time}, '
f'before tier incentive start date ({TIER_BASED_INCENTIVE_MECHANISM_START_DATE}). '
f'Stopping PR fetch.'
)
return

repo_config = master_repositories[repository_full_name]
if repository_full_name not in master_repositories:
bt.logging.info(f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - ineligible repo')
continue

# Check if repo is inactive
if repo_config.inactive_at is not None:
inactive_dt = datetime.fromisoformat(repo_config.inactive_at.rstrip('Z')).replace(
tzinfo=timezone.utc
)
# Skip PR if it was created after the repo became inactive
if pr_creation_time >= inactive_dt:
bt.logging.info(
f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - PR was created after repo became inactive (created: {pr_creation_time.isoformat()}, inactive: {inactive_dt.isoformat()})'
repo_config = master_repositories[repository_full_name]

# Check if repo is inactive
if repo_config.inactive_at is not None:
inactive_dt = datetime.fromisoformat(repo_config.inactive_at.rstrip('Z')).replace(
tzinfo=timezone.utc
)
# Skip PR if it was created after the repo became inactive
if pr_creation_time >= inactive_dt:
bt.logging.info(
f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - PR was created after repo became inactive (created: {pr_creation_time.isoformat()}, inactive: {inactive_dt.isoformat()})'
)
continue

if pr_state in (PRState.OPEN.value, PRState.CLOSED.value):
try_add_open_or_closed_pr(miner_eval, pr_raw, pr_state, lookback_date_filter)
continue

if pr_state in (PRState.OPEN.value, PRState.CLOSED.value):
try_add_open_or_closed_pr(miner_eval, pr_raw, pr_state, lookback_date_filter)
continue
should_skip, skip_reason = should_skip_merged_pr(
pr_raw, repository_full_name, repo_config, lookback_date_filter
)

should_skip, skip_reason = should_skip_merged_pr(
pr_raw, repository_full_name, repo_config, lookback_date_filter
)
if should_skip:
bt.logging.debug(skip_reason)
continue

if should_skip:
bt.logging.debug(skip_reason)
continue
miner_eval.add_merged_pull_request(pr_raw)

miner_eval.add_merged_pull_request(pr_raw)
except Exception as e:
pr_number = pr_raw.get('number', '?')
bt.logging.warning(f'Error processing PR #{pr_number}, skipping: {e}')

if not page_info.get('hasNextPage') or len(prs) == 0:
break

cursor = page_info.get('endCursor')

bt.logging.info(
f'Fetched {len(miner_eval.merged_pull_requests)} merged PRs, {len(miner_eval.open_pull_requests)} open PRs, '
f'{len(miner_eval.closed_pull_requests)} closed'
)

except Exception as e:
bt.logging.error(f'Error fetching PRs via GraphQL: {e}')
bt.logging.error(f'Unexpected error fetching PRs via GraphQL: {e}')

bt.logging.info(
f'Fetched {len(miner_eval.merged_pull_requests)} merged PRs, {len(miner_eval.open_pull_requests)} open PRs, '
f'{len(miner_eval.closed_pull_requests)} closed'
)


def extract_pr_number_from_url(pr_url: str) -> Optional[int]:
Expand Down
59 changes: 48 additions & 11 deletions gittensor/validator/evaluation/scoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
TIME_DECAY_MIN_MULTIPLIER,
TIME_DECAY_SIGMOID_MIDPOINT,
TIME_DECAY_SIGMOID_STEEPNESS_SCALAR,
UNIQUE_PR_BOOST,
PIONEER_PR_BOOST,
)
from gittensor.utils.github_api_tools import (
FileContentPair,
Expand Down Expand Up @@ -348,8 +348,8 @@ def finalize_miner_scores(miner_evaluations: Dict[int, MinerEvaluation]) -> None

# Process merged PRs
for pr in evaluation.merged_pull_requests:
pr.repository_uniqueness_multiplier = calculate_uniqueness_multiplier(
pr.repository_full_name, repo_counts, total_contributing_miners
pr.pioneer_multiplier = calculate_pioneer_multiplier(
pr, evaluation, miner_evaluations
)

# Apply spam multiplier (calculated once per miner based on unlocked tiers)
Expand Down Expand Up @@ -431,15 +431,52 @@ def finalize_miner_scores(miner_evaluations: Dict[int, MinerEvaluation]) -> None
bt.logging.info('Finalization complete.')


def calculate_uniqueness_multiplier(
repo_full_name: str, repo_counts: Dict[str, int], total_contributing_miners: int
def calculate_pioneer_multiplier(
target_pr: PullRequest, target_miner_eval: MinerEvaluation, all_evaluations: Dict[int, MinerEvaluation]
) -> float:
"""Calculate repository uniqueness multiplier based on how many miners contribute to a repo."""
if total_contributing_miners == 0:
return 1.0
repo_count = repo_counts.get(repo_full_name, 0)
uniqueness_score = (total_contributing_miners - repo_count + 1) / total_contributing_miners
return 1.0 + (uniqueness_score * UNIQUE_PR_BOOST)
"""
Calculate pioneer multiplier for a merged PR.

    Instead of a naive binary check, this uses an exponential decay model.
    The bonus starts at its maximum for the very first PR merged in a
    repository and halves for each subsequent PR merged on that same
    repository, regardless of which miner merges it.

    NOTE(review): the 90-day inactivity window (PIONEER_LOOKBACK_DAYS in
    constants.py) described for this bonus is not applied here — prior PRs
    are counted with no date filter. Confirm whether prior-PR counting
    should be restricted to that lookback window.

This creates a "gold rush" dynamic:
- 1st PR: 1.0 + MAX_BOOST (e.g., 6.0)
- 2nd PR: 1.0 + (MAX_BOOST * 0.5) (e.g., 3.5)
- 3rd PR: 1.0 + (MAX_BOOST * 0.25) (e.g., 2.25)
- ... and so on until it reaches ~1.0.
"""
repo_name = target_pr.repository_full_name
target_time = target_pr.merged_at
previous_prs_count = 0

for uid, evaluation in all_evaluations.items():
if evaluation is None:
continue
for pr in evaluation.merged_pull_requests:
if pr.repository_full_name == repo_name and pr.merged_at:
# Count how many PRs were merged BEFORE the target PR
if pr.merged_at < target_time:
previous_prs_count += 1

# Tie-breaker for PRs merged at the exact same second
elif pr.merged_at == target_time and pr.number != target_pr.number:
if uid < target_miner_eval.uid:
previous_prs_count += 1
# If same miner merged two PRs at the same exact second, break tie by PR number
elif uid == target_miner_eval.uid and pr.number < target_pr.number:
previous_prs_count += 1

# Apply exponential decay: halving the boost for each prior PR
pioneer_bonus = PIONEER_PR_BOOST * (0.5 ** previous_prs_count)

if pioneer_bonus > 0.05: # Only log significant bonuses to reduce noise
bt.logging.info(f"PIONEER BONUS: {repo_name} | PR #{target_pr.number} (Miner {target_miner_eval.uid}) "
f"| Prior PRs: {previous_prs_count} | Multiplier: {1.0 + pioneer_bonus:.2f}")

return 1.0 + pioneer_bonus


def calculate_issue_multiplier(pr: PullRequest) -> float:
Expand Down
4 changes: 2 additions & 2 deletions gittensor/validator/storage/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
number, repository_full_name, uid, hotkey, github_id, title, author_login,
merged_at, pr_created_at, pr_state,
repo_weight_multiplier, base_score, issue_multiplier,
open_pr_spam_multiplier, repository_uniqueness_multiplier, time_decay_multiplier,
open_pr_spam_multiplier, pioneer_multiplier, time_decay_multiplier,
credibility_multiplier, raw_credibility, credibility_scalar,
earned_score, collateral_score,
additions, deletions, commits, total_nodes_scored,
Expand All @@ -56,7 +56,7 @@
base_score = EXCLUDED.base_score,
issue_multiplier = EXCLUDED.issue_multiplier,
open_pr_spam_multiplier = EXCLUDED.open_pr_spam_multiplier,
repository_uniqueness_multiplier = EXCLUDED.repository_uniqueness_multiplier,
pioneer_multiplier = EXCLUDED.pioneer_multiplier,
time_decay_multiplier = EXCLUDED.time_decay_multiplier,
credibility_multiplier = EXCLUDED.credibility_multiplier,
raw_credibility = EXCLUDED.raw_credibility,
Expand Down
2 changes: 1 addition & 1 deletion gittensor/validator/storage/repository.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ def store_pull_requests_bulk(self, pull_requests: List[PullRequest]) -> int:
pr.base_score,
pr.issue_multiplier,
pr.open_pr_spam_multiplier,
pr.repository_uniqueness_multiplier,
pr.pioneer_multiplier,
pr.time_decay_multiplier,
pr.credibility_multiplier,
pr.raw_credibility,
Expand Down
3 changes: 2 additions & 1 deletion gittensor/validator/weights/master_repositories.json
Original file line number Diff line number Diff line change
Expand Up @@ -498,7 +498,7 @@
"weight": 4.25
},
"autoppia/autoppia_web_agents_subnet": {
"additional_acceptable_branches": ["dev"],
"additional_acceptable_branches": ["dev", "dev-gittensor"],
"tier": "Silver",
"weight": 6.46
},
Expand Down Expand Up @@ -746,6 +746,7 @@
"weight": 0.19
},
"bokeh/bokeh": {
"additional_acceptable_branches": ["branch-*.*"],
"tier": "Bronze",
"weight": 0.17
},
Expand Down
Loading