-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
470 lines (390 loc) · 19.1 KB
/
app.py
File metadata and controls
470 lines (390 loc) · 19.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
from flask import Flask, render_template, request, jsonify, send_from_directory
import pandas as pd
from datetime import datetime, timedelta
from layerbot.utils.scan_time import get_last_scan_time
import subprocess
import json
import os
import argparse
from pathlib import Path
from dotenv import load_dotenv
# Load environment variables from .env file so configuration (mount path,
# bridge contract addresses, Flask host/port/debug) can live outside the shell.
load_dotenv()
app = Flask(__name__)
# Get mount path from environment variable, default to empty string for root mount.
# The trailing '/' is stripped so f-string routes like f'{MOUNT_PATH}/static/...'
# never produce a double slash.
MOUNT_PATH = os.environ.get('MOUNT_PATH', '').rstrip('/')
# Map known bridge contract addresses to human-readable version labels.
# Keys are lowercased for case-insensitive matching.
def _build_contract_versions():
mapping = {}
for env_var, label in [
('BRIDGE_CONTRACT_ADDRESS_0', 'V0'),
('BRIDGE_CONTRACT_ADDRESS_1', 'V1'),
('BRIDGE_CONTRACT_ADDRESS_CURRENT', 'V1'),
('BRIDGE_CONTRACT_V2_ADDRESS', 'V2'),
]:
addr = os.environ.get(env_var, '')
if addr:
mapping[addr.lower()] = label
return mapping
# Snapshot the address -> version-label mapping once at import time; changes
# to the environment after startup are not picked up.
CONTRACT_VERSIONS = _build_contract_versions()
# Ensure mount path starts with / if it's not empty
if MOUNT_PATH and not MOUNT_PATH.startswith('/'):
    MOUNT_PATH = '/' + MOUNT_PATH
def prepare_chart_data(deposits_df):
    """Transform a deposits DataFrame into chart-ready point series.

    Returns a dict with two keys:
      - 'individual_deposits': one scatter point per valid row
        ({x, y, deposit_id, formatted_date})
      - 'cumulative_deposits': running-total points in chronological order
        ({x, y, count, formatted_date})
    Rows whose Timestamp or Amount is NaN are skipped, and a row that fails
    conversion is logged and skipped. Any unexpected failure (e.g. a missing
    column) is logged and yields empty series for both keys.
    """
    scatter_points = []
    running_points = []
    try:
        # Work on a sorted copy so the caller's frame is never mutated.
        ordered = deposits_df.copy().sort_values('Timestamp')
        running_total = 0
        for _, record in ordered.iterrows():
            try:
                # Skip rows missing either chart coordinate.
                if pd.isna(record['Timestamp']) or pd.isna(record['Amount']):
                    continue
                when = record['Timestamp'].isoformat()
                amount = float(record['Amount'])
                scatter_points.append({
                    'x': when,
                    'y': amount,
                    'deposit_id': int(record['Deposit ID']),
                    'formatted_date': record['Formatted_Timestamp'],
                })
                running_total += amount
                running_points.append({
                    'x': when,
                    'y': running_total,
                    'count': len(running_points) + 1,
                    'formatted_date': record['Formatted_Timestamp'],
                })
            except Exception as e:
                print(f"Error processing row {record.get('Deposit ID', 'unknown')}: {e}")
                continue
        return {
            'individual_deposits': scatter_points,
            'cumulative_deposits': running_points,
        }
    except Exception as e:
        print(f"Error preparing chart data: {e}")
        return {
            'individual_deposits': [],
            'cumulative_deposits': []
        }
def prepare_withdrawals_chart_data(withdrawals_df):
    """Transform a withdrawals DataFrame into chart-ready point series.

    Returns a dict with two keys:
      - 'individual_withdrawals': one scatter point per valid row
        ({x, y, withdraw_id, formatted_date})
      - 'cumulative_withdrawals': running-total points in chronological order
        ({x, y, count, formatted_date})
    Rows whose Timestamp or Amount_TRB is NaN are skipped, and a row that
    fails conversion is logged and skipped. Any unexpected failure (e.g. a
    missing column) is logged and yields empty series for both keys.
    """
    scatter_points = []
    running_points = []
    try:
        # Work on a sorted copy so the caller's frame is never mutated.
        ordered = withdrawals_df.copy().sort_values('Timestamp')
        running_total = 0
        for _, record in ordered.iterrows():
            try:
                # Skip rows missing either chart coordinate.
                if pd.isna(record['Timestamp']) or pd.isna(record['Amount_TRB']):
                    continue
                when = record['Timestamp'].isoformat()
                amount = float(record['Amount_TRB'])
                scatter_points.append({
                    'x': when,
                    'y': amount,
                    'withdraw_id': int(record['withdraw_id']),
                    'formatted_date': record['Formatted_Timestamp'],
                })
                running_total += amount
                running_points.append({
                    'x': when,
                    'y': running_total,
                    'count': len(running_points) + 1,
                    'formatted_date': record['Formatted_Timestamp'],
                })
            except Exception as e:
                print(f"Error processing withdrawal row {record.get('withdraw_id', 'unknown')}: {e}")
                continue
        return {
            'individual_withdrawals': scatter_points,
            'cumulative_withdrawals': running_points,
        }
    except Exception as e:
        print(f"Error preparing withdrawals chart data: {e}")
        return {
            'individual_withdrawals': [],
            'cumulative_withdrawals': []
        }
@app.route('/')
def show_deposits():
    """Render the bridge dashboard: deposits table, withdrawals table, charts.

    Reads bridge_deposits.csv and the withdrawals CSV named by the
    BRIDGE_WITHDRAWALS_CSV env var (default bridge_withdrawals.csv), derives
    per-row status and highlight flags, and renders deposits.html.
    """
    # Read the deposits CSV file
    deposits_df = pd.read_csv('bridge_deposits.csv')
    # Get the most recent scan time
    most_recent_scan = get_last_scan_time()
    if not most_recent_scan:
        most_recent_scan = "No scan time available"
    # Convert timestamp columns to more readable format with error handling (for all data)
    try:
        deposits_df['Timestamp'] = pd.to_datetime(deposits_df['Timestamp'], errors='coerce')
        # Remove rows with invalid timestamps
        deposits_df = deposits_df.dropna(subset=['Timestamp'])
        deposits_df['Formatted_Timestamp'] = deposits_df['Timestamp'].dt.strftime('%Y-%m-%d %H:%M:%S UTC')
        # Calculate age of deposits as a coarse "Ns/Nm/Nh/Nd ago" string.
        def format_time_ago(timestamp):
            if pd.isna(timestamp):
                return 'N/A'
            try:
                # NOTE(review): datetime.now() is the server's LOCAL wall clock
                # but is tagged below with the timestamp's tz (UTC for naive
                # inputs); if the server is not on UTC, ages are skewed by the
                # UTC offset — confirm.
                now = datetime.now()
                if timestamp.tz is None:
                    timestamp = timestamp.tz_localize('UTC')
                now = now.replace(tzinfo=timestamp.tz)
                diff = now - timestamp
                total_seconds = int(diff.total_seconds())
                if total_seconds < 60:
                    return f"{total_seconds}s ago"
                elif total_seconds < 3600:
                    minutes = total_seconds // 60
                    return f"{minutes}m ago"
                elif total_seconds < 86400:
                    hours = total_seconds // 3600
                    return f"{hours}h ago"
                else:
                    days = total_seconds // 86400
                    return f"{days}d ago"
            except Exception as e:
                print(f"Error calculating age for timestamp {timestamp}: {e}")
                return 'N/A'
        deposits_df['Age'] = deposits_df['Timestamp'].apply(format_time_ago)
    except Exception as e:
        print(f"Error processing timestamps: {e}")
        # Fallback: create dummy timestamps if all fail
        deposits_df['Timestamp'] = pd.to_datetime('1970-01-01')
        deposits_df['Formatted_Timestamp'] = '1970-01-01 00:00:00 UTC'
        deposits_df['Age'] = 'N/A'
    # Compute a human-readable contract version label (V0 / V1 / V2) from the address
    if 'Bridge Contract Address' in deposits_df.columns:
        deposits_df['Contract_Version'] = (
            deposits_df['Bridge Contract Address']
            .fillna('')
            .str.lower()
            .map(CONTRACT_VERSIONS)
            .fillna('Unknown')
        )
    else:
        deposits_df['Contract_Version'] = 'Unknown'
    # Keep original data for chart (after timestamp processing, before filtering)
    chart_deposits_df = deposits_df.copy()
    # Filter out deposit IDs 27 and 32 for table display only
    deposits_df = deposits_df[~deposits_df['Deposit ID'].isin([27, 32])]
    # Convert the large numbers to ETH format (divide by 10^18) for both datasets
    deposits_df['Amount'] = deposits_df['Amount'].apply(lambda x: float(x) / 1e18)
    chart_deposits_df['Amount'] = chart_deposits_df['Amount'].apply(lambda x: float(x) / 1e18)
    # Calculate which rows need highlighting
    current_time = datetime.now().timestamp()
    twelve_hours = 12 * 60 * 60  # 12 hours in seconds
    fourteen_hours = 14 * 60 * 60  # 14 hours in seconds
    # Convert deposit timestamp for comparison (epoch seconds; None when invalid)
    deposit_timestamps = deposits_df['Timestamp'].apply(lambda x: x.timestamp() if pd.notna(x) else None)
    # Calculate status based on time and claimed status
    def calculate_status(row):
        # Check if already claimed/completed (handle both old 'Claimed' and new 'Status' columns)
        if pd.notna(row.get('Status')) and str(row['Status']).lower() == 'completed':
            return 'completed'
        elif pd.notna(row.get('Claimed')) and str(row['Claimed']).lower() == 'yes':
            return 'completed'
        # Calculate time since deposit for unclaimed deposits
        if pd.notna(row['Timestamp']):
            deposit_time = row['Timestamp'].timestamp()
            time_elapsed = current_time - deposit_time
            if time_elapsed < fourteen_hours:
                return 'in progress'
            else:
                return 'past due'
        else:
            return 'past due'  # Default for invalid timestamps
    deposits_df['Status'] = deposits_df.apply(calculate_status, axis=1)
    # Ready to claim status (green) - based on deposit timestamp
    deposits_df['ready_to_claim'] = (
        (deposits_df['Status'].str.lower() != 'completed') &
        (deposit_timestamps.notna()) &
        ((current_time - deposit_timestamps) > twelve_hours)
    )
    # Recent scan status (pale green)
    if isinstance(most_recent_scan, str) and most_recent_scan != "No scan time available":
        most_recent_scan_time = pd.to_datetime(most_recent_scan).timestamp()
        deposits_df['recent_scan'] = (
            (deposit_timestamps.notna()) &
            ((most_recent_scan_time - deposit_timestamps) <= twelve_hours) &
            (deposits_df['Status'].str.lower() != 'completed')  # Exclude completed deposits
        )
    else:
        deposits_df['recent_scan'] = False
    # Invalid recipient status (red): recipient must be a tellor1... address
    deposits_df['invalid_recipient'] = ~deposits_df['Recipient'].fillna('').str.startswith('tellor1')
    # Sort the deposits dataframe - highest deposit ID first
    deposits_df['Status'] = deposits_df['Status'].fillna('past due')
    deposits_df = deposits_df.sort_values(
        by=['Deposit ID'],
        ascending=[False]
    )
    # Read the withdrawals CSV file
    try:
        withdrawals_csv = os.getenv('BRIDGE_WITHDRAWALS_CSV', 'bridge_withdrawals.csv')
        withdrawals_df = pd.read_csv(withdrawals_csv)
        # Handle timestamp column if it exists
        if 'Timestamp' in withdrawals_df.columns:
            try:
                withdrawals_df['Timestamp'] = pd.to_datetime(withdrawals_df['Timestamp'], errors='coerce')
                # Remove rows with invalid timestamps in the Timestamp column
                valid_timestamp_mask = withdrawals_df['Timestamp'].notna()
                # For rows with valid timestamps, format them nicely
                withdrawals_df.loc[valid_timestamp_mask, 'Formatted_Timestamp'] = withdrawals_df.loc[valid_timestamp_mask, 'Timestamp'].dt.strftime('%Y-%m-%d %H:%M:%S UTC')
                # For rows with invalid timestamps, set to 'N/A'
                withdrawals_df.loc[~valid_timestamp_mask, 'Formatted_Timestamp'] = 'N/A'
                # Calculate age of withdrawals (same local-clock caveat as the
                # deposits format_time_ago above).
                def format_time_ago(timestamp):
                    if pd.isna(timestamp):
                        return 'N/A'
                    try:
                        now = datetime.now()
                        if timestamp.tz is None:
                            timestamp = timestamp.tz_localize('UTC')
                        now = now.replace(tzinfo=timestamp.tz)
                        diff = now - timestamp
                        total_seconds = int(diff.total_seconds())
                        if total_seconds < 60:
                            return f"{total_seconds}s ago"
                        elif total_seconds < 3600:
                            minutes = total_seconds // 60
                            return f"{minutes}m ago"
                        elif total_seconds < 86400:
                            hours = total_seconds // 3600
                            return f"{hours}h ago"
                        else:
                            days = total_seconds // 86400
                            return f"{days}d ago"
                    except Exception as e:
                        print(f"Error calculating age for withdrawal timestamp {timestamp}: {e}")
                        return 'N/A'
                withdrawals_df['Age'] = withdrawals_df['Timestamp'].apply(format_time_ago)
                # Calculate hours since withdrawal for status logic
                def calculate_hours_since(timestamp):
                    if pd.isna(timestamp):
                        return None
                    try:
                        now = datetime.now()
                        if timestamp.tz is None:
                            timestamp = timestamp.tz_localize('UTC')
                        now = now.replace(tzinfo=timestamp.tz)
                        diff = now - timestamp
                        return diff.total_seconds() / 3600
                    except Exception as e:
                        print(f"Error calculating hours since withdrawal {timestamp}: {e}")
                        return None
                withdrawals_df['hours_since_withdrawal'] = withdrawals_df['Timestamp'].apply(calculate_hours_since)
            except Exception as e:
                print(f"Error processing withdrawal timestamps: {e}")
                withdrawals_df['Formatted_Timestamp'] = 'N/A'
                withdrawals_df['Age'] = 'N/A'
                withdrawals_df['hours_since_withdrawal'] = None
        else:
            # If no Timestamp column exists, create a placeholder
            withdrawals_df['Formatted_Timestamp'] = 'N/A'
            withdrawals_df['Age'] = 'N/A'
            withdrawals_df['hours_since_withdrawal'] = None
        # Handle withdraw_id column
        if withdrawals_df['withdraw_id'].dtype == 'object':
            # If it's a string, clean it up
            withdrawals_df['withdraw_id'] = withdrawals_df['withdraw_id'].str.replace('"', '')
            # Convert to numeric
            withdrawals_df['withdraw_id'] = pd.to_numeric(withdrawals_df['withdraw_id'])
        # Convert boolean columns to proper format
        # 'success' may be blank for stub rows — treat blank as False
        withdrawals_df['success'] = (
            withdrawals_df['success']
            .fillna('')
            .astype(str)
            .str.strip()
            .str.lower()
            .isin(['true', '1', 'yes'])
        )
        # 'Claimed' can be True/False or blank ('') for stub rows where we haven't
        # yet confirmed status. Blank rows will show as "Unknown" in the UI.
        if 'Claimed' not in withdrawals_df.columns:
            withdrawals_df['Claimed'] = False
        else:
            withdrawals_df['Claimed'] = (
                withdrawals_df['Claimed']
                .fillna('')
                .astype(str)
                .str.strip()
                .str.lower()
                .isin(['true', '1', 'yes'])
            )
        # Rows with no transaction data (no creator AND no amount) are stub rows
        # that represent withdrawal IDs we know exist on-chain but have no details for.
        has_creator = withdrawals_df['creator'].fillna('').astype(str).str.strip().ne('')
        has_amount = withdrawals_df['Amount'].fillna('').astype(str).str.strip().ne('')
        withdrawals_df['has_tx_data'] = has_creator | has_amount
        # Convert Amount to TRB format if it exists (divide by 10^6 for loya to TRB conversion)
        if 'Amount' in withdrawals_df.columns:
            try:
                withdrawals_df['Amount'] = pd.to_numeric(withdrawals_df['Amount'], errors='coerce')
                withdrawals_df['Amount_TRB'] = withdrawals_df['Amount'] / 1e6  # Convert loya to TRB
            except Exception as e:
                print(f"Error processing withdrawal amounts: {e}")
                withdrawals_df['Amount_TRB'] = 0
        else:
            withdrawals_df['Amount_TRB'] = 0
        # Sort by withdraw_id in descending order
        withdrawals_df = withdrawals_df.sort_values('withdraw_id', ascending=False)
        withdrawals = withdrawals_df.to_dict('records')
    except Exception as e:
        print(f"Error reading withdrawals CSV: {e}")
        withdrawals = []
    # Prepare chart data for deposits over time visualization (using unfiltered data)
    chart_data = prepare_chart_data(chart_deposits_df)
    # Prepare withdrawals chart data
    withdrawals_chart_data = prepare_withdrawals_chart_data(pd.DataFrame(withdrawals))
    # Convert DataFrames to list of dictionaries
    deposits = deposits_df.to_dict('records')
    return render_template('deposits.html',
                           deposits=deposits,
                           withdrawals=withdrawals,
                           most_recent_scan=most_recent_scan,
                           chart_data=chart_data,
                           withdrawals_chart_data=withdrawals_chart_data,
                           mount_path=MOUNT_PATH)
# Routes for both mount path and root to work with reverse proxy
if MOUNT_PATH:
    # When served under a sub-path (e.g. behind nginx at /bridge), register
    # the dashboard at the mounted path too.
    @app.route(f'{MOUNT_PATH}/')
    def show_deposits_mounted():
        return show_deposits()

    # NOTE(review): indentation was lost in extraction; reconstructed as
    # nested under `if MOUNT_PATH:`. '/' is already routed to show_deposits
    # above, so this extra endpoint looks redundant — confirm intended
    # placement against the original file.
    @app.route('/')
    def show_deposits_root():
        return show_deposits()

# Add static file serving for mount path
if MOUNT_PATH:
    @app.route(f'{MOUNT_PATH}/static/<path:filename>')
    def mounted_static(filename):
        # Serve Flask's static folder under the mounted prefix as well.
        return send_from_directory(app.static_folder, filename)
if __name__ == '__main__':
    # Parse command line arguments; env vars supply the defaults so the app
    # can be configured either way.
    parser = argparse.ArgumentParser(description='Run the Flask bridge monitoring app')
    parser.add_argument('--port', '-p', type=int,
                        default=int(os.environ.get('FLASK_PORT', 5000)),
                        help='Port to run the Flask app on (default: 5000, can also be set via FLASK_PORT env var)')
    parser.add_argument('--host', type=str,
                        default=os.environ.get('FLASK_HOST', '127.0.0.1'),
                        help='Host to bind the Flask app to (default: 127.0.0.1, can also be set via FLASK_HOST env var)')
    # NOTE(review): debug defaults to True unless FLASK_DEBUG is explicitly
    # falsy, and --debug (store_true) can only turn it ON, never off. The
    # Werkzeug debugger must not be exposed in production — confirm deployment
    # always sets FLASK_DEBUG=false.
    parser.add_argument('--debug', action='store_true',
                        default=os.environ.get('FLASK_DEBUG', 'True').lower() in ['true', '1', 'yes'],
                        help='Run in debug mode (default: True, can also be set via FLASK_DEBUG env var)')
    args = parser.parse_args()
    print(f"Starting Flask app on {args.host}:{args.port}")
    print(f"Debug mode: {args.debug}")
    app.run(host=args.host, port=args.port, debug=args.debug)