-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathblockchain.py
More file actions
471 lines (414 loc) · 23.1 KB
/
blockchain.py
File metadata and controls
471 lines (414 loc) · 23.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
import datetime
import hashlib
import hmac
import json
import logging
import time
import uuid
from argparse import ArgumentParser
from functools import wraps
from os import environ
from urllib.parse import urlparse

import requests
from bson.objectid import ObjectId
from dotenv import load_dotenv
from eth_keys.datatypes import PublicKey, Signature
from flask import Flask, jsonify, request
from flask_cors import CORS
from pymongo import MongoClient, DESCENDING
from pymongo.errors import ConnectionFailure
# --- Setup Logging ---
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# Load environment variables from a local .env file before reading config.
load_dotenv()
# =============================================================================
# BunkNet Configuration
# =============================================================================
# All settings come from the environment with development-friendly defaults.
# NOTE(review): the default ADMIN/P2P keys are hard-coded fallbacks — ensure
# real deployments override them via the environment.
MONGO_URI = environ.get('BUNKNET_MONGO_URI', 'mongodb://localhost:27017/?replicaSet=rs0')
ADMIN_SECRET_KEY = environ.get('BUNKNET_ADMIN_KEY', 'bunknet_super_admin_key')
P2P_SECRET_KEY = environ.get('BUNKNET_P2P_KEY', 'bunknet_super_secret_p2p_key')
MINER_ADDRESS = environ.get('BUNKNET_MINER_ADDRESS', '0x000000000000000000000000000000000000BEEF')
# No default: Blockchain.__init__ raises if this is unset on first run.
TREASURY_ADDRESS = environ.get('BUNKNET_TREASURY_ADDRESS')
# --- Tokenomics & Protocol Configuration ---
INITIAL_SUPPLY = 100000000.0          # minted to the treasury in the genesis block
BASE_BLOCK_REWARD = 50.0              # miner reward before fees
DIFFICULTY_ADJUSTMENT_INTERVAL = 10   # re-target difficulty every N blocks
TARGET_BLOCK_TIME = 15 # Seconds
MAX_TRANSACTIONS_PER_BLOCK = 200      # cap on mempool txs pulled into one block
# =============================================================================
# Application & Database Setup
# =============================================================================
app = Flask(__name__)
CORS(app)
try:
    client = MongoClient(MONGO_URI)
    # Cheap round-trip to verify the server is reachable at startup.
    client.admin.command('ismaster')
    logging.info("MongoDB connection successful.")
except ConnectionFailure:
    # A replica set is required because the node uses multi-document transactions.
    logging.error("FATAL: Could not connect to MongoDB. Ensure it's running as a replica set.")
    exit()
db = client["bunknet_node"]
blocks_col = db["blocks"]                  # mined blocks (the chain)
mempool_col = db["mempool"]                # pending transactions
state_col = db["state"]                    # per-address balance/nonce world state
config_col = db["config"]                  # protocol config (difficulty prefix)
address_labels_col = db["address_labels"]  # human-readable address labels
# =============================================================================
# Utility & Decorators
# =============================================================================
def prepare_json_response(data):
    """Round-trip *data* through JSON so Mongo-specific types become strings.

    datetime and ObjectId values are stringified; any other non-serializable
    type raises TypeError.
    """
    def _stringify(obj):
        if isinstance(obj, (datetime.datetime, ObjectId)):
            return str(obj)
        raise TypeError(f"Type {type(obj)} not serializable")

    encoded = json.dumps(data, default=_stringify)
    return json.loads(encoded)
def admin_required(f):
    """Decorator: reject the request with 401 unless a valid X-Admin-Key is sent.

    Also fails closed (401) when ADMIN_SECRET_KEY is unset or the header is
    missing. Uses hmac.compare_digest for the comparison — a plain `!=`
    short-circuits on the first differing character and can leak key prefixes
    through response timing.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth_key = request.headers.get('X-Admin-Key')
        # Guard against None before compare_digest, which requires str/bytes.
        if not ADMIN_SECRET_KEY or not auth_key or not hmac.compare_digest(auth_key, ADMIN_SECRET_KEY):
            return jsonify({'error': 'Unauthorized: Admin key required'}), 401
        return f(*args, **kwargs)
    return decorated_function
def p2p_required(f):
    """Decorator: reject the request with 401 unless a valid X-P2P-Key is sent.

    Fails closed (401) when P2P_SECRET_KEY is unset or the header is missing,
    and uses hmac.compare_digest to avoid leaking the key via timing
    differences in the comparison.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth_key = request.headers.get('X-P2P-Key')
        # Guard against None before compare_digest, which requires str/bytes.
        if not P2P_SECRET_KEY or not auth_key or not hmac.compare_digest(auth_key, P2P_SECRET_KEY):
            return jsonify({'error': 'Unauthorized: P2P key required'}), 401
        return f(*args, **kwargs)
    return decorated_function
def public_key_to_address(public_key: PublicKey) -> str:
    """Return the checksummed address string for *public_key* (delegates to eth_keys)."""
    checksum_address = public_key.to_checksum_address()
    return checksum_address
# =============================================================================
# Core Blockchain Class
# =============================================================================
class Blockchain:
    """Core ledger logic: world state, mempool admission, mining, consensus.

    All persistent data lives in MongoDB collections (blocks_col, state_col,
    mempool_col, config_col). Multi-document writes run inside MongoDB
    transactions, which is why the deployment requires a replica set.
    NOTE(review): balances/amounts are handled as floats throughout — binary
    floating-point rounding applies; consider Decimal or integer base units.
    """

    def __init__(self):
        # Known peer addresses ("host:port" strings) for P2P consensus.
        self.nodes = set()
        # First boot: create genesis block and seed the treasury atomically.
        if blocks_col.count_documents({}) == 0:
            logging.info("No existing blockchain found. Creating genesis block...")
            if not TREASURY_ADDRESS: raise ValueError("BUNKNET_TREASURY_ADDRESS must be set in .env")
            with client.start_session() as session:
                with session.start_transaction():
                    # The entire initial supply starts in the treasury account.
                    state_col.insert_one({'_id': TREASURY_ADDRESS, 'balance': INITIAL_SUPPLY, 'nonce': 0}, session=session)
                    genesis_tx = {'transaction_id': str(uuid.uuid4()),'sender': '0','recipient': TREASURY_ADDRESS,'amount': str(INITIAL_SUPPLY),'nonce': 0,'type': 'genesis_mint','timestamp': time.time()}
                    self.create_block(proof=1, previous_hash='0', transactions=[genesis_tx], session=session)
                    # Initial difficulty: block hash-ops must start with four hex zeros.
                    config_col.update_one({'_id': 'config'}, {'$set': {'difficulty_prefix': '0000'}}, upsert=True, session=session)
            logging.info("Genesis block and state created successfully.")

    def get_account_state(self, address, session=None):
        """Return {'balance', 'nonce'} for *address*, zeroed if never seen."""
        state = state_col.find_one({'_id': address}, session=session)
        return {'balance': state.get('balance', 0.0), 'nonce': state.get('nonce', 0)} if state else {'balance': 0.0, 'nonce': 0}

    def process_transactions(self, transactions, session=None):
        """Apply the balance/nonce effects of *transactions* to the world state.

        Sender '0' marks system transactions (genesis/reward/mint): no debit,
        no nonce bump. NOTE(review): no re-validation here — balances can go
        negative if callers pass unvalidated transactions.
        """
        for tx in transactions:
            sender, recipient, amount_str = tx['sender'], tx['recipient'], tx['amount']
            # Amounts are stored as strings in tx documents; convert for arithmetic.
            amount = float(amount_str)
            fee = float(tx.get('fee', '0'))
            if sender != '0':
                sender_state = self.get_account_state(sender, session=session)
                new_sender_balance = sender_state['balance'] - amount - fee
                new_sender_nonce = sender_state['nonce'] + 1
                state_col.update_one({'_id': sender}, {'$set': {'balance': new_sender_balance, 'nonce': new_sender_nonce}}, upsert=True, session=session)
            recipient_state = self.get_account_state(recipient, session=session)
            new_recipient_balance = recipient_state['balance'] + amount
            state_col.update_one({'_id': recipient}, {'$set': {'balance': new_recipient_balance}}, upsert=True, session=session)

    def add_transaction_to_mempool(self, sender, recipient, amount, fee, nonce, signature, public_key):
        """Validate a signed transfer and insert it into the mempool.

        Checks, in order: signature recovers a key whose address matches
        *sender*; nonce equals the account's current nonce; balance covers
        amount + fee; no duplicate (sender, nonce) already pending.
        Returns the stored transaction dict, or {'error': ...} on rejection.
        """
        tx_data = {'sender': sender, 'recipient': recipient, 'amount': amount, 'fee': fee, 'nonce': nonce}
        is_valid, derived_pk = self.verify_signature(signature, tx_data)
        if not is_valid:
            return {'error': 'Invalid signature.'}
        derived_address = public_key_to_address(derived_pk)
        # Case-insensitive compare: checksummed vs lowercased hex addresses.
        if sender.lower() != derived_address.lower():
            return {'error': 'Sender address does not match the signature.'}
        account_state = self.get_account_state(sender)
        if int(nonce) != account_state['nonce']:
            return {'error': f"Invalid nonce. Expected {account_state['nonce']}, got {nonce}."}
        if account_state['balance'] < (float(amount) + float(fee)):
            return {'error': 'Insufficient funds.'}
        if mempool_col.find_one({'sender': sender, 'nonce': int(nonce)}):
            return {'error': 'Transaction with this nonce already in mempool.'}
        full_tx = {**tx_data, 'transaction_id': str(uuid.uuid4()), 'type': 'transfer', 'timestamp': time.time(), 'signature': signature, 'public_key': public_key}
        mempool_col.insert_one(full_tx)
        return full_tx

    @staticmethod
    def verify_signature(signature_hex, transaction_data):
        """Recover the signer's public key from *signature_hex*.

        The signed message is the SHA-256 of the canonical JSON encoding of
        *transaction_data* (sorted keys, compact separators) — wallets must
        serialize identically. Returns (True, PublicKey) or (False, None).
        """
        try:
            signature_bytes = bytes.fromhex(signature_hex)
            sig = Signature(signature_bytes)
            tx_data_str = json.dumps(transaction_data, sort_keys=True, separators=(',', ':')).encode()
            message_hash = hashlib.sha256(tx_data_str).digest()
            recovered_pk = sig.recover_public_key_from_msg_hash(message_hash)
            return True, recovered_pk
        except Exception as e:
            logging.error(f"Signature verification failed: {e}")
            return False, None

    def mine_block(self):
        """Mine one block: PoW, apply txs + reward, persist — all atomically.

        Returns the new block dict, or None if there is no tip or the
        transaction aborted. NOTE(review): proof_of_work runs before the
        session opens, so the tip could in principle change while hashing.
        """
        prev_block = self.get_previous_block()
        if not prev_block:
            logging.error("Could not find previous block to mine on top of.")
            return None
        proof = self.proof_of_work(prev_block['proof'])
        with client.start_session() as session:
            with session.start_transaction():
                try:
                    # Pull up to MAX_TRANSACTIONS_PER_BLOCK pending txs.
                    mempool_txs = list(mempool_col.find({}, session=session).limit(MAX_TRANSACTIONS_PER_BLOCK))
                    transactions_for_block = [{k: v for k, v in tx.items() if k != '_id'} for tx in mempool_txs]
                    total_fees = sum(float(tx.get('fee', '0')) for tx in transactions_for_block)
                    # Coinbase: base reward plus all fees goes to this node's miner address.
                    reward_tx = {'transaction_id': str(uuid.uuid4()),'sender': '0','recipient': MINER_ADDRESS,'amount': str(BASE_BLOCK_REWARD + total_fees),'nonce': -1,'type': 'reward','timestamp': time.time()}
                    transactions_to_process = transactions_for_block + [reward_tx]
                    self.process_transactions(transactions_to_process, session=session)
                    block = self.create_block(proof, self.hash(prev_block), transactions_to_process, session=session)
                    if mempool_txs:
                        # Remove only the txs that made it into the block.
                        processed_tx_ids = [tx['_id'] for tx in mempool_txs]
                        mempool_col.delete_many({'_id': {'$in': processed_tx_ids}}, session=session)
                    self.adjust_difficulty(block, session=session)
                    logging.info(f"Block {block['index']} mined successfully with {len(transactions_for_block)} transactions.")
                    return block
                except Exception as e:
                    # Any failure aborts the whole transaction — no partial state.
                    logging.error(f"ATOMIC MINE FAILED: Transaction aborted due to an error: {e}")
                    return None

    def adjust_difficulty(self, last_block, session=None):
        """Re-target difficulty every DIFFICULTY_ADJUSTMENT_INTERVAL blocks.

        Blocks arriving faster than 2/3 of the expected pace add a required
        leading zero; slower than 1.5x (and prefix longer than 2) removes one.
        """
        if last_block['index'] % DIFFICULTY_ADJUSTMENT_INTERVAL != 0 or last_block['index'] <= 1: return
        prev_adjustment_block = blocks_col.find_one({'index': last_block['index'] - DIFFICULTY_ADJUSTMENT_INTERVAL}, session=session)
        if not prev_adjustment_block: return
        time_elapsed = last_block['timestamp'] - prev_adjustment_block['timestamp']
        expected_time = DIFFICULTY_ADJUSTMENT_INTERVAL * TARGET_BLOCK_TIME
        current_prefix = self.get_difficulty_prefix(session=session)
        if time_elapsed < expected_time / 1.5: new_prefix = current_prefix + '0'
        elif time_elapsed > expected_time * 1.5 and len(current_prefix) > 2: new_prefix = current_prefix[:-1]
        else: return
        logging.info(f"Adjusting difficulty from {len(current_prefix)} to {len(new_prefix)} zeros.")
        config_col.update_one({'_id': 'config'}, {'$set': {'difficulty_prefix': new_prefix}}, upsert=True, session=session)

    def get_difficulty_prefix(self, session=None):
        """Return the current required hash prefix (default '0000')."""
        config = config_col.find_one({'_id': 'config'}, session=session)
        return config.get('difficulty_prefix', '0000') if config else '0000'

    def proof_of_work(self, previous_proof, session=None):
        """Brute-force a proof whose derived SHA-256 starts with the difficulty prefix."""
        new_proof = 1; difficulty_prefix = self.get_difficulty_prefix(session=session)
        while True:
            # The hashed quantity couples the new proof to the previous one.
            hash_op = hashlib.sha256(str(new_proof**2 - previous_proof**2).encode()).hexdigest()
            if hash_op.startswith(difficulty_prefix): return new_proof
            new_proof += 1

    def create_block(self, proof, previous_hash, transactions, session=None):
        """Build, hash, and persist the next block; returns the block dict."""
        last_block = self.get_previous_block(session=session)
        index = last_block['index'] + 1 if last_block else 1
        block = {'index': index,'timestamp': time.time(),'proof': proof,'previous_hash': previous_hash,'transactions': transactions,'difficulty_prefix': self.get_difficulty_prefix(session=session)}
        # The stored hash covers every field above (computed before insert).
        block['hash'] = self.hash(block)
        blocks_col.insert_one(block, session=session)
        return block

    def get_previous_block(self, session=None):
        """Return the highest-index (tip) block, or None on an empty chain."""
        return blocks_col.find_one(sort=[("index", DESCENDING)], session=session)

    @staticmethod
    def hash(block):
        """SHA-256 of the block's canonical JSON, excluding its own 'hash' field."""
        block_copy = block.copy(); block_copy.pop('hash', None)
        # default=str covers non-JSON types such as Mongo's ObjectId _id.
        block_string = json.dumps(block_copy, sort_keys=True, default=str).encode()
        return hashlib.sha256(block_string).hexdigest()

    def add_node(self, address):
        """Register a peer; accepts full URLs or bare host:port strings."""
        parsed_url = urlparse(address)
        self.nodes.add(parsed_url.netloc or parsed_url.path)

    @staticmethod
    def is_chain_valid(chain):
        """Check hash linkage and proof-of-work for every block after the first."""
        previous_block = chain[0]
        for i in range(1, len(chain)):
            block = chain[i]
            if block['previous_hash'] != Blockchain.hash(previous_block): return False
            proof, previous_proof = block['proof'], previous_block['proof']
            hash_operation = hashlib.sha256(str(proof**2 - previous_proof**2).encode()).hexdigest()
            # Validate against the difficulty recorded in the block itself.
            if not hash_operation.startswith(block.get('difficulty_prefix', '0000')): return False
            previous_block = block
        return True

    def resolve_conflicts(self):
        """Longest-valid-chain consensus across registered peers.

        If a longer valid chain is found, atomically replace our blocks,
        wipe state + mempool, and replay every transaction to rebuild the
        world state. Returns True if our chain was replaced.
        """
        neighbours = self.nodes
        new_chain = None
        max_length = blocks_col.count_documents({})
        for node in neighbours:
            try:
                headers = {'X-P2P-Key': P2P_SECRET_KEY}
                response = requests.get(f'http://{node}/get_chain', headers=headers, timeout=5)
                if response.status_code == 200:
                    length, chain = response.json()['length'], response.json()['chain']
                    if length > max_length and self.is_chain_valid(chain):
                        max_length, new_chain = length, chain
            except requests.exceptions.RequestException: continue
        if new_chain:
            logging.info("Found a longer valid chain. Atomically rebuilding local state...")
            with client.start_session() as session:
                with session.start_transaction():
                    blocks_col.delete_many({}, session=session)
                    blocks_col.insert_many(new_chain, session=session)
                    state_col.delete_many({}, session=session)
                    mempool_col.delete_many({}, session=session)
                    logging.info("Re-processing transactions to rebuild the world state...")
                    all_blocks = list(blocks_col.find(sort=[("index", 1)], session=session))
                    for block in all_blocks:
                        self.process_transactions(block['transactions'], session=session)
            logging.info("State rebuild complete. Chain is now authoritative.")
            return True
        logging.info("Our chain is authoritative.")
        return False
# =============================================================================
# Flask API Endpoints
# =============================================================================
# Single shared instance; constructing it creates the genesis block and seeds
# the treasury state on first run (see Blockchain.__init__).
blockchain = Blockchain()
@app.route('/new_transaction', methods=['POST'])
def new_transaction_endpoint():
    """Validate and enqueue a signed transfer into the mempool.

    Returns 201 with the transaction id on success, 400 on a missing/invalid
    JSON body, missing fields, or mempool rejection.
    """
    # silent=True: malformed JSON yields None instead of an exception/500.
    values = request.get_json(silent=True) or {}
    required = ['sender', 'recipient', 'amount', 'fee', 'nonce', 'signature', 'public_key']
    if not all(key in values for key in required):
        return jsonify({'error': 'Missing required fields'}), 400
    # Forward only the expected fields: splatting **values verbatim raised
    # TypeError (HTTP 500) whenever a client sent any extra key.
    result = blockchain.add_transaction_to_mempool(**{key: values[key] for key in required})
    if 'error' in result:
        return jsonify(result), 400
    return jsonify({'message': 'Transaction added to mempool', 'transaction_id': result['transaction_id']}), 201
@app.route('/status', methods=['GET'])
def get_status():
    """Summarize node health: chain length, mempool size, tx total, block timing."""
    try:
        chain_length = blocks_col.count_documents({})
        pending_count = mempool_col.count_documents({})
        tip_block = blockchain.get_previous_block()
        avg_block_time = 0
        hash_rate = 0
        # Count every mined transaction server-side via aggregation rather
        # than pulling blocks into Python.
        count_result = list(blocks_col.aggregate([
            {"$unwind": "$transactions"},
            {"$count": "total_transactions"},
        ]))
        total_transactions = count_result[0]['total_transactions'] if count_result else 0
        if tip_block and chain_length > 10:
            # Average the spacing over the last ~10 blocks.
            recent = list(blocks_col.find({'index': {'$gt': chain_length - 10}}).sort("index", 1))
            if len(recent) > 1:
                span = recent[-1]['timestamp'] - recent[0]['timestamp']
                avg_block_time = span / (len(recent) - 1)
                # Estimated hashes per block: 16^(hex-zero prefix length).
                difficulty = 16 ** len(blockchain.get_difficulty_prefix())
                hash_rate = difficulty / avg_block_time if avg_block_time > 0 else 0
        return jsonify({
            'chain_length': chain_length,
            'pending_transactions': pending_count,
            'total_transactions': total_transactions,
            'last_block_hash': tip_block['hash'] if tip_block else '0',
            'average_block_time': avg_block_time,
            'hash_rate': int(hash_rate)
        }), 200
    except Exception as e:
        logging.error(f"Error in /status endpoint: {e}")
        return jsonify({"error": "An internal error occurred."}), 500
@app.route('/get_chain', methods=['GET'])
def get_chain():
    """Return the chain newest-first; optional pagination via ?page= and ?limit=."""
    try:
        page_num = int(request.args.get('page', 1))
        page_size = int(request.args.get('limit', 0))
        cursor = blocks_col.find({}, {'_id': 0}).sort("index", DESCENDING)
        block_count = blocks_col.count_documents({})
        # limit=0 (the default) means "no pagination": return everything.
        if page_size > 0:
            cursor = cursor.skip((page_num - 1) * page_size).limit(page_size)
        return jsonify({'chain': prepare_json_response(list(cursor)), 'length': block_count}), 200
    except Exception as e:
        logging.error(f"Error in /get_chain endpoint: {e}")
        return jsonify({"error": "Could not retrieve chain data."}), 500
@app.route('/get_block/<identifier>', methods=['GET'])
def get_block(identifier):
    """Fetch a single block by numeric index or by its hash string."""
    try:
        try:
            found = blocks_col.find_one({'index': int(identifier)})
        except ValueError:
            # Identifier isn't an integer — treat it as a block hash.
            found = blocks_col.find_one({'hash': identifier})
        if found:
            return jsonify(prepare_json_response(found)), 200
        return jsonify({'error': 'Block not found'}), 404
    except Exception as e:
        logging.error(f"Error in /get_block endpoint: {e}")
        return jsonify({"error": "An internal error occurred."}), 500
@app.route('/mine_block', methods=['GET'])
def mine_block_endpoint():
    """Trigger mining of one block; 500 when the atomic mine aborted."""
    forged = blockchain.mine_block()
    if forged is None:
        return jsonify({'error': 'Mining failed and transaction was rolled back.'}), 500
    return jsonify({'message': 'New Block Forged', 'block': prepare_json_response(forged)}), 200
@app.route('/address/<address>', methods=['GET'])
def get_address_details(address):
    """Return balance, nonce, optional label, and full tx history for an address."""
    account = blockchain.get_account_state(address)
    # Flatten every block's transactions, keep those touching this address,
    # and tag each with the index of its containing block.
    history_pipeline = [
        {"$unwind": "$transactions"},
        {"$match": {"$or": [{"transactions.sender": address}, {"transactions.recipient": address}]}},
        {"$replaceRoot": {"newRoot": {"$mergeObjects": ["$transactions", {"block_index": "$index"}]}}},
    ]
    history = list(blocks_col.aggregate(history_pipeline))
    label_doc = address_labels_col.find_one({'address': address})
    label = label_doc.get('label') if label_doc else None
    return jsonify({
        'address': address,
        'balance': account['balance'],
        'nonce': account['nonce'],
        'transactions': prepare_json_response(history),
        'label': label
    }), 200
@app.route('/transaction/<tx_id>', methods=['GET'])
def get_transaction(tx_id):
    """Look up a transaction by id: mined blocks first, then the mempool."""
    containing_block = blocks_col.find_one({"transactions.transaction_id": tx_id}, {'_id': 0})
    if containing_block:
        match = next((tx for tx in containing_block['transactions'] if tx['transaction_id'] == tx_id), None)
        if match is not None:
            return jsonify(prepare_json_response({**match, "block_index": containing_block['index']})), 200
    pending = mempool_col.find_one({"transaction_id": tx_id}, {'_id': 0})
    if pending:
        # Not mined yet — report a sentinel block index.
        return jsonify(prepare_json_response({**pending, "block_index": "Pending"})), 200
    return jsonify({"error": "Transaction not found"}), 404
@app.route('/get_mempool', methods=['GET'])
def get_mempool():
    """Return every pending transaction along with the total count."""
    pending_txs = list(mempool_col.find({}, {'_id': 0}))
    return jsonify({"mempool": pending_txs, "count": len(pending_txs)}), 200
@app.route('/labels', methods=['GET'])
def get_labels():
    """Return all known address labels as an address -> label mapping."""
    try:
        label_map = {doc['address']: doc['label'] for doc in address_labels_col.find({}, {'_id': 0})}
        return jsonify(label_map), 200
    except Exception as e:
        logging.error(f"Error in /labels endpoint: {e}")
        return jsonify({"error": "Could not retrieve labels."}), 500
# --- P2P Networking Endpoints ---
@app.route('/nodes/register', methods=['POST'])
@p2p_required
def register_nodes():
    """Register a list of peer node addresses with this node (P2P-authenticated)."""
    node_list = request.get_json().get('nodes')
    if node_list is None:
        return "Error: Please supply a valid list of nodes", 400
    for node_address in node_list:
        blockchain.add_node(node_address)
    return jsonify({'message': 'New nodes have been added', 'total_nodes': list(blockchain.nodes)}), 201
@app.route('/nodes/resolve', methods=['GET'])
@p2p_required
def consensus():
    """Run longest-chain consensus against registered peers (P2P-authenticated)."""
    if blockchain.resolve_conflicts():
        message = 'Our chain was replaced'
    else:
        message = 'Our chain is authoritative'
    return jsonify({'message': message}), 200
# --- Admin Endpoints ---
@app.route('/admin/mint', methods=['POST'])
@admin_required
def admin_mint_tokens():
    """Admin-only: queue a system mint transaction into the mempool.

    Returns 201 on success; 400 on a missing/invalid body, a non-numeric
    amount, or a non-positive amount.
    """
    # silent=True + `or {}`: a missing/malformed body previously raised
    # (AttributeError on None) and surfaced as HTTP 500.
    values = request.get_json(silent=True) or {}
    recipient = values.get('recipient')
    try:
        amount = float(values.get('amount', 0))
    except (TypeError, ValueError):
        # Non-numeric amounts previously crashed with ValueError -> 500.
        return jsonify({'error': 'Recipient and a positive amount are required'}), 400
    if not recipient or amount <= 0: return jsonify({'error': 'Recipient and a positive amount are required'}), 400
    # sender '0' + nonce -1 marks this as a system transaction (no debit).
    mint_tx = {'transaction_id': str(uuid.uuid4()),'sender': '0','recipient': recipient,'amount': str(amount),'nonce': -1,'type': 'admin_mint','timestamp': time.time()}
    mempool_col.insert_one(mint_tx)
    logging.info(f"ADMIN: Minted {amount} $BUNK to {recipient}")
    return jsonify({'message': f'Mint transaction for {amount} $BUNK to {recipient} has been added to the mempool.'}), 201
@app.route('/admin/set_address_label', methods=['POST'])
@admin_required
def set_address_label():
    """Admin-only: attach or update a human-readable label for an address."""
    payload = request.get_json()
    target_address = payload.get('address')
    new_label = payload.get('label')
    if not target_address or not new_label:
        return jsonify({'error': 'Address and label are required'}), 400
    # Upsert so labelling works for addresses never labelled before.
    address_labels_col.update_one({'address': target_address}, {'$set': {'label': new_label}}, upsert=True)
    return jsonify({'message': f"Label '{new_label}' set for address {target_address[:10]}..."}), 200
@app.route('/admin/burn', methods=['POST'])
@admin_required
def admin_burn_tokens():
    """Admin-only: queue a burn transaction sending funds to the dead address.

    Returns 201 on success; 400 on a missing/invalid body, a non-numeric or
    non-positive amount, or insufficient balance.
    """
    # silent=True + `or {}`: a missing/malformed body previously raised
    # (AttributeError on None) and surfaced as HTTP 500.
    values = request.get_json(silent=True) or {}
    sender = values.get('sender')
    try:
        amount = float(values.get('amount', 0))
    except (TypeError, ValueError):
        # Non-numeric amounts previously crashed with ValueError -> 500.
        return jsonify({'error': 'Sender address and a positive amount are required'}), 400
    if not sender or amount <= 0: return jsonify({'error': 'Sender address and a positive amount are required'}), 400
    account_state = blockchain.get_account_state(sender)
    if account_state['balance'] < amount: return jsonify({'error': f'Insufficient funds. Address has {account_state["balance"]} $BUNK.'}), 400
    # Uses the account's current nonce so the burn spends like a normal transfer.
    burn_tx = {'transaction_id': str(uuid.uuid4()),'sender': sender,'recipient': "0x000000000000000000000000000000000000dEaD",'amount': str(amount),'nonce': account_state['nonce'],'type': 'burn','fee': '0.0','timestamp': time.time()}
    mempool_col.insert_one(burn_tx)
    logging.info(f"ADMIN: Created burn transaction for {amount} $BUNK from {sender}")
    return jsonify({'message': f'Burn transaction for {amount} $BUNK from {sender} has been added to the mempool.'}), 201
# =============================================================================
# Main Execution
# =============================================================================
if __name__ == '__main__':
    # Parse the listening port from the command line, then start the node.
    cli = ArgumentParser()
    cli.add_argument('-p', '--port', default=5000, type=int, help='port to listen on')
    options = cli.parse_args()
    logging.info(f"Starting BunkNet node on port {options.port}")
    app.run(host='0.0.0.0', port=options.port, debug=False)