4 changes: 2 additions & 2 deletions .env.sample
@@ -11,8 +11,8 @@ GCS_BUCKET_NAME=<YOUR_GCS_BUCKET_NAME>
 GOOGLE_SHEET_ID=<YOUR_GOOGLE_SHEETS_ID>
 GOOGLE_WORKSHEET_NAME=<YOUR_GOOGLE_WORKSHEET_NAME>
 API_TOKEN=<API_TOKEN>
-FLIPSIDE_API_KEY=<YOUR_FLIPSIDE_API_KEY>
-FLIPSIDE_ENDPOINT_URL=https://api-v2.flipsidecrypto.xyz
+ENS_BIGQUERY_DATATABLE=<YOUR_ENS_BIGQUERY_DATATABLE>
+LABELS_BIGQUERY_DATATABLE=<YOUR_ADDRESS_LABELS_BIGQUERY_DATATABLE>
 DEFAULT_MODEL=claude-3-haiku-20240307 # 'claude-3-haiku-20240307' or 'claude-3-sonnet-20240229' or 'claude-3-opus-20240229'
 CORS_ALLOWED_ORIGINS=<CORS_ALLOWED_ORIGINS>
 PORT=5000
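For orientation: the two new variables replace the Flipside credentials and are interpolated directly into SQL in label.py, so they presumably hold fully qualified BigQuery table IDs. A minimal sketch of how they might be consumed (the variable names follow .env.sample; the client construction is an assumption, since this diff only shows a bigquery_client being passed around):

    import os
    from dotenv import load_dotenv
    from google.cloud import bigquery

    load_dotenv()

    # Expected to be fully qualified table IDs, e.g. "project.dataset.table",
    # since label.py interpolates them straight into "from {labels_dataset}".
    labels_dataset = os.getenv('LABELS_BIGQUERY_DATATABLE')
    ens_dataset = os.getenv('ENS_BIGQUERY_DATATABLE')

    # Assumption: Application Default Credentials are configured
    # (GOOGLE_APPLICATION_CREDENTIALS or a GCP runtime identity).
    bigquery_client = bigquery.Client()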
80 changes: 37 additions & 43 deletions label.py
@@ -1,9 +1,7 @@
 import json
 import re
-from flipside import Flipside
 import pandas as pd
 
-
 # Recursively iterate over the json object looking for specified pattern
 def explore_json(obj, items, pattern):
     try:
@@ -40,64 +38,60 @@ def format (addresses):
     return addresses_str
 
 
-# Query Flipside
-def query_flipside (addresses_str, endpoint):
-    sql = f"""
-    select address,
-           address_name,
-           label,
-           label_type,
-           label_subtype
-    from ethereum.core.dim_labels
-    where lower(address) in ({addresses_str})
-    """
-
-    query_result_set = endpoint.query(sql)
-    df = pd.DataFrame(query_result_set)
-    return df
-
-# Put results into a json object
-def to_json(df):
+# Query labels datatables
+def query_labels (addresses_str, labels_dataset, ens_dataset, client):
     try:
-        json_data = []
-        data = df[1][4]
-        if data is None:
-            print("Data is None, returning an empty JSON object")
-            return json.dumps({'address_labels': []}, indent=4)
+        sql = f"""
+        with labels_data as (
+            select address,
+                   address_name
+            from {labels_dataset}
+            where address in ({addresses_str})
+        ),
+        ens_data as (
+            select address,
+                   tag as address_name
+            from {ens_dataset}
+            where address in ({addresses_str})
+            and address not in (select address from labels_data)
+        ),
+        merged_data as (
+            select * from labels_data
+            union all
+            select * from ens_data
+        )
 
-        for index, item in enumerate(data):
-            json_data.append({
-                'address': item[0],
-                'address_name': item[1],
-                'label': item[2],
-                'label_type': item[3],
-                'label_subtype': item[4]
-            })
-
-        json_object = json.dumps({'address_labels': json_data}, indent=4)
-        return json_object
+        select * from merged_data
+        """
+        df = client.query(sql).result().to_dataframe()
+        return df
     except Exception as e:
-        print("Error at to_json: ", e)
+        print("Error at query_labels: ", e)
+        return pd.DataFrame()
 
 # Fetch address labels
-def fetch_address_labels(sim_data, endpoint):
+def fetch_address_labels(sim_data, labels_dataset, ens_dataset, client):
     address_regex = r'^0x[0-9a-fA-F]{40}$'
     try:
         addresses = extract(sim_data, address_regex)
         addresses_str = format(addresses)
-        labels_data = query_flipside(addresses_str,endpoint)
-        labels_json = to_json(labels_data)
+
+        labels = query_labels (addresses_str, labels_dataset, ens_dataset, client)
+
+        labels_json = labels.to_json(orient='records')
+        labels_json = '{"address_labels": ' + labels_json + '}'
 
         return labels_json
     except Exception as e:
         print("Error at fetch_address_labels: ", e)
 
 # Add address labels to the original sim_data
-def add_labels(sim_data, endpoint):
+def add_labels(sim_data, labels_dataset, ens_dataset, client):
     try:
-        labels_json = json.loads(fetch_address_labels(sim_data, endpoint))
+        labels_json = json.loads(fetch_address_labels(sim_data, labels_dataset, ens_dataset, client))
         for key, value in labels_json.items():
             sim_data[key] = value
 
         return sim_data
     except Exception as e:
         print("Error at add_labels: ", e)
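Taken together, the new label.py flow is: extract 0x-addresses from sim_data, query the two BigQuery tables (ENS tags only fill addresses the labels table doesn't cover), serialize the resulting DataFrame with to_json(orient='records'), and merge the result back in under an address_labels key. A hypothetical call, with placeholder table IDs (only add_labels and its signature come from the diff; everything else here is illustrative):

    from google.cloud import bigquery
    from label import add_labels

    client = bigquery.Client()  # assumes default GCP credentials
    sim_data = {'from': '0xd8da6bf26964af9d7eed9e03e53415d37aa96045'}

    labeled = add_labels(
        sim_data,
        'my-project.labels.address_labels',  # placeholder LABELS_BIGQUERY_DATATABLE
        'my-project.labels.ens_names',       # placeholder ENS_BIGQUERY_DATATABLE
        client,
    )
    # On success, labeled['address_labels'] is a list of
    # {'address': ..., 'address_name': ...} records from BigQuery.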
1 change: 0 additions & 1 deletion requirements.txt
@@ -10,7 +10,6 @@ asyncio
 aiohttp
 fastapi==0.110.1
 fastapi_limiter==0.1.6
-flipside==2.0.8
 starlette==0.37.2
 redis==5.1.0b4
 uvicorn==0.13.1
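One dependency note, as an assumption rather than something this diff shows: the new BigQuery path in label.py needs the BigQuery client library at runtime, and client.query(...).result().to_dataframe() additionally needs BigQuery dtype support for pandas in recent client versions. Unless they are already installed elsewhere, requirements.txt would presumably gain lines roughly like:

    google-cloud-bigquery  # client.query(...) in label.py
    db-dtypes              # used by .to_dataframe() in recent client versions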
8 changes: 3 additions & 5 deletions simulate.py
@@ -9,7 +9,6 @@
 from dotenv import load_dotenv
 from web3 import Web3, AsyncWeb3
 import decimal
-from flipside import Flipside
 from label import add_labels
 
 w3 = AsyncWeb3(AsyncWeb3.AsyncHTTPProvider('https://cloudflare-eth.com'))
@@ -330,9 +329,8 @@ async def simulate_transaction(tx_hash, block_number, from_address, to_address,
     tenderly_account_slug = os.getenv('TENDERLY_ACCOUNT_SLUG')
     tenderly_project_slug = os.getenv('TENDERLY_PROJECT_SLUG')
     tenderly_access_key = os.getenv('TENDERLY_ACCESS_KEY')
-    flipside_api_key = os.getenv('FLIPSIDE_API_KEY')
-    flipside_endpoint_url = os.getenv('FLIPSIDE_ENDPOINT_URL')
-    flipside = Flipside(flipside_api_key, flipside_endpoint_url)
+    labels_dataset = os.getenv('LABELS_BIGQUERY_DATATABLE')
+    ens_dataset = os.getenv('ENS_BIGQUERY_DATATABLE')
 
     tx_details = {
         'network_id': NETWORK_CONFIGS[network]['network_id'],
@@ -370,7 +368,7 @@ async def simulate_transaction(tx_hash, block_number, from_address, to_address,
     trimmed_initial = await extract_useful_fields(sim_data)
     trimmed_decimals = await apply_decimals(trimmed_initial)
     trimmed_logs_applied= await apply_logs(trimmed_decimals)
-    trimmed= add_labels(trimmed_logs_applied, flipside)
+    trimmed = add_labels(trimmed_logs_applied, labels_dataset, ens_dataset, bigquery_client)
     try:
         blob = bucket.blob(f'{network}/transactions/simulations/trimmed/{tx_hash}.json')
         blob.upload_from_string(json.dumps(trimmed))
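One thing the shown hunks don't include is where bigquery_client comes from; only the add_labels call site references it. A plausible construction elsewhere in simulate.py (an assumption, not part of this diff):

    from google.cloud import bigquery

    # Assumption: built once at module scope, reusing the same GCP credentials
    # the existing GCS upload (bucket.blob / upload_from_string) relies on.
    # bigquery.Client() picks up Application Default Credentials
    # (e.g. GOOGLE_APPLICATION_CREDENTIALS).
    bigquery_client = bigquery.Client()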