-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfetch_data.py
More file actions
186 lines (143 loc) · 7.69 KB
/
fetch_data.py
File metadata and controls
186 lines (143 loc) · 7.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
"""
fetch_data.py
=============
Fetches historical stock data using yfinance and engineers three features:
- Momentum : 12-month trailing return (t-12 to t-1)
- Volatility : 12-month rolling std of monthly returns
- PE Ratio : trailing P/E from yfinance fast_info
Target y:
- Next month's return (forward 1-month return)
Usage:
python fetch_data.py
Output:
data/features.csv — cleaned feature matrix ready for neural net
"""
import os

import numpy as np
import pandas as pd

# ── Try real yfinance fetch, fall back to synthetic data ───────────────────
# yfinance is an optional third-party dependency: when it is missing the
# script prints install instructions and later falls back to
# generate_synthetic_data() instead of crashing.  USE_REAL is the switch
# consulted by the __main__ block.
try:
    import yfinance as yf
    USE_REAL = True
except ImportError:
    USE_REAL = False
    print("yfinance not installed. Run: pip install yfinance")
    print("Using synthetic data for now.\n")
# ══════════════════════════════════════════════════════════════════════════════
# CONFIG
# ══════════════════════════════════════════════════════════════════════════════
# Universe of 10 large-cap US tickers spanning tech, financials, retail, energy.
TICKERS = ["AAPL", "MSFT", "GOOGL", "AMZN", "TSLA",
           "JPM", "GS", "BAC", "WMT", "XOM"]
START = "2015-01-01"  # history window start (inclusive)
END = "2024-01-01"    # history window end (exclusive, per yfinance convention)
OUT_DIR = "data"      # output directory, created on demand by clean_and_save
OUT_FILE = os.path.join(OUT_DIR, "features.csv")  # final feature matrix path
# ══════════════════════════════════════════════════════════════════════════════
# REAL DATA — yfinance
# ══════════════════════════════════════════════════════════════════════════════
def fetch_real_data():
    """Download month-end price features for every ticker in TICKERS.

    For each ticker, daily closes between START and END are resampled to
    month-end, then three features and the target are derived:

    - momentum   : 12-1 month trailing return (close[t-1] / close[t-13] - 1)
    - volatility : 12-month rolling std of monthly returns
    - pe_ratio   : current trailing P/E from yfinance fast_info, used as a
                   constant per-ticker proxy (historical P/E is unavailable)
    - target     : next month's return (forward-shifted monthly return)

    Returns:
        pd.DataFrame: concatenation of per-ticker frames indexed by
        month-end date; NaNs from rolling windows and the forward shift
        are left in place (clean_and_save drops them).

    Raises:
        ValueError: if no ticker yielded any data at all.
    """
    print(f"Fetching data for {len(TICKERS)} tickers from {START} to {END}...")
    all_rows = []
    for ticker in TICKERS:
        print(f"  Processing {ticker}...")
        try:
            stock = yf.Ticker(ticker)
            # Download daily then resample to monthly (month-end close).
            hist = stock.history(start=START, end=END, interval="1d")
            if hist.empty:
                print(f"    Skipping {ticker} — no data")
                continue
            hist = hist[["Close"]].resample("ME").last()
            # Strip the exchange timezone.  tz_convert(None) raises a
            # TypeError on an already-naive index, so only convert when
            # the index actually carries a timezone.
            idx = pd.to_datetime(hist.index)
            hist.index = idx.tz_convert(None) if idx.tz is not None else idx
            # Monthly simple returns.
            hist["ret"] = hist["Close"].pct_change()
            # Momentum: 12-1 month (skips the most recent month, the
            # standard construction that avoids short-term reversal).
            hist["momentum"] = hist["Close"].shift(1) / hist["Close"].shift(13) - 1
            # Volatility: 12-month rolling std of monthly returns.
            hist["volatility"] = hist["ret"].rolling(12).std()
            # PE ratio — current value as proxy; fast_info may be missing
            # the key, raise, or hand back None / a non-numeric value, so
            # coerce anything unusable to the 28.0 fallback.
            try:
                pe = stock.fast_info.get("trailingPE", 28.0)
            except Exception:
                pe = 28.0
            if not isinstance(pe, (int, float)) or not np.isfinite(pe):
                pe = 28.0
            hist["pe_ratio"] = pe
            # Target: forward 1-month return (what the model will predict).
            hist["target"] = hist["ret"].shift(-1)
            hist["ticker"] = ticker
            all_rows.append(hist)
        except Exception as e:
            # Best-effort per ticker: log and move on so one bad symbol
            # does not abort the whole fetch.
            print(f"    Error: {e}")
            continue
    if not all_rows:
        raise ValueError("No data fetched.")
    return pd.concat(all_rows)
# ══════════════════════════════════════════════════════════════════════════════
# SYNTHETIC DATA — for offline testing
# ══════════════════════════════════════════════════════════════════════════════
def generate_synthetic_data(n_samples=500, seed=42):
    """Build a synthetic feature table for offline testing.

    Draws momentum, volatility and P/E from realistic distributions and
    injects a weak linear signal (plus noise) into the target so that a
    downstream model has something learnable:

        y = 0.03 * momentum - 0.02 * volatility + 0.001 * pe_ratio + noise

    Args:
        n_samples: number of rows to generate.
        seed: seed for the random generator (fixed seed => fixed output).

    Returns:
        pd.DataFrame with columns momentum, volatility, pe_ratio, target,
        ticker (all rows tagged "SYNTHETIC").
    """
    print("Generating synthetic financial data...")
    print(f" n_samples = {n_samples}")
    print(f" Features = momentum, volatility, pe_ratio")
    print(f" Target = next month return\n")
    gen = np.random.default_rng(seed)

    # Feature draws — the order of these four RNG calls is part of the
    # contract: it fixes the output for a given seed.
    feats = {
        "momentum":   gen.normal(loc=0.08, scale=0.20, size=n_samples),
        "volatility": gen.uniform(low=0.01, high=0.08, size=n_samples),
        "pe_ratio":   gen.uniform(low=8.0, high=45.0, size=n_samples),
    }
    eps = gen.normal(loc=0.0, scale=0.04, size=n_samples)

    # Weak linear signal buried in noise, as in real return data.
    signal = (
        0.03 * feats["momentum"]
        - 0.02 * feats["volatility"]
        + 0.001 * feats["pe_ratio"]
    )
    feats["target"] = signal + eps
    feats["ticker"] = "SYNTHETIC"
    return pd.DataFrame(feats)
# ══════════════════════════════════════════════════════════════════════════════
# CLEAN & SAVE
# ══════════════════════════════════════════════════════════════════════════════
def clean_and_save(df):
    """Select the model columns, drop bad rows, standardise X, write CSV.

    Steps: keep momentum/volatility/pe_ratio/target/ticker, replace ±inf
    with NaN and drop incomplete rows, then z-score the three feature
    columns.  The target is deliberately left in raw return units.

    Args:
        df: raw feature frame from fetch_real_data / generate_synthetic_data.

    Returns:
        The cleaned, normalised DataFrame (also written to OUT_FILE).
    """
    keep = ["momentum", "volatility", "pe_ratio", "target", "ticker"]
    out = df.loc[:, keep].copy()

    # ±inf would otherwise survive dropna — map it to NaN first.
    out = out.replace([np.inf, -np.inf], np.nan).dropna()

    # Z-score each input column; the 1e-8 guards a zero-variance column.
    for name in ("momentum", "volatility", "pe_ratio"):
        col = out[name]
        out[name] = (col - col.mean()) / (col.std() + 1e-8)

    # Persist to disk, creating the output directory on first run.
    os.makedirs(OUT_DIR, exist_ok=True)
    out.to_csv(OUT_FILE, index=False)
    print(f"Saved {len(out)} rows to {OUT_FILE}")
    print(f"\nFeature stats after normalisation:")
    print(out[["momentum","volatility","pe_ratio","target"]].describe().round(4))
    return out
# ══════════════════════════════════════════════════════════════════════════════
# MAIN
# ══════════════════════════════════════════════════════════════════════════════
if __name__ == "__main__":
    # Prefer real market data; fall back to the synthetic generator when
    # yfinance could not be imported at module load time.
    raw = fetch_real_data() if USE_REAL else generate_synthetic_data(n_samples=500)
    df = clean_and_save(raw)

    # Quick summary for the operator.
    print(f"\nFinal dataset shape : {df.shape}")
    print(f"Columns : {list(df.columns)}")
    print(f"\nFirst 5 rows:")
    print(df.head())