# (QuantConnect research-notebook export; original cell marker: In [ ]:)
from datetime import datetime
import pandas as pd
# =====================================================
# 1. INITIALIZATION & SETUP
# =====================================================
qb = QuantBook()

# Universe: a fixed benchmark plus a configurable basket of single names.
fixed_tickers = ["SPY"]
dynamic_tickers = ["NVDA", "AMZM", "JPM"] if False else ["NVDA", "AMZN", "JPM"]
tickers = fixed_tickers + dynamic_tickers

# Register each equity with the QuantBook and keep its Symbol handle
# (registration order matches the tickers list, same as the original loop).
symbols = [qb.AddEquity(ticker, Resolution.Daily).Symbol for ticker in tickers]

# Add US Treasury Yield Data (custom dataset; keyed as "USTYCR").
yield_symbol = qb.AddData(USTreasuryYieldCurveRate, "USTYCR").Symbol

# History window and Object Store destination folder.
start = datetime(1998, 1, 1)
end = datetime(2026, 3, 28)
folder_name = "Data_Prices"
# =====================================================
# 2. FETCH MASTER HISTORY FOR ALL STOCKS
# =====================================================
ticker_count = len(tickers)
print(f"Fetching Stocks Data for {ticker_count} tickers...")
history = qb.History(symbols, start, end, Resolution.Daily)

print("Fetching Treasury Yield Data...")
yield_history = qb.History(yield_symbol, start, end, Resolution.Daily)

# Guard clause: nothing below can run without equity history.
if history.empty:
    raise ValueError("No price history found. Check date range.")
print("Master data loaded. Beginning processing...\n")
# =====================================================
# 3. LOOP THROUGH EACH TICKER & EXPORT TO OBJECT STORE
# =====================================================
def _save_price_csv(frame, ticker, rename_map, object_store_key):
    """Format a price frame and persist it to the Object Store as CSV.

    `frame` is expected to carry a DatetimeIndex named 'time' (as produced
    by qb.History after .loc[symbol]).  The index becomes a 'Date' column
    (YYYY-MM-DD), columns are renamed per `rename_map`, a leading 'Symbol'
    column is inserted, and the CSV text (no index, empty string for NaN,
    LF line endings, surrounding whitespace stripped) is saved under
    `object_store_key`.
    """
    out = frame.reset_index()
    out['time'] = out['time'].dt.strftime('%Y-%m-%d')
    out.rename(columns=rename_map, inplace=True)
    out.insert(0, "Symbol", ticker)
    csv_text = out.to_csv(index=False, na_rep="", lineterminator='\n').strip()
    qb.ObjectStore.Save(object_store_key, csv_text)

for symbol in symbols:
    ticker = symbol.Value
    print(f"Processing {ticker}...")
    try:
        # Isolate this specific stock's rows from the (symbol, time) MultiIndex.
        stock_history = history.loc[symbol].copy()
    except KeyError:
        print(f"No data returned for {ticker}. Skipping.")
        continue

    # -------------------------------------------------
    # A. DAILY CLOSE — one row per trading day.
    # -------------------------------------------------
    _save_price_csv(
        stock_history[['close']].dropna(),
        ticker,
        {'time': 'Date', 'close': 'Close'},
        f"{folder_name}/prices_daily_{ticker}.csv",
    )

    # -------------------------------------------------
    # B. WEEKLY OHLCV — aggregate each calendar week; dropna() removes
    # weeks with no trading days (first/max/min/last come back NaN).
    # -------------------------------------------------
    prices_weekly = stock_history.resample('W').agg({
        'open': 'first', 'high': 'max', 'low': 'min', 'close': 'last', 'volume': 'sum'
    }).dropna()
    _save_price_csv(
        prices_weekly,
        ticker,
        {'time': 'Date', 'open': 'Open', 'high': 'High', 'low': 'Low', 'close': 'Close', 'volume': 'Volume'},
        f"{folder_name}/prices_weekly_{ticker}.csv",
    )

    # -------------------------------------------------
    # C. MONTHLY CLOSE — resample to month-end ('ME', pandas >= 2.2 alias)
    # and keep the last closing price of each month.
    # -------------------------------------------------
    prices_monthly = stock_history[['close']].resample('ME').last().dropna()
    _save_price_csv(
        prices_monthly,
        ticker,
        {'time': 'Date', 'close': 'Close'},
        f"{folder_name}/prices_monthly_{ticker}.csv",
    )

    print(f"Saved Daily, Weekly, and Monthly CSVs for {ticker}.")
# =====================================================
# 4. PROCESS TREASURY YIELDS (USTYCR)
# =====================================================
def _save_yield_csv(frame, ticker, object_store_key):
    """Format a 10Y-yield frame and persist it to the Object Store as CSV.

    `frame` is indexed by a naive DatetimeIndex named 'time' and holds a
    'tenyear' column (already converted to decimal).  The index becomes a
    'Date' column (YYYY-MM-DD), 'tenyear' is renamed to 'Yield10Y', a
    leading 'Symbol' column is inserted, and the CSV text (no index, empty
    string for NaN, LF line endings, surrounding whitespace stripped) is
    saved under `object_store_key`.
    """
    out = frame.reset_index()
    out['time'] = out['time'].dt.strftime('%Y-%m-%d')
    out.rename(columns={'time': 'Date', 'tenyear': 'Yield10Y'}, inplace=True)
    out.insert(0, "Symbol", ticker)
    csv_text = out.to_csv(index=False, na_rep="", lineterminator='\n').strip()
    qb.ObjectStore.Save(object_store_key, csv_text)

print("\nProcessing USTYCR (10-Year Yield)...")
if not yield_history.empty:
    # 1. Clean and isolate the raw 10-year yield series.
    yield_df = yield_history[['tenyear']].copy().dropna()
    yield_df = yield_df.reset_index(level=0, drop=True)  # Drop the 'symbol' index level
    # Ensure the index is a timezone-naive datetime so we can resample it.
    yield_df.index = pd.to_datetime(yield_df.index, utc=True).tz_localize(None)
    # Convert percentage to decimal (e.g., 4.25 -> 0.0425).
    yield_df['tenyear'] = yield_df['tenyear'] / 100.0
    ticker = "USTYCR"

    # --- A. DAILY YIELD ---
    _save_yield_csv(yield_df.copy(), ticker,
                    f"{folder_name}/prices_daily_yield_{ticker}.csv")

    # --- B. WEEKLY YIELD (last observation of each week) ---
    _save_yield_csv(yield_df.resample('W').last().dropna(), ticker,
                    f"{folder_name}/prices_weekly_yield_{ticker}.csv")

    # --- C. MONTHLY YIELD (last observation of each month) ---
    _save_yield_csv(yield_df.resample('ME').last().dropna(), ticker,
                    f"{folder_name}/prices_monthly_yield_{ticker}.csv")

    print(f"Saved Daily, Weekly, and Monthly CSVs for {ticker}.")
else:
    print("Treasury Yield data is empty.")
print("\nAll data processed successfully!")