GJ Algo Research - Colab
import pandas as pd
# Data
data = {
"DATE": ["1/4/21", "1/6/21", "1/7/21", "1/8/21", "1/12/21", "1/15/21", "1/18/21", "1/19/21", "1/21/21", "1/22/21",
"1/25/21", "1/26/21", "1/29/21", "2/1/21", "2/2/21", "2/4/21", "2/9/21", "2/15/21", "2/16/21", "2/22/21",
"2/26/21", "3/1/21", "3/2/21", "3/4/21", "3/5/21", "3/8/21", "3/12/21", "3/16/21", "3/17/21", "3/18/21",
"3/19/21", "3/23/21", "3/24/21", "3/25/21", "3/26/21", "3/29/21", "3/31/21", "4/1/21", "4/7/21", "4/8/21",
"4/9/21", "4/12/21", "4/13/21", "4/14/21", "4/15/21", "4/16/21", "4/19/21", "4/20/21", "4/21/21", "4/22/21",
"4/23/21", "4/26/21", "4/27/21", "4/29/21", "4/30/21", "5/3/21", "5/7/21", "5/10/21", "5/13/21", "5/17/21",
"5/19/21", "5/26/21", "6/2/21", "6/4/21", "6/7/21", "6/10/21", "6/18/21", "6/21/21", "6/22/21", "6/23/21",
"6/28/21", "6/29/21", "6/30/21", "7/6/21", "7/7/21", "7/8/21", "7/9/21", "7/12/21", "7/13/21", "7/14/21",
"7/15/21", "7/16/21", "7/20/21", "7/22/21", "7/26/21", "7/27/21", "7/28/21", "7/29/21", "8/4/21", "8/16/21",
"8/19/21", "8/20/21", "8/23/21", "8/24/21", "8/26/21", "8/27/21", "8/31/21", "9/2/21", "9/7/21", "9/9/21",
"9/10/21", "9/17/21", "9/20/21", "9/21/21", "9/23/21", "9/24/21", "9/27/21", "9/28/21", "10/1/21", "10/4/21",
"10/5/21", "10/6/21", "10/7/21", "10/8/21", "10/11/21", "10/12/21", "10/13/21", "10/14/21", "10/15/21",
"10/19/21", "10/21/21", "10/22/21", "10/25/21", "10/26/21", "10/27/21", "10/28/21", "11/1/21", "11/2/21",
"11/4/21", "11/5/21", "11/9/21", "11/10/21", "11/11/21", "11/15/21", "11/16/21", "11/18/21", "11/22/21",
"11/23/21", "11/24/21", "11/26/21", "12/2/21", "12/3/21", "12/8/21", "12/9/21", "12/13/21", "12/14/21",
"12/16/21", "12/17/21", "12/20/21", "12/21/21", "12/22/21", "12/23/21"],
"RETURN": [-1, 0, -1, 0, 1, 1, 1, 0, -1, 0, 1, -1, 1.1, 1.35, 1.4, 0, 1, 1, -1, 1.3, 1, 0, 2, 1, -1, 0, 1, 0, 1.2, 2,
0, -1, -1, -1, 1, -1, 1, 1.9, 1.8, 1, 2, 0, -1, 0, 1.1, 1.1, 1.8, 2, 1.2, -1, -1, 0, 1, 0, 1, 1, 1.5, 1,
-1, 0, 1.1, 0.6, 0, 1, -1, 1, -1, -1, 1.5, 0, 1.6, -1, 1.1, -1, -1, 1, 1, -1, -1, -1, 2.6, 0, 1.8, -1, 1.75,
0, 0, 1, 1.5, 1, 1, 1.25, 1, -1, 1.1, -1, 1, 0, 1, 1, 1, 1, 1, -1, 1, 1, -1, -1, 2, -1, -1, -1, 1.2, 0, 1,
1.2, 1, 1, 1, -1, -1, 0, 1.4, -1, 1, 0, 1.75, 1, 1, 0, 2, 1.2, 0, 1.1, 1, 0, 1.5, -1, -1, 1, 0, 1.4, -1, -1,
1.3, -1, -1, -1, 2, 1, -1, -1]
}
# Convert to DataFrame
df = pd.DataFrame(data)
df['RETURN'] = df['RETURN'] * 2  # Convert R-multiples to percentage gains/losses (2% risk per trade)
# Parameters
phase1_target = 7
phase2_target = 5
max_drawdown = 10
# Simulation
pass_count = 0
fail_count = 0
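# NOTE (assumed reconstruction): the evaluation loop of this cell was cut off in the
# notebook export. The sketch below starts a fresh two-phase evaluation at every
# trade date: Phase 1 needs +phase1_target%, Phase 2 needs +phase2_target%, and a
# drawdown of max_drawdown% from the running peak fails the attempt.
for start in range(len(df)):
    phase = 1
    cumulative_profit = 0.0
    max_cumulative_profit = 0.0
    outcome = None
    for i in range(start, len(df)):
        cumulative_profit += df.iloc[i]['RETURN']
        max_cumulative_profit = max(max_cumulative_profit, cumulative_profit)
        if phase == 1 and cumulative_profit >= phase1_target:
            phase = 2
            cumulative_profit = 0.0  # Reset for Phase 2
            max_cumulative_profit = 0.0
        elif phase == 2 and cumulative_profit >= phase2_target:
            outcome = "pass"
            break
        if max_cumulative_profit - cumulative_profit >= max_drawdown:
            outcome = "fail"
            break
    if outcome == "pass":
        pass_count += 1
    elif outcome == "fail":
        fail_count += 1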
# Results
print(f"Pass: {pass_count}, Fail: {fail_count}")
import pandas as pd
# Data
data = {
"DATE": ["1/4/22", "1/5/22", "1/6/22", "1/10/22", "1/11/22", "1/12/22", "1/14/22", "1/17/22", "1/18/22", "1/20/22",
"1/21/22", "1/24/22", "1/25/22", "1/26/22", "1/27/22", "1/28/22", "1/31/22", "2/1/22", "2/4/22", "2/10/22",
"2/11/22", "2/14/22", "2/15/22", "2/17/22", "2/18/22", "2/21/22", "2/24/22", "2/25/22", "2/28/22", "3/1/22",
"3/2/22", "3/4/22", "3/7/22", "3/8/22", "3/10/22", "3/11/22", "3/15/22", "3/17/22", "3/18/22", "3/22/22",
"3/24/22", "3/25/22", "3/28/22", "3/31/22"],
"RETURN": [1, 1.5, 1, -1, 0, 0, 1.2, 0.7, -1, -1, 0, -1, 1.7, -1, 1.2, 0.9, 1.25, 0, -1, 1, 1.25, 1, -1, 2, 2, 1, -1,
1.75, 0, -1, 1.6, -1, 1.9, 0, 1.4, 1, -1, 0, 0, 1, 0, 2, -1, -1]
}
# Convert to DataFrame
df = pd.DataFrame(data)
df['RETURN'] = df['RETURN'] * 2  # Convert R-multiples to percentage gains/losses (2% risk per trade)
# Parameters
phase1_target = 7
phase2_target = 5
max_drawdown = 10
# Simulation
pass_count = 0
fail_count = 0
# Results
print(f"Pass: {pass_count}, Fail: {fail_count}")
import pandas as pd
# Data
data = {
"DATE": ["1/4/22", "1/5/22", "1/6/22", "1/10/22", "1/11/22", "1/12/22", "1/14/22", "1/17/22", "1/18/22", "1/20/22",
"1/21/22", "1/24/22", "1/25/22", "1/26/22", "1/27/22", "1/28/22", "1/31/22", "2/1/22", "2/4/22", "2/10/22",
"2/11/22", "2/14/22", "2/15/22", "2/17/22", "2/18/22", "2/21/22", "2/24/22", "2/25/22", "2/28/22", "3/1/22",
"3/2/22", "3/4/22", "3/7/22", "3/8/22", "3/10/22", "3/11/22", "3/15/22", "3/17/22", "3/18/22", "3/22/22",
"3/24/22", "3/25/22", "3/28/22", "3/31/22"],
"RETURN": [1, 1.5, 1, -1, 0, 0, 1.2, 0.7, -1, -1, 0, -1, 1.7, -1, 1.2, 0.9, 1.25, 0, -1, 1, 1.25, 1, -1, 2, 2, 1, -1,
1.75, 0, -1, 1.6, -1, 1.9, 0, 1.4, 1, -1, 0, 0, 1, 0, 2, -1, -1]
}
# Convert to DataFrame
df = pd.DataFrame(data)
df['RETURN'] = df['RETURN'] * 2  # Convert R-multiples to percentage gains/losses (2% risk per trade)
# Parameters
phase1_target = 7
phase2_target = 5
max_drawdown = 10
# Simulation
pass_count = 0
fail_count = 0
# starting values and loop header assumed; the pass/fail handling was cut off in the
# export (it follows the two-phase sketch in the first cell)
cumulative_profit = 0
max_cumulative_profit = 0
for i in range(len(df)):
    trade_return = df.iloc[i]['RETURN']
    cumulative_profit += trade_return
    max_cumulative_profit = max(max_cumulative_profit, cumulative_profit)
    drawdown = max_cumulative_profit - cumulative_profit
# Results
print(f"Pass: {pass_count}, Fail: {fail_count}")
import pandas as pd
# Convert to DataFrame
df = pd.DataFrame(data)
# Define thresholds
bullish_threshold = 0.0020 # Close > Open + 0.0020
bearish_threshold = 0.0020 # Close < Open - 0.0020
range_threshold = 0.0030 # High - Low < 0.0030 for consolidatory
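# (assumed) session classifier used by the apply() call below; the original function
# definition and the session OHLC data were cut off in the export, so the column
# names Open/High/Low/Close are assumptions
def classify_session(row):
    if row["Close"] > row["Open"] + bullish_threshold:
        return "Bullish"
    elif row["Close"] < row["Open"] - bearish_threshold:
        return "Bearish"
    elif row["High"] - row["Low"] < range_threshold:
        return "Consolidatory"
    else:
        return "Mixed"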
# Apply classification
df["Classification"] = df.apply(classify_session, axis=1)
# Display results
print(df)
import pandas as pd
import matplotlib.pyplot as plt
# Convert to DataFrame
df = pd.DataFrame(data)
# Plotting
plt.figure(figsize=(10, 6))
# Show plot
plt.show()
import pandas as pd
import matplotlib.pyplot as plt
# Convert to DataFrame
df = pd.DataFrame(data)
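# (assumed) figure setup for the panel formatted below; the subplot creation and
# plotting calls were cut off in the export — "Session_Index" and "Session" come from
# the axis settings that follow, while "High"/"Low" and the titles are assumptions
fig, axes = plt.subplots(2, 2, figsize=(14, 8))
fig.suptitle("Session Breakdown")  # hypothetical title, spaced by tight_layout below
axes[0, 0].plot(df["Session_Index"], df["High"], marker="o", label="High")
axes[0, 0].plot(df["Session_Index"], df["Low"], marker="o", label="Low")
axes[0, 0].set_title("Session High/Low")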
axes[0, 0].set_ylabel("Price")
axes[0, 0].set_xticks(df["Session_Index"])
axes[0, 0].set_xticklabels(df["Session"])
axes[0, 0].grid()
axes[0, 0].legend()
# Adjust layout
plt.tight_layout(rect=[0, 0, 1, 0.96]) # Adjust title spacing
# Show plot
plt.show()
import pandas as pd
import mplfinance as mpf
# Data
data = [
["1/2/23", "7:00:00 AM", 158.124, 158.165, 158.101, 158.138],
["1/2/23", "7:15:00 AM", 158.111, 158.186, 158.092, 158.124],
["1/2/23", "7:30:00 AM", 158.137, 158.198, 158.111, 158.189],
["1/2/23", "7:45:00 AM", 158.176, 158.183, 158.061, 158.124],
["1/2/23", "8:00:00 AM", 158.114, 158.123, 157.987, 158.057],
["1/2/23", "8:15:00 AM", 158.037, 158.041, 158.019, 158.027],
["1/2/23", "8:30:00 AM", 158.045, 158.081, 158.009, 158.07],
["1/2/23", "8:45:00 AM", 158.07, 158.193, 158.064, 158.193],
["1/2/23", "9:00:00 AM", 158.193, 158.193, 157.937, 158.159],
["1/2/23", "9:15:00 AM", 158.006, 158.182, 157.955, 157.966],
["1/2/23", "9:30:00 AM", 157.966, 158.145, 157.966, 158.1],
["1/2/23", "9:45:00 AM", 158.093, 158.128, 157.972, 157.994],
["1/2/23", "10:00:00 AM", 158.025, 158.129, 158.006, 158.085],
["1/2/23", "10:15:00 AM", 158.095, 158.114, 157.949, 158.02],
["1/2/23", "10:30:00 AM", 158.019, 158.083, 157.968, 158.036],
["1/2/23", "10:45:00 AM", 158.02, 158.153, 157.712, 157.726],
["1/2/23", "11:00:00 AM", 157.789, 157.864, 157.705, 157.773],
["1/2/23", "11:15:00 AM", 157.773, 157.882, 157.769, 157.816],
["1/2/23", "11:30:00 AM", 157.807, 157.863, 157.666, 157.712],
["1/2/23", "11:45:00 AM", 157.712, 157.743, 157.635, 157.646],
["1/2/23", "12:00:00 PM", 157.667, 157.725, 157.555, 157.578],
["1/2/23", "12:15:00 PM", 157.577, 157.681, 157.563, 157.637],
["1/2/23", "12:30:00 PM", 157.637, 157.66, 157.597, 157.618],
["1/2/23", "12:45:00 PM", 157.618, 157.643, 157.609, 157.625],
["1/2/23", "1:00:00 PM", 157.62, 157.742, 157.62, 157.685],
["1/2/23", "1:15:00 PM", 157.684, 157.73, 157.629, 157.644],
["1/2/23", "1:30:00 PM", 157.653, 157.696, 157.625, 157.654],
["1/2/23", "1:45:00 PM", 157.654, 157.675, 157.593, 157.602],
["1/2/23", "2:00:00 PM", 157.602, 157.614, 157.52, 157.542],
["1/2/23", "2:15:00 PM", 157.537, 157.642, 157.53, 157.62],
["1/2/23", "2:30:00 PM", 157.618, 157.642, 157.554, 157.566],
["1/2/23", "2:45:00 PM", 157.566, 157.619, 157.547, 157.562],
["1/2/23", "3:00:00 PM", 157.563, 157.588, 157.466, 157.494],
["1/2/23", "3:15:00 PM", 157.494, 157.519, 157.429, 157.468],
["1/2/23", "3:30:00 PM", 157.465, 157.514, 157.43, 157.433],
["1/2/23", "3:45:00 PM", 157.433, 157.496, 157.425, 157.458],
["1/2/23", "4:00:00 PM", 157.443, 157.507, 157.439, 157.492],
["1/2/23", "4:15:00 PM", 157.493, 157.544, 157.458, 157.512],
["1/2/23", "4:30:00 PM", 157.502, 157.528, 157.487, 157.518],
["1/2/23", "4:45:00 PM", 157.518, 157.654, 157.499, 157.561],
["1/2/23", "5:00:00 PM", 157.55, 157.641, 157.546, 157.569],
["1/2/23", "5:15:00 PM", 157.569, 157.675, 157.56, 157.62],
["1/2/23", "5:30:00 PM", 157.609, 157.648, 157.584, 157.603],
["1/2/23", "5:45:00 PM", 157.603, 157.667, 157.574, 157.586],
["1/2/23", "6:00:00 PM", 157.549, 157.612, 157.387, 157.584],
["1/2/23", "6:15:00 PM", 157.566, 157.586, 157.498, 157.524],
["1/2/23", "6:30:00 PM", 157.519, 157.575, 157.491, 157.543],
["1/2/23", "6:45:00 PM", 157.543, 157.551, 157.491, 157.528],
["1/2/23", "7:00:00 PM", 157.526, 157.537, 157.482, 157.495],
["1/2/23", "7:15:00 PM", 157.498, 157.535, 157.481, 157.53],
["1/2/23", "7:30:00 PM", 157.526, 157.591, 157.498, 157.576],
["1/2/23", "7:45:00 PM", 157.566, 157.593, 157.543, 157.567],
["1/2/23", "8:00:00 PM", 157.564, 157.575, 157.506, 157.567],
["1/2/23", "8:15:00 PM", 157.554, 157.585, 157.55, 157.574],
["1/2/23", "8:30:00 PM", 157.571, 157.59, 157.551, 157.571],
["1/2/23", "8:45:00 PM", 157.561, 157.571, 157.495, 157.571],
["1/2/23", "9:00:00 PM", 157.564, 157.564, 157.498, 157.559],
["1/2/23", "9:15:00 PM", 157.556, 157.566, 157.498, 157.561],
["1/2/23", "9:30:00 PM", 157.538, 157.569, 157.489, 157.534],
["1/2/23", "9:45:00 PM", 157.534, 157.567, 157.453, 157.494],
["1/2/23", "10:00:00 PM", 157.506, 157.521, 157.432, 157.511],
["1/2/23", "10:15:00 PM", 157.512, 157.547, 157.503, 157.511],
["1/2/23", "10:30:00 PM", 157.511, 157.541, 157.508, 157.533],
["1/2/23", "10:45:00 PM", 157.512, 157.536, 157.448, 157.497],
["1/2/23", "11:00:00 PM", 157.497, 157.524, 157.48, 157.521],
["1/2/23", "11:15:00 PM", 157.522, 157.524, 157.432, 157.432],
["1/2/23", "11:30:00 PM", 157.439, 157.48, 157.427, 157.48],
["1/2/23", "11:45:00 PM", 157.425, 157.486, 157.324, 157.41]
]
# Create DataFrame
df = pd.DataFrame(data, columns=["Date", "Time", "Open", "High", "Low", "Close"])
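# (assumed) datetime index and candlestick plot; the original calls were cut off in
# the export — the to_datetime line is the one echoed by the warning below, and
# passing format="%m/%d/%y %I:%M:%S %p" would silence that warning
df["DateTime"] = pd.to_datetime(df["Date"] + " " + df["Time"])
df.set_index("DateTime", inplace=True)
mpf.plot(df[["Open", "High", "Low", "Close"]], type="candle", style="charles",
         title="GJ 15-minute candles (1/2/23)", ylabel="Price")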
<ipython-input-14-93776fc425f8>:80: UserWarning: Could not infer format, so each element will be parsed individually, falling back to `dateutil`. To ensure parsing is consistent and as-expected, please specify a format.
  df["DateTime"] = pd.to_datetime(df["Date"] + " " + df["Time"])
# Convert to DataFrame
df = pd.DataFrame(data)
df['RETURN'] = df['RETURN'] * 2  # Convert R-multiples to percentage gains/losses (2% risk per trade)
# Parameters
phase1_target = 7
phase2_target = 5
max_drawdown = 10
# Simulation
pass_count = 0
fail_count = 0
# Results
print(f"Pass: {pass_count}, Fail: {fail_count}")
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from ta.volatility import BollingerBands, KeltnerChannel
from ta.trend import ADXIndicator
from ta.momentum import RSIIndicator
# Data
raw_data = """Timestamp,Open,High,Low,Close
1/3/23 03:00:00,157.437,157.448,156.871,156.893
1/3/23 03:15:00,156.897,156.949,156.374,156.461
1/3/23 03:30:00,156.459,156.896,156.384,156.713
1/3/23 03:45:00,156.712,156.848,156.571,156.836
1/3/23 04:00:00,156.84,157.096,156.829,156.895
1/3/23 04:15:00,156.895,156.951,156.774,156.86
1/3/23 04:30:00,156.863,157.018,156.71,156.807
1/3/23 04:45:00,156.802,156.955,156.72,156.921
1/3/23 05:00:00,156.921,156.999,156.83,156.83
1/3/23 05:15:00,156.829,156.841,156.652,156.712
1/3/23 05:30:00,156.712,156.768,156.639,156.723
1/3/23 05:45:00,156.723,156.746,156.666,156.688
1/3/23 06:00:00,156.681,156.705,156.457,156.482
1/3/23 06:15:00,156.483,156.612,156.477,156.578
1/3/23 06:30:00,156.585,156.616,156.489,156.532
1/3/23 06:45:00,156.531,156.587,156.439,156.449
1/3/23 07:00:00,156.452,156.527,156.423,156.479
1/3/23 07:15:00,156.482,156.703,156.478,156.575
1/3/23 07:30:00,156.574,156.708,156.569,156.661
1/3/23 07:45:00,156.661,156.82,156.639,156.674
1/3/23 08:00:00,156.671,156.717,156.504,156.51
1/3/23 08:15:00,156.509,156.649,156.44,156.628
1/3/23 08:30:00,156.628,156.777,156.61,156.65
1/3/23 08:45:00,156.65,156.818,156.608,156.621
1/3/23 09:00:00,156.619,156.724,156.546,156.691"""
# Read Data
from io import StringIO
df = pd.read_csv(StringIO(raw_data), parse_dates=['Timestamp'])
df.set_index('Timestamp', inplace=True)
# 1. ATR Calculation
df['H-L'] = df['High'] - df['Low']
df['H-C'] = abs(df['High'] - df['Close'].shift(1))
df['L-C'] = abs(df['Low'] - df['Close'].shift(1))
df['TR'] = df[['H-L', 'H-C', 'L-C']].max(axis=1)
df['ATR'] = df['TR'].rolling(window=14).mean()
consolidating_atr = df['ATR'].iloc[-1] < df['ATR'].mean()
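# (assumed reconstructions of sections 2-4, which were cut off in the export; the
# variable names match the is_consolidating check further down)
# 2. Bollinger Band Width
bb = BollingerBands(close=df['Close'], window=20, window_dev=2)
df['BB_Width'] = bb.bollinger_wband()
consolidating_bb = df['BB_Width'].iloc[-1] < df['BB_Width'].mean()
# 3. ADX (a low ADX suggests no trend; needs enough bars, so early values may be NaN/0)
adx = ADXIndicator(high=df['High'], low=df['Low'], close=df['Close'], window=14)
df['ADX'] = adx.adx()
consolidating_adx = df['ADX'].iloc[-1] < 25
# 4. RSI near the midpoint
rsi = RSIIndicator(close=df['Close'], window=14)
df['RSI'] = rsi.rsi()
consolidating_rsi = 40 < df['RSI'].iloc[-1] < 60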
# 5. Standard Deviation
df['Std_Dev'] = df['Close'].rolling(window=20).std()
consolidating_std = df['Std_Dev'].iloc[-1] < df['Std_Dev'].mean()
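# (assumed reconstruction of section 6, cut off in the export)
# 6. Keltner Channel Width
kc = KeltnerChannel(high=df['High'], low=df['Low'], close=df['Close'], window=20)
df['KC_Width'] = kc.keltner_channel_wband()
consolidating_kc = df['KC_Width'].iloc[-1] < df['KC_Width'].mean()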
# 7. EMA Convergence
df['EMA_Fast'] = df['Close'].ewm(span=9, adjust=False).mean()
df['EMA_Slow'] = df['Close'].ewm(span=21, adjust=False).mean()
consolidating_ema = abs(df['EMA_Fast'].iloc[-1] - df['EMA_Slow'].iloc[-1]) < 0.001 * df['Close'].iloc[-1]
# Decision
is_consolidating = all([consolidating_atr, consolidating_bb, consolidating_adx, consolidating_rsi, consolidating_std, consolidating_kc, consolidating_ema])
print("Market is Consolidating" if is_consolidating else "Market is Trending")
import pandas as pd
# Parameters
phase1_target = 2
phase2_target = 0
max_drawdown = 10
# Simulation
pass_count = 0
fail_count = 0
pass_days = []
fail_days = []
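# (assumed) summary statistics feeding the prints below; the evaluation loop itself
# was cut off in the export — it follows the two-phase sketch from the first cell,
# additionally appending the number of trades taken to pass_days / fail_days
total_runs = pass_count + fail_count
pass_percentage = 100 * pass_count / total_runs if total_runs else 0.0
fail_percentage = 100 * fail_count / total_runs if total_runs else 0.0
avg_pass_days = sum(pass_days) / len(pass_days) if pass_days else float('nan')
avg_fail_days = sum(fail_days) / len(fail_days) if fail_days else float('nan')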
# Display results
print(f"Pass: {pass_count}, Fail: {fail_count}")
print(f"Pass Rate: {pass_percentage:.2f}%")
print(f"Fail Rate: {fail_percentage:.2f}%")
print(f"Average days to pass: {avg_pass_days:.2f}")
print(f"Average days to fail: {avg_fail_days:.2f}")
import pandas as pd
import matplotlib.pyplot as plt
# Parameters
phase1_target = 6
phase2_target = 6
max_drawdown = 9
# Track results
pass_count = 0
fail_count = 0
days_to_pass = []
days_to_fail = []
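# (assumed) starting state for the single evaluation run below; the original
# initialization was cut off in the export
phase = 1
cumulative_profit = 0.0
max_cumulative_profit = 0.0
phase1_date = None
phase2_date = None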
for i in range(len(df)):
    trade_return = df.iloc[i]['RETURN']
    cumulative_profit += trade_return
    max_cumulative_profit = max(max_cumulative_profit, cumulative_profit)
    # Phase transitions
    if phase == 1 and cumulative_profit >= phase1_target:
        phase = 2
        phase1_date = df.iloc[i]['DATE']
        cumulative_profit = 0  # Reset for Phase 2
        max_cumulative_profit = 0
    elif phase == 2 and cumulative_profit >= phase2_target:
        phase2_date = df.iloc[i]['DATE']
        break  # Stop when we pass
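# (assumed) equity-curve plot that the formatting below styles; the original
# plotting call was cut off in the export
plt.figure(figsize=(10, 6))
plt.plot(df['DATE'], df['RETURN'].cumsum(), label="Cumulative Profit")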
# Formatting
plt.xlabel("Date")
plt.ylabel("Cumulative Profit")
plt.title("Passing Account Performance")
plt.legend()
plt.grid(True)
plt.xticks(rotation=45)
# Show plot
plt.show()
import pandas as pd
# Parameters
phase1_target = 7
phase2_target = 5
max_drawdown = 10
# Store results
best_risk = None
best_pass_rate = 0
best_avg_days = float('inf')
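# (assumed reconstruction) the risk-sweep and evaluation loops were cut off in the
# export; for each candidate per-trade risk, scale the raw R-multiples and rerun the
# two-phase evaluation — the surviving fail branch continues just below
risk_results = {}
for risk in [0.5, 1.0, 1.5, 2.0]:  # hypothetical risk levels, % per trade
    pass_count, fail_count = 0, 0
    pass_days, fail_days = [], []
    for start in range(len(df)):  # fresh evaluation attempt from each trade date
        phase, cumulative_profit, peak = 1, 0.0, 0.0
        days_taken = 0
        for i in range(start, len(df)):
            days_taken += 1
            cumulative_profit += df.iloc[i]['RETURN'] * risk
            peak = max(peak, cumulative_profit)
            if phase == 1 and cumulative_profit >= phase1_target:
                phase, cumulative_profit, peak = 2, 0.0, 0.0
            elif phase == 2 and cumulative_profit >= phase2_target:
                pass_count += 1
                pass_days.append(days_taken)
                break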
            if peak - cumulative_profit >= max_drawdown:  # assumed fail condition: drawdown from peak
                fail_count += 1
                fail_days.append(days_taken)
                break
    # Store results
    total_runs = pass_count + fail_count
    pass_rate = 100 * pass_count / total_runs if total_runs else 0.0  # assumed; definition cut off in the export
    avg_pass_days = sum(pass_days) / len(pass_days) if pass_days else float('inf')
    risk_results[risk] = {"Pass Rate": pass_rate, "Avg Days": avg_pass_days}
    # Find the best risk level (highest pass rate & fastest completion)
    if pass_rate > best_pass_rate or (pass_rate == best_pass_rate and avg_pass_days < best_avg_days):
        best_pass_rate = pass_rate
        best_avg_days = avg_pass_days
        best_risk = risk
current_streak_length = 0
max_streak_start = None
max_streak_end = None
current_streak_start = None
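# (assumed reconstruction) the streak-scanning loop was cut off in the export; this
# sketch walks the trade log (DATE / RETURN columns) and records the longest run of
# consecutive winning trades
max_streak_length = 0
for i in range(len(df)):
    if df.iloc[i]['RETURN'] > 0:
        if current_streak_length == 0:
            current_streak_start = df.iloc[i]['DATE']
        current_streak_length += 1
        if current_streak_length > max_streak_length:
            max_streak_length = current_streak_length
            max_streak_start = current_streak_start
            max_streak_end = df.iloc[i]['DATE']
    else:
        current_streak_length = 0
print(f"Longest winning streak: {max_streak_length} trades ({max_streak_start} to {max_streak_end})")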
import pandas as pd
YearMonth MonthlyReturn
0 2021-01 1.10
1 2021-02 6.05
2 2021-03 4.20
3 2021-04 12.90
4 2021-05 4.20
5 2021-06 2.20
6 2021-07 3.15
7 2021-08 5.85
8 2021-09 4.00
9 2021-10 4.80
10 2021-11 9.55
11 2021-12 -1.30
12 2022-01 5.45
13 2022-02 7.00
14 2022-03 3.90
15 2022-04 5.00
16 2022-05 5.00
17 2022-06 3.40
18 2022-07 -1.50
19 2022-08 4.55
20 2022-09 5.00
21 2022-10 11.70
22 2022-11 11.50
23 2022-12 -1.20
24 2023-01 1.20
25 2023-02 3.00
26 2023-03 3.10
27 2023-04 7.50
28 2023-05 13.70
29 2023-06 2.30
30 2023-07 4.55
31 2023-08 -2.30
32 2023-09 5.25
33 2023-10 7.50
34 2023-11 1.20
35 2023-12 4.50
36 2024-01 8.35
37 2024-02 -0.50
38 2024-03 10.50
39 2024-04 0.30
40 2024-05 -3.00
41 2024-06 5.60
42 2024-07 1.00
43 2024-08 9.30
44 2024-09 8.70
45 2024-10 6.00
46 2024-11 8.30
47 2024-12 1.30
48 2025-01 5.70
import pandas as pd
| YearMonth | MonthlyReturn |
|:------------|----------------:|
| 2021-01 | 1.1 |
| 2021-02 | 6.05 |
| 2021-03 | 4.2 |
| 2021-04 | 12.9 |
| 2021-05 | 4.2 |
| 2021-06 | 2.2 |
| 2021-07 | 3.15 |
| 2021-08 | 5.85 |
| 2021-09 | 4 |
| 2021-10 | 4.8 |
| 2021-11 | 9.55 |
| 2021-12 | -1.3 |
| 2022-01 | 5.45 |
| 2022-02 | 7 |
| 2022-03 | 3.9 |
| 2022-04 | 5 |
| 2022-05 | 5 |
| 2022-06 | 3.4 |
| 2022-07 | -1.5 |
| 2022-08 | 4.55 |
| 2022-09 | 5 |
| 2022-10 | 11.7 |
| 2022-11 | 11.5 |
| 2022-12 | -1.2 |
| 2023-01 | 1.2 |
| 2023-02 | 3 |
| 2023-03 | 3.1 |
| 2023-04 | 7.5 |
| 2023-05 | 13.7 |
| 2023-06 | 2.3 |
| 2023-07 | 4.55 |
| 2023-08 | -2.3 |
| 2023-09 | 5.25 |
| 2023-10 | 7.5 |
| 2023-11 | 1.2 |
| 2023-12 | 4.5 |
| 2024-01 | 8.35 |
| 2024-02 | -0.5 |
| 2024-03 | 10.5 |
| 2024-04 | 0.3 |
| 2024-05 | -3 |
| 2024-06 | 5.6 |
| 2024-07 | 1 |
| 2024-08 | 9.3 |
| 2024-09 | 8.7 |
| 2024-10 | 6 |
| 2024-11 | 8.3 |
| 2024-12 | 1.3 |
| 2025-01 | 5.7 |
import pandas as pd
# Ensure the column name is correct (replace 'RETURN' with the actual column name if different)
column_name = 'RETURN' # Change this if your column has a different name
import pandas as pd
# Ensure the column name is correct (replace 'RETURN' with the actual column name if different)
column_name = 'RETURN' # Change this if your column has a different name
import pandas as pd
# Ensure the column name is correct (replace 'RETURN' with the actual column name if different)
column_name = 'RETURN' # Change this if your column has a different name
%matplotlib inline
import quantstats as qs
import pandas as pd
# Ensure the DATE column is in datetime format and set it as the index
df['DATE'] = pd.to_datetime(df['DATE'])
df.set_index('DATE', inplace=True)
WARNING:matplotlib.font_manager:findfont: Font family 'Arial' not found.
Collecting quantstats
Downloading QuantStats-0.0.64-py2.py3-none-any.whl.metadata (8.9 kB)
Requirement already satisfied: pandas>=0.24.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (2.2.2)
Requirement already satisfied: numpy>=1.16.5 in /usr/local/lib/python3.11/dist-packages (from quantstats) (1.26.4)
Requirement already satisfied: seaborn>=0.9.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (0.13.2)
Requirement already satisfied: matplotlib>=3.0.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (3.10.0)
Requirement already satisfied: scipy>=1.2.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (1.13.1)
Requirement already satisfied: tabulate>=0.8.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (0.9.0)
Requirement already satisfied: yfinance>=0.1.70 in /usr/local/lib/python3.11/dist-packages (from quantstats) (0.2.52)
Requirement already satisfied: python-dateutil>=2.0 in /usr/local/lib/python3.11/dist-packages (from quantstats) (2.8.2)
Requirement already satisfied: remaining matplotlib, pandas, python-dateutil and yfinance dependencies (contourpy, cycler, fonttools, kiwisolver, packaging, pillow, pyparsing, pytz, tzdata, six, requests, multitasking, lxml, platformdirs, frozendict, peewee, beautifulsoup4, html5lib, soupsieve, typing-extensions, webencodings, charset-normalizer, idna, urllib3, certifi)
Downloading QuantStats-0.0.64-py2.py3-none-any.whl (45 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 45.8/45.8 kB 2.9 MB/s eta 0:00:00
Installing collected packages: quantstats
Successfully installed quantstats-0.0.64
%matplotlib inline
import quantstats as qs
import pandas as pd
# Ensure the DATE column is in datetime format and set it as the index
df['DATE'] = pd.to_datetime(df['DATE'])
df.set_index('DATE', inplace=True)
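# (assumed) quantstats calls that produce the output below; the original lines were
# cut off in the export — returns are taken from the RETURN column and converted
# from percent to decimal
qs.extend_pandas()
returns = df['RETURN'] / 100
print(f"Sharpe Ratio: {qs.stats.sharpe(returns):.2f}")
print(f"Sharpe Ratio (using extend_pandas): {returns.sharpe():.2f}")
qs.reports.full(returns)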
Sharpe Ratio: 4.51
Sharpe Ratio (using extend_pandas): 4.51
[Performance Metrics]
Sharpe 4.51
Prob. Sharpe Ratio 100.0%
Smart Sharpe 4.41
Sortino 8.85
Smart Sortino 8.65
Sortino/√2 6.26
Smart Sortino/√2 6.12
Omega 1.91
MTD 5.76%
3M 16.11%
6M 46.87%
YTD 5.76%
1Y 70.15%
3Y (ann.) 44.6%
5Y (ann.) 46.23%
10Y (ann.) 46.23%
All-time (ann.) 46.23%
[Worst 5 Drawdowns]
[Strategy Visualization]
via Matplotlib
WARNING:matplotlib.font_manager:findfont: Font family 'Arial' not found.