Add portfolio analysis script and update dependencies

- Created a new Python script for portfolio analysis using historical stock data.
- Implemented functions for normality testing of prices and returns.
- Added histogram plots for prices and returns.
- Included logic for random portfolio allocation and efficient frontier calculation.
- Updated `pyproject.toml` to include `pandas-stubs` for type hinting support.
- Modified `uv.lock` to reflect the addition of `pandas-stubs` and its dependencies.
2025-10-08 11:08:26 +02:00
parent 04d8b4cf14
commit a4adf0a392
4 changed files with 8778 additions and 2 deletions

File diff suppressed because it is too large


@@ -0,0 +1,144 @@
"""
Created on Thu Oct 3 15:57:44 2024
@author: turinici
"""
"""
This program uses historical data in the format in :
https://turinici.com/wp-content/uploads/cours/common/close_cac40_historical.csv
It can also be downloaded form yahoo finance in daily
prices (at least the "close") if possible at lest 5 years
Idea: use yahoo e.g., yfinance package
"pip install yfinance"
Then the code does :
1'/ order by increasing date
2/ plot price histogram and returns (with "log" and/or "actuarial")
3/ test normality of : prices, log returns, actuarial returns
for instance can use scipy.stats.normaltest
4/ shows the random versus optimal results
TODO : replace "None" by what is required to implement the task.
"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.stats import kstest, normaltest # type: ignore
#from scipy.special import softmax
# take the CSV from the course website: we assume it is available locally
data = pd.read_csv('M2/Risks Management/TP1/close_cac40_historical.csv', sep=';', index_col='Date')
data.head()
# order by increasing date, keep variable 'data'
data = data.sort_index(ascending=True)
data.head()
data.tail()
# plot histogram of prices
_ = data.hist(bins=30, figsize=(15, 15))
def normality_test(data, kolmogorov_smirnov=False, level=0.01, print_results=True):
    """
    Tests normality of each column of the dataframe "data".
    Inputs:
    kolmogorov_smirnov = False: use "normaltest", otherwise use "kstest", both from scipy.stats
    level = p-value threshold for the conclusions
    Outputs: the numbers of normal / not normal columns
    """
    pvalues = []
    for col in data.keys():
        values = data[col].dropna()
        if kolmogorov_smirnov:
            pv = kstest(values, 'norm', args=(values.mean(), values.std())).pvalue
        else:
            pv = normaltest(values).pvalue
        pvalues.append(pv)
        res = 'normal' if pv >= level else 'not normal'
        if print_results:
            print(col, ": pval =", pv, "res =", res)
    normalok = sum(1 for pv in pvalues if pv >= level)
    normalnotok = sum(1 for pv in pvalues if pv < level)
    if print_results:
        print("no. of normal =", normalok)
        print("no. of not normal =", normalnotok)
    return normalok, normalnotok
normality_test(data)
# use 'data' to compute returns
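# definitions: actuarial (simple) return = P_t / P_{t-1} - 1, log return = ln(P_t / P_{t-1})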
# returns = data.pct_change() #actuarial
returns = np.log(data/data.shift(1))
_ = returns.hist(bins=int(np.sqrt(returns.shape[0])), figsize=(15, 15))  # type: ignore
normality_test(returns.tail(25*3))  # type: ignore  # test the last ~3 months
###########################################################
print('normality tests for increments, not returns!!')
increments = data - data.shift(1)
_ = increments.hist(bins=int(np.sqrt(increments.shape[0])), figsize=(15, 15))
normality_test(increments.tail(25*3))
########################################################################
#%%
nb = 10  # will work with nb stocks
all_returns = returns.copy()  # backup
nb_all = all_returns.shape[1]
if nb > nb_all:
    print("too many stocks requested, reverting to the maximum available")
    nb = nb_all
#choose the stock names
nb_stocks_names = np.random.choice(all_returns.keys(), nb, replace=False) # type: ignore
returns_small = all_returns.loc[:, nb_stocks_names] # type: ignore
#%%
#compute avg and cov of returns
mean_returns = returns_small.mean()
cov_matrix = returns_small.cov()
rdt_list = []
std_list = []
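# each random weight vector w defines a portfolio with expected return w @ mean_returns
# and standard deviation sqrt(w @ cov_matrix @ w); sampling many such w's produces the
# cloud of points plotted against the efficient frontier below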
for _ in range(500):
    # sample at random some "allocation" and normalise it so the weights sum to 1
    allocation = np.random.random(nb)
    allocation = allocation / allocation.sum()
    rdt_port = allocation @ mean_returns
    std_port = np.sqrt(allocation @ cov_matrix @ allocation)
    rdt_list.append(rdt_port)
    std_list.append(std_port)
inverse_cov = np.linalg.inv(cov_matrix)
# compute and draw the efficient frontier on the same graph
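# with a = 1' C^{-1} 1, b = 1' C^{-1} mu and c = mu' C^{-1} mu (C = covariance matrix,
# mu = mean returns), the standard minimum-variance frontier is
#     r(sigma) = b/a + sqrt(c - b^2/a) * sqrt(sigma^2 - 1/a),   for sigma >= 1/sqrt(a),
# where 1/sqrt(a) is the standard deviation of the minimum-variance portfolio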
onesM = np.ones_like(mean_returns)
# compute 'a', 'b' and 'c' using the formulas from the course
a = onesM.T @ inverse_cov @ onesM
b = onesM.T @ inverse_cov @ mean_returns
c = mean_returns.T @ inverse_cov @ mean_returns
# plot the frontier, starting just above the minimum attainable std 1/sqrt(a)
sigmarange = np.linspace(1. / np.sqrt(a) + 1.e-10, 1.1 * np.max(std_list), 47)
# compute the return of the optimal portfolio for each sigma in sigmarange,
# using the "factor" auxiliary variable
factor = np.sqrt(sigmarange**2 - 1. / a)
optimal_return = b / a + np.sqrt(c - b**2 / a) * factor
fig = plt.figure('perf')
plt.scatter(std_list, rdt_list)
plt.plot(sigmarange, optimal_return, 'r-')
plt.xlabel('std')
plt.ylabel('rdt')
#plt.xlim([0,.2])
#plt.ylim([-.05,.05])
plt.show()
# %%


@@ -12,6 +12,7 @@ dependencies = [
"numpy>=2.2.5",
"opencv-python>=4.11.0.86",
"pandas>=2.2.3",
"pandas-stubs>=2.3.2.250926",
"plotly>=6.3.0",
"scikit-learn>=1.6.1",
"scipy>=1.15.2",
@@ -48,6 +49,7 @@ select = [
# Disable certain rules
ignore = [
"E501", # line too long, handled by the formatter
"E402", # module-level import not at top of file
]
# Exclude certain files or directories
@@ -79,5 +81,5 @@ fixable = ["ALL"]
unfixable = []
# Import formatting
[isort]
known-third-party = ["pydantic", "django"]
[tool.isort]
known_third_party = ["pydantic", "django"]

uv.lock

@@ -1005,6 +1005,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/5f/b38085618b950b79d2d9164a711c52b10aefc0ae6833b96f626b7021b2ed/pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a", size = 13098436, upload-time = "2024-09-20T13:09:48.112Z" },
]
[[package]]
name = "pandas-stubs"
version = "2.3.2.250926"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "types-pytz" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1b/3b/32be58a125db39d0b5f62cc93795f32b5bb2915bd5c4a46f0e35171985e2/pandas_stubs-2.3.2.250926.tar.gz", hash = "sha256:c64b9932760ceefb96a3222b953e6a251321a9832a28548be6506df473a66406", size = 102147, upload-time = "2025-09-26T19:50:39.522Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/40/96/1e4a035eaf4dce9610aac6e43026d0c6baa05773daf6d21e635a4fe19e21/pandas_stubs-2.3.2.250926-py3-none-any.whl", hash = "sha256:81121818453dcfe00f45c852f4dceee043640b813830f6e7bd084a4ef7ff7270", size = 159995, upload-time = "2025-09-26T19:50:38.241Z" },
]
[[package]]
name = "parso"
version = "0.8.4"
@@ -1512,6 +1525,7 @@ dependencies = [
{ name = "numpy" },
{ name = "opencv-python" },
{ name = "pandas" },
{ name = "pandas-stubs" },
{ name = "plotly" },
{ name = "scikit-learn" },
{ name = "scipy" },
@@ -1535,6 +1549,7 @@ requires-dist = [
{ name = "numpy", specifier = ">=2.2.5" },
{ name = "opencv-python", specifier = ">=4.11.0.86" },
{ name = "pandas", specifier = ">=2.2.3" },
{ name = "pandas-stubs", specifier = ">=2.3.2.250926" },
{ name = "plotly", specifier = ">=6.3.0" },
{ name = "scikit-learn", specifier = ">=1.6.1" },
{ name = "scipy", specifier = ">=1.15.2" },
@@ -1662,6 +1677,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
]
[[package]]
name = "types-pytz"
version = "2025.2.0.20250809"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876, upload-time = "2025-08-09T03:14:17.453Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = "sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095, upload-time = "2025-08-09T03:14:16.674Z" },
]
[[package]]
name = "typing-extensions"
version = "4.15.0"