antitheft159 committed on
Commit 87bdb99 · verified · 1 parent: cd87d0b

Upload bayburtanalysis_159.py

Files changed (1)
  1. bayburtanalysis_159.py +487 -0
bayburtanalysis_159.py ADDED
@@ -0,0 +1,487 @@
# -*- coding: utf-8 -*-
"""bayburtanalysis.159

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1i3xf37d6YszBy480hNM0EGmK3u-RtMJB
"""

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from datetime import datetime

from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.arima.model import ARIMA
import prophet

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
from sklearn.ensemble import RandomForestRegressor

from textblob import TextBlob
import nltk
from nltk.sentiment.vader import SentimentIntensityAnalyzer
nltk.download('vader_lexicon')

import plotly.express as px
import plotly.graph_objs as go
import plotly.figure_factory as ff

import warnings
warnings.filterwarnings('ignore')

print("Very well you may continue")

big_tech_companies = pd.read_csv('big_tech_companies.csv')
big_tech_stock_prices = pd.read_csv('big_tech_stock_prices.csv')

print("Big Tech Companies Dataset:")
print(big_tech_companies.head())

print("\nBig Tech Stock Prices Dataset:")
print(big_tech_stock_prices.head())

print("\nBig Tech Companies Dataset Info:")
big_tech_companies.info()

print("\nBig Tech Stock Prices Dataset Info:")
big_tech_stock_prices.info()

print("\nBig Tech Companies Dataset Description:")
print(big_tech_companies.describe())

print("\nBig Tech Stock Prices Dataset Description:")
print(big_tech_stock_prices.describe())

print("\nUnique Companies in Big Tech Companies Dataset:")
print(big_tech_companies['company'].nunique())

print("\nUnique Stock Symbols in Big Tech Stock Prices Dataset:")
print(big_tech_stock_prices['stock_symbol'].nunique())

print("\nMissing Values in Big Tech Companies Dataset:")
print(big_tech_companies.isnull().sum())

print("\nMissing Values in Big Tech Stock Prices Dataset:")
print(big_tech_stock_prices.isnull().sum())

print("\nStock Symbol Counts in Big Tech Stock Prices Dataset:")
print(big_tech_stock_prices['stock_symbol'].value_counts())

big_tech_stock_prices['date'] = pd.to_datetime(big_tech_stock_prices['date'])

plt.figure(figsize=(14, 7))
sns.lineplot(data=big_tech_stock_prices, x='date', y='close', hue='stock_symbol')
plt.title('Stock Prices Over Time')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.legend(title='Stock Symbol')
plt.show()

plt.figure(figsize=(14, 7))
sns.lineplot(data=big_tech_stock_prices, x='date', y='volume', hue='stock_symbol')
plt.title('Trading Volume Over Time')
plt.xlabel('Date')
plt.ylabel('Volume')
plt.legend(title='Stock Symbol')
plt.show()

plt.figure(figsize=(14, 7))
sns.boxplot(data=big_tech_stock_prices, x='stock_symbol', y='close')
plt.title('Distribution of Closing Prices by Stock Symbol')
plt.xlabel('Stock Symbol')
plt.ylabel('Close Price')
plt.show()

apple_stock = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == 'AAPL'].copy()
apple_stock.set_index('date', inplace=True)

decomposition = seasonal_decompose(apple_stock['close'], model='multiplicative', period=365)
fig = decomposition.plot()
fig.set_size_inches(14, 10)
plt.show()

plt.figure(figsize=(14, 7))
apple_stock['close'].plot()
plt.title('Apple Closing Prices')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.show()

apple_stock['rolling_mean'] = apple_stock['close'].rolling(window=30).mean()

plt.figure(figsize=(14, 7))
apple_stock[['close', 'rolling_mean']].plot()
plt.title('Apple Closing Prices and 30-Day Moving Average')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.show()

pivot_table = big_tech_stock_prices.pivot(index='date', columns='stock_symbol', values='close')
correlation_matrix = pivot_table.corr()

plt.figure(figsize=(12, 8))
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', linewidths=0.5)
plt.title('Correlation Matrix of Stock Closing Prices')
plt.show()

big_tech_stock_prices_2020 = big_tech_stock_prices[
    (big_tech_stock_prices['date'] >= '2020-01-01') &
    (big_tech_stock_prices['date'] <= '2020-12-31')]

plt.figure(figsize=(14, 7))
sns.lineplot(data=big_tech_stock_prices_2020, x='date', y='close', hue='stock_symbol')
plt.title('Stock Prices During 2020')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.legend(title='Stock Symbol')
plt.show()

big_tech_stock_prices['year'] = big_tech_stock_prices['date'].dt.year

yearly_avg_prices = big_tech_stock_prices.groupby(['year', 'stock_symbol']).mean(numeric_only=True).reset_index()

plt.figure(figsize=(14, 7))
sns.lineplot(data=yearly_avg_prices, x='year', y='close', hue='stock_symbol')
plt.title('Yearly Average Closing Prices')
plt.xlabel('Year')
plt.ylabel('Average Close Price')
plt.legend(title='Stock Symbol')
plt.show()

big_tech_stock_prices['price_change'] = big_tech_stock_prices.groupby('stock_symbol')['close'].pct_change()

plt.figure(figsize=(14, 10))

sns.histplot(big_tech_stock_prices['price_change'].dropna(), bins=100, kde=True)
plt.title('Histogram of Daily Price Changes for All Stocks')
plt.xlabel('Daily Price Change')
plt.ylabel('Frequency')
plt.show()

unique_symbols = big_tech_stock_prices['stock_symbol'].unique()

for symbol in unique_symbols:
    plt.figure(figsize=(14, 7))
    sns.histplot(big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol]['price_change'].dropna(), bins=100, kde=True)
    plt.title(f'Histogram of Daily Price Changes for {symbol}')
    plt.xlabel('Daily Price Change')
    plt.ylabel('Frequency')
    plt.show()

volatility = big_tech_stock_prices.groupby('stock_symbol')['price_change'].std().reset_index()
volatility.columns = ['stock_symbol', 'volatility']

plt.figure(figsize=(14, 7))
sns.barplot(data=volatility, x='stock_symbol', y='volatility')
plt.title('Stock Price Volatility')
plt.xlabel('Stock Symbol')
plt.ylabel('Volatility (Standard Deviation of Daily Price Changes)')
plt.show()

# year-over-year change computed within each symbol, so changes never cross symbol boundaries
yearly_price_change = (
    big_tech_stock_prices.groupby(['year', 'stock_symbol'])['close'].mean()
    .groupby(level='stock_symbol').pct_change()
    .reset_index()
)
yearly_price_change = yearly_price_change.dropna()

plt.figure(figsize=(14, 7))
sns.lineplot(data=yearly_price_change, x='year', y='close', hue='stock_symbol', marker='o')
plt.title('Yearly Percentage Change in Average Closing Prices')
plt.xlabel('Year')
plt.ylabel('Percentage Change in Average Close Price')
plt.legend(title='Stock Symbol')
plt.show()

model = ARIMA(apple_stock['close'], order=(5, 1, 0))

model_fit = model.fit()
print(model_fit.summary())

plt.figure(figsize=(14, 7))
plt.plot(apple_stock['close'], label='Original')
plt.plot(model_fit.fittedvalues, color='red', label='Fitted Values')
plt.title('ARIMA Model Fit')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.legend()
plt.show()

forecast = model_fit.get_forecast(steps=30)
# forecast index starts the day after the last observed date
forecast_index = pd.date_range(start=apple_stock.index[-1] + pd.Timedelta(days=1), periods=30, freq='D')
forecast_mean = forecast.predicted_mean
forecast_conf_int = forecast.conf_int()

plt.figure(figsize=(14, 7))
plt.plot(apple_stock['close'], label='Original')
plt.plot(forecast_index, forecast_mean, color='red', label='Forecast')
plt.fill_between(forecast_index, forecast_conf_int.iloc[:, 0], forecast_conf_int.iloc[:, 1], color='pink', alpha=0.3)
plt.title('ARIMA Model Forecast')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.legend()
plt.show()
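
# The `prophet` import above is otherwise unused; the sketch below is one way it
# could be applied to the AAPL series. The 90-day horizon and the ds/y column
# renaming (Prophet's required input schema) are assumptions for illustration,
# not part of the original analysis.
prophet_df = apple_stock.reset_index()[['date', 'close']].rename(columns={'date': 'ds', 'close': 'y'})
prophet_model = prophet.Prophet(daily_seasonality=False)
prophet_model.fit(prophet_df)
future = prophet_model.make_future_dataframe(periods=90)
prophet_forecast = prophet_model.predict(future)
fig = prophet_model.plot(prophet_forecast)
plt.title('AAPL Close Price Forecast with Prophet (sketch)')
plt.show()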

unique_symbols = big_tech_stock_prices['stock_symbol'].unique()

for symbol in unique_symbols:
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()
    stock_data.set_index('date', inplace=True)

    print(f"\n### {symbol} ###")

    model = ARIMA(stock_data['close'], order=(5, 1, 0))
    model_fit = model.fit()
    print(model_fit.summary())

    plt.figure(figsize=(14, 7))
    plt.plot(stock_data['close'], label='Original')
    plt.plot(model_fit.fittedvalues, color='red', label='Fitted Values')
    plt.title(f'{symbol} ARIMA Model Fit')
    plt.xlabel('Date')
    plt.ylabel('Close Price')
    plt.legend()
    plt.show()

    forecast = model_fit.get_forecast(steps=30)
    forecast_index = pd.date_range(start=stock_data.index[-1] + pd.Timedelta(days=1), periods=30, freq='D')
    forecast_mean = forecast.predicted_mean
    forecast_conf_int = forecast.conf_int()

    plt.figure(figsize=(14, 7))
    plt.plot(stock_data['close'], label='Original')
    plt.plot(forecast_index, forecast_mean, color='red', label='Forecast')
    plt.fill_between(forecast_index, forecast_conf_int.iloc[:, 0], forecast_conf_int.iloc[:, 1], color='pink', alpha=0.3)
    plt.title(f'{symbol} ARIMA Model Forecast')
    plt.xlabel('Date')
    plt.ylabel('Close Price')
    plt.legend()
    plt.show()
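
# A rough holdout check for the ARIMA specification used above, relying on the
# otherwise unused mean_squared_error import. Holding out the last 30 observations
# and reusing the (5, 1, 0) order are assumptions for illustration only.
train, test = apple_stock['close'][:-30], apple_stock['close'][-30:]
holdout_model = ARIMA(train, order=(5, 1, 0)).fit()
holdout_pred = holdout_model.forecast(steps=30)
rmse = np.sqrt(mean_squared_error(test, holdout_pred))
print(f"AAPL ARIMA(5,1,0) 30-step holdout RMSE: {rmse:.2f}")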

big_tech_stock_prices['daily_return'] = big_tech_stock_prices.groupby('stock_symbol')['close'].pct_change()

mean_returns = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].mean()
volatilities = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].std()

risk_return_df = pd.DataFrame({'mean_return': mean_returns, 'volatility': volatilities})
print(risk_return_df)

mean_returns = big_tech_stock_prices.groupby('stock_symbol')['daily_return'].mean()
cov_matrix = big_tech_stock_prices.pivot_table(index='date', columns='stock_symbol', values='daily_return').cov()

num_portfolios = 10000
results = np.zeros((4, num_portfolios))
weights_record = []

np.random.seed(42)

for i in range(num_portfolios):
    weights = np.random.random(len(mean_returns))
    weights /= np.sum(weights)
    weights_record.append(weights)
    portfolio_return = np.dot(weights, mean_returns)
    portfolio_stddev = np.sqrt(np.dot(weights.T, np.dot(cov_matrix, weights)))
    results[0, i] = portfolio_return
    results[1, i] = portfolio_stddev
    results[2, i] = results[0, i] / results[1, i]
    results[3, i] = i  # keep the simulation index so the weights can be looked up later

results_frame = pd.DataFrame(results.T, columns=['Return', 'Risk', 'Sharpe Ratio', 'Index'])

max_sharpe_idx = results_frame['Sharpe Ratio'].idxmax()
max_sharpe_portfolio = results_frame.iloc[max_sharpe_idx]
max_sharpe_weights = weights_record[int(max_sharpe_portfolio['Index'])]

min_risk_idx = results_frame['Risk'].idxmin()
min_risk_portfolio = results_frame.iloc[min_risk_idx]
min_risk_weights = weights_record[int(min_risk_portfolio['Index'])]

plt.figure(figsize=(10, 6))
plt.scatter(results_frame['Risk'], results_frame['Return'], c=results_frame['Sharpe Ratio'], cmap='viridis')
plt.colorbar(label='Sharpe Ratio')
plt.scatter(max_sharpe_portfolio['Risk'], max_sharpe_portfolio['Return'], marker='*', color='r', s=200, label='Max Sharpe Ratio')
plt.scatter(min_risk_portfolio['Risk'], min_risk_portfolio['Return'], marker='*', color='b', s=200, label='Min Risk')
plt.title('Portfolio Optimization based on Efficient Frontier')
plt.xlabel('Risk (Standard Deviation)')
plt.ylabel('Return')
plt.legend()
plt.show()

print("Maximum Sharpe Ratio Portfolio Allocation\n")
print("Return:", max_sharpe_portfolio['Return'])
print("Risk:", max_sharpe_portfolio['Risk'])
print("Sharpe Ratio:", max_sharpe_portfolio['Sharpe Ratio'])
print("\nWeights:\n")
for i, txt in enumerate(mean_returns.index):
    print(f"{txt}: {max_sharpe_weights[i]}")

print("\nMinimum Risk Portfolio Allocation\n")
print("Return:", min_risk_portfolio['Return'])
print("Risk:", min_risk_portfolio['Risk'])
print("\nWeights:\n")
for i, txt in enumerate(mean_returns.index):
    print(f"{txt}: {min_risk_weights[i]}")

big_tech_stock_price = pd.read_csv('big_tech_stock_prices.csv')
macro_data = pd.read_csv('DATA.csv')

print(macro_data.columns)

macro_data = macro_data.rename(columns={
    'UNRATE(%)': 'unemployment_rate',
    'CPIALLITEMS': 'cpi',
    'INFLATION(%)': 'inflation_rate',
    'MORTGAGE INT. MONTHLY AVG(%)': 'mortgage_interest_rate',
    'CORP. BOND YIELD(%)': 'corporate_bond_yield'
})

macro_data['DATE'] = pd.to_datetime(macro_data['DATE'])

macro_data.rename(columns={'DATE': 'date'}, inplace=True)

big_tech_stock_price['date'] = pd.to_datetime(big_tech_stock_price['date'])

merged_data = pd.merge(big_tech_stock_prices, macro_data, on='date', how='inner')

print(merged_data.head())
print(merged_data.columns)

correlation_matrix = merged_data[['close', 'unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']].corr()
print(correlation_matrix)

plt.figure(figsize=(10, 6))
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', linewidths=0.5)
plt.title('Correlation Matrix of Stock Prices and Macro-Economic Indicators')
plt.show()

plt.figure(figsize=(14, 7))
sns.lineplot(data=merged_data, x='date', y='close', hue='stock_symbol')
plt.title('Stock Prices Over Time')
plt.xlabel('Date')
plt.ylabel('Close Price')
plt.show()

X = merged_data[['unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']]
y = merged_data['close']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

model = LinearRegression()
model.fit(X_train, y_train)

y_pred = model.predict(X_test)
r2 = r2_score(y_test, y_pred)  # avoid shadowing the imported r2_score function

print(f"R^2 Score: {r2}")

coefficients = pd.DataFrame(model.coef_, X.columns, columns=['Coefficient'])
print(coefficients)
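
# The R^2 above can be complemented with an error metric on the same holdout set;
# this sketch uses the mean_squared_error import that the script otherwise leaves unused.
lin_rmse = np.sqrt(mean_squared_error(y_test, y_pred))
print(f"RMSE: {lin_rmse:.2f}")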

for symbol in unique_symbols:
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()
    stock_data.set_index('date', inplace=True)

    stock_data['z_score'] = (stock_data['close'] - stock_data['close'].mean()) / stock_data['close'].std()

    stock_data['anomaly'] = np.where(stock_data['z_score'].abs() > 3, True, False)

    plt.figure(figsize=(14, 7))
    plt.plot(stock_data.index, stock_data['close'], label='Close Price')
    plt.scatter(stock_data[stock_data['anomaly']].index, stock_data[stock_data['anomaly']]['close'], color='red', label='Anomaly')
    plt.title(f'{symbol} Stock Price with Anomalies')
    plt.xlabel('Date')
    plt.ylabel('Close Price')
    plt.legend()
    plt.show()

    anomalies = stock_data[stock_data['anomaly']]
    print(f"Anomalies for {symbol}:")
    print(anomalies[['close', 'z_score']])
    print("\n")

!pip install arch

from arch import arch_model

for symbol in unique_symbols:
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()
    stock_data.set_index('date', inplace=True)

    stock_data['return'] = stock_data['close'].pct_change()

    model = arch_model(stock_data['return'].dropna(), vol='Garch', p=1, q=1)
    model_fit = model.fit(disp='off')
    print(f"Summary for {symbol}:")
    print(model_fit.summary())

    volatility = model_fit.conditional_volatility

    plt.figure(figsize=(14, 7))
    plt.plot(volatility)
    plt.title(f'{symbol} Stock Volatility')
    plt.xlabel('Date')
    plt.ylabel('Volatility')
    plt.show()

    forecast_horizon = 30
    forecast = model_fit.forecast(horizon=forecast_horizon)
    forecast_volatility = np.sqrt(forecast.variance.values[-1, :])

    plt.figure(figsize=(14, 7))
    plt.plot(range(1, forecast_horizon + 1), forecast_volatility)
    plt.title(f'{symbol} Forecasted Volatility for Next 30 Days')
    plt.xlabel('Days')
    plt.ylabel('Volatility')
    plt.show()

for symbol in unique_symbols:
    stock_data = big_tech_stock_prices[big_tech_stock_prices['stock_symbol'] == symbol].copy()
    stock_data.set_index('date', inplace=True)

    stock_data['SMA50'] = stock_data['close'].rolling(window=50).mean()
    stock_data['SMA200'] = stock_data['close'].rolling(window=200).mean()

    # long (1.0) when the 50-day SMA is above the 200-day SMA, flat (0.0) otherwise
    stock_data['Signal'] = np.where(stock_data['SMA50'] > stock_data['SMA200'], 1.0, 0.0)
    stock_data['Position'] = stock_data['Signal'].diff()

    plt.figure(figsize=(14, 7))
    plt.plot(stock_data['close'], label='Close Price')
    plt.plot(stock_data['SMA50'], label='50-day SMA', alpha=0.7)
    plt.plot(stock_data['SMA200'], label='200-day SMA', alpha=0.7)
    plt.plot(stock_data[stock_data['Position'] == 1].index, stock_data['SMA50'][stock_data['Position'] == 1], '^', markersize=10, color='g', lw=0, label='Buy Signal')
    plt.plot(stock_data[stock_data['Position'] == -1].index, stock_data['SMA50'][stock_data['Position'] == -1], 'v', markersize=10, color='r', lw=0, label='Sell Signal')
    plt.title(f'{symbol} - SMA Crossover Strategy')
    plt.xlabel('Date')
    plt.ylabel('Close Price')
    plt.legend()
    plt.show()
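
# A quick sanity check of the crossover signal for the last symbol processed above:
# compare the strategy's cumulative growth with buy-and-hold. Shifting the signal by
# one day (trade on the next bar) is an assumption to avoid look-ahead bias, and
# transaction costs are ignored; this is a sketch, not part of the original analysis.
daily_ret = stock_data['close'].pct_change()
strategy_ret = daily_ret * stock_data['Signal'].shift(1)
cumulative = pd.DataFrame({
    'Buy & Hold': (1 + daily_ret.fillna(0)).cumprod(),
    'SMA Crossover': (1 + strategy_ret.fillna(0)).cumprod(),
})
print(f"{symbol} cumulative growth of $1:")
print(cumulative.iloc[-1])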

X = merged_data[['unemployment_rate', 'cpi', 'inflation_rate', 'mortgage_interest_rate', 'corporate_bond_yield']]
y = merged_data['close']

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

model = RandomForestRegressor()
model.fit(X_train, y_train)

!pip install shap
import shap

explainer = shap.TreeExplainer(model)
shap_values = explainer.shap_values(X_test)

shap.summary_plot(shap_values, X_test)
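
# As a cross-check on the SHAP summary, the random forest's built-in impurity-based
# feature importances can be listed as well; this is a supplementary sketch, not
# part of the original analysis.
importances = pd.Series(model.feature_importances_, index=X.columns).sort_values(ascending=False)
print("Random forest feature importances:")
print(importances)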