In [2]:
# Description: this program uses an artificial recurrent neural network called Long Short Term Memory (LSTM)
# to predict the closing stock price of a corporation (IBM)
# pip install pandas_datareader
# pip install tensorflow
import math
import pandas_datareader as web
import numpy as np
import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import Dense, LSTM
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')
In [11]:
# get stock data from yahoo
df = web.DataReader('IBM', data_source='yahoo', start='2012-01-01', end='2021-04-25')
df
Out[11]:
High | Low | Open | Close | Volume | Adj Close | |
---|---|---|---|---|---|---|
Date | ||||||
2012-01-03 | 188.710007 | 186.000000 | 186.729996 | 186.300003 | 5646000.0 | 134.881241 |
2012-01-04 | 186.330002 | 184.940002 | 185.570007 | 185.539993 | 4346700.0 | 134.330948 |
2012-01-05 | 185.029999 | 183.100006 | 184.809998 | 184.660004 | 4463100.0 | 133.693863 |
2012-01-06 | 184.479996 | 182.309998 | 184.389999 | 182.539993 | 4897100.0 | 132.158981 |
2012-01-09 | 182.270004 | 180.270004 | 182.199997 | 181.589996 | 5201200.0 | 131.471176 |
… | … | … | … | … | … | … |
2021-04-19 | 133.820007 | 132.580002 | 133.600006 | 133.119995 | 8198600.0 | 133.119995 |
2021-04-20 | 139.770004 | 136.699997 | 137.070007 | 138.160004 | 15480600.0 | 138.160004 |
2021-04-21 | 143.729996 | 137.710007 | 138.059998 | 143.550003 | 11909000.0 | 143.550003 |
2021-04-22 | 144.740005 | 141.000000 | 143.699997 | 141.279999 | 7101400.0 | 141.279999 |
2021-04-23 | 143.610001 | 140.949997 | 141.309998 | 142.429993 | 4556000.0 | 142.429993 |
2342 rows × 6 columns
In [12]:
df.shape
Out[12]:
(2342, 6)
In [13]:
# Visualize the closing price history
plt.figure(figsize=(16,8))
plt.title('Close Price History')
plt.plot(df['Close'])
plt.xlabel('Date', fontsize=18)
plt.ylabel('Close Price USD$', fontsize=18)
plt.show()
In [15]:
# create a new dataframe with only the Close colunm
data = df.filter(['Close'])
dataset = data.values
training_data_len = math.ceil(len(dataset) * 0.8)
training_data_len
Out[15]:
1874
In [16]:
# Scale the data
scaler = MinMaxScaler(feature_range=(0,1))
scaled_data = scaler.fit_transform(dataset)
scaled_data
Out[16]:
array([[0.75625879], [0.74997928], [0.74270844], ..., [0.4030406 ], [0.38428489], [0.39378661]])
In [19]:
# create the training data set
# create the scaled training data set
train_data = scaled_data[0:training_data_len, :]
x_train = []
y_train = []
for i in range(60, len(train_data)):
x_train.append(train_data[i-60:i, 0])
y_train.append(train_data[i, 0])
if i<=60:
print(x_train)
print(y_train)
print
[array([0.75625879, 0.74997928, 0.74270844, 0.72519203, 0.71734277, 0.7150293 , 0.72337442, 0.70874991, 0.69726516, 0.70420556, 0.7130464 , 0.70850205, 0.77460136, 0.78666441, 0.8027761 , 0.80112364, 0.79492683, 0.79063046, 0.80748573, 0.80831202, 0.80847718, 0.79947118, 0.81690488, 0.81012977, 0.81450883, 0.81120379, 0.81269109, 0.80682472, 0.80847718, 0.80517227, 0.80542013, 0.81178222, 0.81508714, 0.81483928, 0.8188052 , 0.84970667, 0.85094598, 0.84904566, 0.85276372, 0.84243571, 0.84904566, 0.85962154, 0.87490706, 0.84681478, 0.85102869, 0.86788396, 0.87457649, 0.87771625, 0.90068575, 0.90845244, 0.91902832, 0.9191109 , 0.91671485, 0.90456909, 0.90820458, 0.91481453, 0.91473183, 0.93365283, 0.92877791, 0.92968678])] [0.9377840338711563]
In [20]:
# convert the x_train and y_train to numpy arrays
x_train, y_train = np.array(x_train), np.array(y_train)
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_train.shape
Out[20]:
(1814, 60, 1)
In [21]:
# build the LSTM model
model = Sequential()
model.add(LSTM(50, return_sequences=True, input_shape=(x_train.shape[1], 1)))
model.add(LSTM(50, return_sequences=False))
model.add(Dense(25))
model.add(Dense(1))
In [23]:
# compile the model
model.compile(optimizer='adam', loss='mean_squared_error')
In [24]:
# train the model
model.fit(x_train, y_train, batch_size=1, epochs=1)
1814/1814 [==============================] - 72s 38ms/step - loss: 0.0057
Out[24]:
<tensorflow.python.keras.callbacks.History at 0x7f0eb01d3ad0>
In [25]:
# create the testing data set
# create a new array containing scaled values from index 1814 to 2342
test_data = scaled_data[training_data_len - 60: , :]
x_test = []
y_test = dataset[training_data_len: , :]
for i in range(60, len(test_data)):
x_test.append(test_data[i-60:i, 0])
In [26]:
x_test = np.array(x_test)
In [27]:
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
In [28]:
#get teh models predicted price values
predictions = model.predict(x_test)
predictions = scaler.inverse_transform(predictions)
In [29]:
# get the root mean squared error (RMSE)
rmse = np.sqrt(np.mean(predictions - y_test)**2)
rmse
Out[29]:
1.0317235441289396
In [30]:
# plot the data
train = data[:training_data_len]
valid = data[training_data_len:]
valid['Predictions'] = predictions
plt.figure(figsize=(16,8))
plt.title('Model')
plt.xlabel('Date')
plt.ylabel('Close Price USD$', fontsize=18)
plt.plot(train['Close'])
plt.plot(valid[['Close', 'Predictions']])
plt.legend(['Train', 'Val', 'Predictions'], loc='lower right')
plt.show()
/home/ubuntu/anaconda3/lib/python3.7/site-packages/ipykernel_launcher.py:4: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy after removing the cwd from sys.path.
In [31]:
valid
Out[31]:
Close | Predictions | |
---|---|---|
Date | ||
2019-06-17 | 134.949997 | 133.716370 |
2019-06-18 | 136.380005 | 133.921265 |
2019-06-19 | 137.080002 | 134.221985 |
2019-06-20 | 138.850006 | 134.618698 |
2019-06-21 | 139.199997 | 135.268433 |
… | … | … |
2021-04-19 | 133.119995 | 132.004868 |
2021-04-20 | 138.160004 | 131.903015 |
2021-04-21 | 143.550003 | 132.635849 |
2021-04-22 | 141.279999 | 134.464783 |
2021-04-23 | 142.429993 | 136.136017 |
468 rows × 2 columns
In [35]:
# get the quote
apple_quote = web.DataReader('IBM', data_source='yahoo', start='2012-01-01', end='2021-04-25')
new_df = apple_quote.filter(['Close'])
last_60_days = new_df[-60:].values
#scale the data to be values between 0 and 1
last_60_days_scaled = scaler.transform(last_60_days)
X_test = []
X_test.append(last_60_days_scaled)
X_test = np.array(X_test)
X_test = np.reshape(X_test, (X_test.shape[0], X_test.shape[1], 1))
pred_price = model.predict(X_test)
pred_price = scaler.inverse_transform(pred_price)
print(pred_price)
[[137.67413]]
In [37]:
# get the quote
apple_quote2 = web.DataReader('IBM', data_source='yahoo', start='2021-04-27', end='2021-04-27')
print(apple_quote2['Close'])
Date 2021-04-27 142.009995 Name: Close, dtype: float64
In [ ]:
Hi, I do think this is a great blog. I stumbledupon it 😉
I’m going to return once again since i have book marked it.
Money and freedom is the greatest way to change, may you be rich
and continue to help other people.
I precisely wanted to appreciate you once more. I am not sure
what I might have made to happen in the absence of these solutions revealed by
you directly on such a theme. It had been a very frustrating difficulty in my
view, but seeing the skilled avenue you resolved the issue took me
to weep over fulfillment. I will be grateful for your support and even pray you comprehend what a powerful job you were
undertaking instructing people today through your webpage.
Probably you’ve never got to know all of us.
If some one desires expert view regarding running a blog after that
i suggest him/her to pay a visit this website, Keep up
the good work.
Good post. I am dealing with some of these issues
as well..
My website :: http://www.carraigfoundations.com
Perfect work you have done, this internet site is really cool with
superb information.
Also visit my website … Rachele
Excellent post however I was wondering if you could write a litte
more on this subject? I’d be very grateful if you could elaborate a little bit further.
Thank you!
My website :: bogema.anapacenter.info
Thanks designed for sharing such a nice opinion, piece of writing is pleasant,
thats why i have read it completely
Review my webpage :: Full Essentials CBD Review
Howdy! This blog post couldn’t be written much better! Going through this article reminds me of my previous roommate!
He always kept talking about this. I most certainly will
send this information to him. Fairly certain he will have a good read.
Thank you for sharing!
If you are going for best contents like I do, simply visit this web
page all the time because it offers feature contents, thanks
Wonderful blog! I found it while searching on Yahoo News.
Do you have any tips on how to get listed in Yahoo News?
I’ve been trying for a while but I never seem to get
there! Many thanks
Very quickly this web page will be famous amid all blogging people, due to it’s fastidious articles or reviews
Amazing! This blog looks just like my old one! It’s on a totally
different topic but it has pretty much the same layout and design. Wonderful choice
of colors!
Here is my website: forum.hostpoco.com
No matter if some one searches for his vital thing, therefore
he/she wants to be available that in detail, therefore that thing is maintained
over here.
Way cool! Some extremely valid points! I appreciate you penning this write-up plus the rest of the website is very good.
Someone necessarily lend a hand to make seriously articles I’d state.
That is the very first time I frequented your website page and thus far?
I amazed with the research you made to create this particular put up incredible.
Excellent activity!
Hi there to every one, the contents existing at this web site are
really awesome for people experience, well, keep up the nice work
fellows.
What’s up to every one, since I am genuinely keen of reading this weblog’s post to be updated regularly.
It contains pleasant stuff.
May I simply just say what a relief to discover someone that truly understands what they’re talking about over the internet.
You certainly realize how to bring an issue to
light and make it important. More people must check this out and understand this side of your story.
I was surprised that you’re not more popular given that you most certainly possess the gift.
Its like you read my mind! You appear to know a lot about this, like you wrote the book in it
or something. I think that you could do with some pics to drive the message home a little bit, but instead of that, this is wonderful
blog. An excellent read. I will definitely be back.
whoah this blog is magnificent i like reading your posts.
Keep up the good work! You understand, a lot of persons are hunting around
for this information, you can aid them greatly.
you are actually a just right webmaster. The
website loading velocity is incredible. It sort of feels that you are doing any distinctive trick.
Furthermore, The contents are masterwork. you have performed a great activity in this matter!
I’ll immediately grab your rss as I can not find your email subscription hyperlink
or newsletter service. Do you have any? Kindly let me recognize so that I may subscribe.
Thanks.
Howdy! This post сould not be ѡritten any bettеr! Reading thrߋugh tһіs
post reminds me of my good olԁ roоm mate! He аlways keрt chatting about
this. Ι will forward this page to him. Fairly ϲertain һе will hɑve a ցood read.
Thanks fоr sharing!
I think what you said made a ton of sense. However,
think about this, what if you were to create a awesome post title?
I am not suggesting your information isn’t solid., however suppose you added something that
grabbed a person’s attention? I mean Stock Price Prediction Using Python & Machine Learning – python design is
kinda plain. You should glance at Yahoo’s front page and note how they create post titles
to get people interested. You might try adding a video or a
picture or two to grab people interested about everything’ve written. In my opinion, it might bring your blog
a little bit more interesting.
Hi there, just wanted to tell you, I enjoyed this post.
It was helpful. Keep on posting!
Feel free to surf to my webpage; Freeman
I feel this is one of the such a lot vital information for me.
And i am satisfied reading your article. However wanna commentary on few
normal things, The website style is great, the articles is actually great : D.
Just right process, cheers
Hi there to every one, it’s in fact a good for me
to pay a quick visit this web page, it consists
of useful Information.
Its like you read my thoughts! You appear to grasp so much approximately this,
such as you wrote the e-book in it or something. I think that you simply could do
with some % to power the message house a bit, however other than that, this is great blog.
An excellent read. I will certainly be back.
I read this post fully on the topic of the resemblance of most up-to-date and earlier technologies,
it’s amazing article.
I’m more than happy to uncover this website. I need
to to thank you for your time due to this wonderful read!!
I definitely really liked every little bit of it and I have you saved to fav to see new stuff in your blog.
This text is invaluable. How can I find out more?
I’m not that much of a internet reader to be honest but your sites really nice, keep it up!
I’ll go ahead and bookmark your website to come back
in the future. Many thanks
Great post. I was checking constantly this blog and I’m impressed!
Extremely helpful information particularly the last part
🙂 I care for such information much. I was looking for this
certain information for a very long time. Thank you and best of luck.
My web blog: Clinical Keto
This is very interesting, You are a very skilled blogger.
I’ve joined your rss feed and look forward to seeking more of your magnificent post.
Also, I have shared your site in my social networks!
I simply needed to appreciate you all over again. I do not know the things that I would have done without the type of aspects revealed by you directly on my subject matter. It truly was the intimidating situation for me personally, however , considering the expert tactic you handled that made me to jump for joy. Extremely happier for this information and even expect you know what a great job your are doing teaching most people via a site. I’m certain you’ve never come across any of us.
What’s up to every one, the contents present
at this web page are truly awesome for people experience, well,
keep up the good work fellows.
I must thank you for the efforts you have put in penning this website.
I really hope to see the same high-grade content from
you later on as well. In fact, your creative writing abilities has encouraged me to get my very own website
now 😉
Very good write-up. I absolutely appreciate this site.
Stick with it!