
Question

Follow these steps:
● Read the example file.
● Think of a relationship you can model and create a new Python file in this
folder called poly.py.
● Inside poly.py, use polynomial regression to train, predict, and plot your
results for the relationship you identified (a starting sketch is given below).
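For the last two steps, here is a minimal sketch of what poly.py could look like; it follows the same pattern as the example file reproduced below. The relationship it models (car speed versus stopping distance) and every data value in it are hypothetical, chosen purely for illustration.

# poly.py - polynomial regression on a hypothetical relationship:
# car speed (mph) vs. stopping distance (feet)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Hypothetical training data
X = [[10], [20], [30], [40], [50], [60]]     # speeds in mph
y = [[12], [32], [70], [118], [175], [250]]  # stopping distances in feet

# Expand each speed x into [1, x, x^2], then fit ordinary linear regression
featurizer = PolynomialFeatures(degree=2)
X_quadratic = featurizer.fit_transform(X)
model = LinearRegression()
model.fit(X_quadratic, y)

# Predict over a smooth range of speeds and plot against the training data
xx = np.linspace(0, 70, 100).reshape(-1, 1)
plt.plot(xx, model.predict(featurizer.transform(xx)), c='r', linestyle='--')
plt.scatter(X, y)
plt.title('Stopping distance regressed on speed')
plt.xlabel('Speed in mph')
plt.ylabel('Stopping distance in feet')
plt.grid(True)
plt.show()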

The example file:

# ====== Polynomial Regression ======

# Thus far, we have assumed that the relationship between the explanatory
# variables and the response variable is linear. This assumption is not always
# true. This is where polynomial regression comes in. Polynomial regression
# is a special case of multiple linear regression that adds terms with degrees
# greater than one to the model. The real-world curvilinear relationship is
# captured when you transform the training data by adding polynomial terms,
# which are then fit in the same manner as in multiple linear regression.

# We are now going to use only one explanatory variable, but the model now has
# three terms instead of two. The explanatory variable has been transformed
# and added as a third term to the model to capture the curvilinear
# relationship. The PolynomialFeatures transformer can be used to easily add
# polynomial features to a feature representation. Let's fit a model to these
# features, and compare it to the simple linear regression model:

import numpy as np
import matplotlib.pyplot as plt

from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

# Training set
X_train = [[6], [8], [10], [14], [18]]    # diameters of pizzas
y_train = [[7], [9], [13], [17.5], [18]]  # prices of pizzas

# Testing set
X_test = [[6], [8], [11], [16]]   # diameters of pizzas
y_test = [[8], [12], [15], [18]]  # prices of pizzas

# Train the Linear Regression model and plot a prediction
regressor = LinearRegression()
regressor.fit(X_train, y_train)
xx = np.linspace(0, 26, 100)
yy = regressor.predict(xx.reshape(xx.shape[0], 1))
plt.plot(xx, yy)
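Before the quadratic model is built below, it may help to see exactly what the degree-2 transformation produces. Each one-column sample [x] is expanded to [1, x, x²], so the model that gets fitted is y = β0 + β1x + β2x², which is still linear in its coefficients; that is why LinearRegression can fit it unchanged. A minimal sketch (the printed values assume scikit-learn's default include_bias=True):

from sklearn.preprocessing import PolynomialFeatures

# Each sample [x] expands to [1, x, x^2] with the default include_bias=True
demo = PolynomialFeatures(degree=2)
print(demo.fit_transform([[6], [8]]))
# Expected output:
# [[ 1.  6. 36.]
#  [ 1.  8. 64.]]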
# Set the degree of the Polynomial Regression model
quadratic_featurizer = PolynomialFeatures(degree=2)

# This preprocessor transforms an input data matrix into a new data matrix
# of the given degree
X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
X_test_quadratic = quadratic_featurizer.transform(X_test)

# Train and test the regressor_quadratic model
regressor_quadratic = LinearRegression()
regressor_quadratic.fit(X_train_quadratic, y_train)

xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1))

# Plot the graph
plt.plot(xx, regressor_quadratic.predict(xx_quadratic), c='r', linestyle='--')
plt.title('Pizza price regressed on diameter')
plt.xlabel('Diameter in inches')
plt.ylabel('Price in dollars')
plt.axis([0, 25, 0, 25])
plt.grid(True)
plt.scatter(X_train, y_train)
plt.show()

print(X_train)
print(X_train_quadratic)
print(X_test)
print(X_test_quadratic)

# If you execute the code, you will see that the simple linear regression model
# is plotted with a solid line. The quadratic regression model is plotted with
# a dashed line, and evidently the quadratic regression model fits the training
# data better.
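The closing comment judges the fit visually. To put a number on it, one could append the following sketch to the example; it reuses the variables defined above, and LinearRegression.score returns the coefficient of determination (R²) on the given data, so higher is better.

# Quantify each model's fit on the held-out test set with R^2
print('Simple linear regression R^2:', regressor.score(X_test, y_test))
print('Quadratic regression R^2:',
      regressor_quadratic.score(X_test_quadratic, y_test))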