Adding an lm linear-regression formula to a scatter plot in R

2022-09-08 18:04:50

1. Simple linear regression

Start with the example from the function's own documentation (type ?lm in R). lm(y ~ x) fits the regression y = kx + b; lm(y ~ x - 1) drops b, i.e. the intercept is not estimated.

require(graphics)

## Annette Dobson (1990) "An Introduction to Generalized Linear Models".
## Page 9: Plant Weight Data.
ctl <- c(4.17,5.58,5.18,6.11,4.50,4.61,5.17,4.53,5.33,5.14)
trt <- c(4.81,4.17,4.41,3.59,5.87,3.83,6.03,4.89,4.32,4.69)
group <- gl(2, 10, 20, labels = c("Ctl","Trt"))
weight <- c(ctl, trt)
lm.D9 <- lm(weight ~ group)
lm.D90 <- lm(weight ~ group - 1) # omitting intercept

anova(lm.D9)     # ANOVA table for the model with intercept
summary(lm.D90)  # coefficient summary for the no-intercept model

opar <- par(mfrow = c(2,2), oma = c(0, 0, 1.1, 0))
plot(lm.D9, las = 1)      # Residuals, Fitted, ...
par(opar)
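The effect of dropping the intercept can be checked directly on the two fits above; a quick comparison of the coefficients:

coef(lm.D9)    # (Intercept) = mean of Ctl, groupTrt = difference of Trt from Ctl
coef(lm.D90)   # no intercept: groupCtl and groupTrt are the two group means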

Using the built-in mtcars dataset in R, we can obtain the intercept and slope, and also the explained variance (R-squared):

require(ggplot2)
library(dplyr)    # load dplyr
library(ggpmisc)  # load ggpmisc
library(ggpubr)
require(gridExtra)
model=lm(mtcars$wt ~ mtcars$mpg)
model
## Output:
Call:
lm(formula = mtcars$wt ~ mtcars$mpg)

Coefficients:
(Intercept)   mtcars$mpg  
      6.047       -0.141
summary(model)

## Output:
Call:
lm(formula = mtcars$wt ~ mtcars$mpg)

Residuals:
   Min     1Q Median     3Q    Max 
-0.652 -0.349 -0.138  0.319  1.368 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept)   6.0473     0.3087   19.59  < 2e-16 ***
mtcars$mpg   -0.1409     0.0147   -9.56  1.3e-10 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.494 on 30 degrees of freedom
Multiple R-squared:  0.753,	Adjusted R-squared:  0.745 
F-statistic: 91.4 on 1 and 30 DF,  p-value: 1.29e-10

Extracting the regression R-squared value:

Extract it via summary():

## the example above (plant-weight data)
summary(lm.D9)$r.squared

## mtcars example
model=lm(mtcars$wt ~ mtcars$mpg)
res=summary(model)
str(res) 
## extract the individual values:
res$r.squared
res$coefficients
res$adj.r.squared          ## adjusted for degrees of freedom
res$coefficients[1,1]      ## intercept
res$coefficients[2,1]      ## slope

Plotting the regression scatter with base plot():

plot(mtcars$mpg, mtcars$wt, pch=20,cex=2)
abline(model,col="red",lwd=2)
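Since the goal here is to put the fitted formula on the plot, the values extracted above can be pasted into a label and added to the base-graphics plot with legend(); a minimal sketch using the model and res objects from above (the rounding and label position are arbitrary choices):

eq <- sprintf("y = %.3f %+.3f x,  R^2 = %.3f",
              res$coefficients[1, 1], res$coefficients[2, 1], res$r.squared)
legend("topright", legend = eq, bty = "n", text.col = "blue")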

Computing the 95% confidence interval:

test=mtcars[c("mpg","wt")]
head(test)
colnames(test)=c("x","y")
model = lm(y ~ x, test)

test$predicted = predict(
  object = model,
  newdata = test)

test$CI = predict(
  object = model,
  newdata = test,
  se.fit = TRUE
)$se.fit * qt(1 - (1 - 0.95)/2, df = model$df.residual)  # half-width of the 95% CI; residual df = n - 2

test$CI_u = test$predicted + test$CI   # upper bound
test$CI_l = test$predicted - test$CI   # lower bound
plot(mtcars$mpg, mtcars$wt, pch = 20, cex = 1)   ## note: mpg has repeated, unsorted x values
abline(model, col = "red", lwd = 2)
lines(x = test$x, y = test$CI_u, col = "blue")
lines(x = test$x, y = test$CI_l, col = "blue")

The blue lines in the plot above look odd; plotting the raw data in its original order shows why:

plot(mtcars$mpg, mtcars$wt, pch = 20, cex = 1, type = "b")  ## type = "b" connects points in data order: mpg is unsorted and has repeated values

The calculation itself is fine, but the data are not well suited to drawing lines this way: x (mpg) is unsorted and contains repeated values, so the connecting lines jump back and forth. Sorting the x values first fixes this, as shown in the sketch below.
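One way around it, as a minimal sketch building on the test data frame and model above: evaluate predict() with interval = "confidence" on a sorted grid of x values, so the band is drawn from left to right (the grid length of 100 is an arbitrary choice):

xgrid <- data.frame(x = seq(min(test$x), max(test$x), length.out = 100))
ci <- predict(model, newdata = xgrid, interval = "confidence", level = 0.95)

plot(mtcars$mpg, mtcars$wt, pch = 20, cex = 1)
lines(xgrid$x, ci[, "fit"], col = "red", lwd = 2)   # fitted line
lines(xgrid$x, ci[, "lwr"], col = "blue")           # lower 95% bound
lines(xgrid$x, ci[, "upr"], col = "blue")           # upper 95% bound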

2. Plotting with ggplot2

A ggplot2 example (df is a data frame with observed values in yreal and predicted values in ypred):
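The data frame df is not defined in the original post; as a stand-in, it can be built from the mtcars model used earlier purely so the code can be run (the column names yreal and ypred are what the plotting code expects):

## hypothetical stand-in for the author's data
df <- data.frame(yreal = mtcars$wt,                           # observed values
                 ypred = fitted(lm(wt ~ mpg, data = mtcars))) # fitted values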

p <- ggplot(df, aes(x = yreal, y = ypred)) +
  geom_point(color = "grey20", size = 1, alpha = 0.8)
# add the regression line with its confidence band
p2 <- p + geom_smooth(formula = y ~ x, color = "red",
                      fill = "blue", method = "lm", se = TRUE, level = 0.95) +
  theme_bw() +
  stat_poly_eq(
    aes(label = paste(..eq.label.., ..adj.rr.label.., sep = '~~~')),
    formula = y ~ x,  parse = TRUE, color = "blue",
    size = 5,        # font size of the equation
    label.x = 0.05,  # position, as a proportion between 0 and 1
    label.y = 0.95) + 
  labs(title = "test", x = "Real Value (Huang Huaihai 1777)", y = "Predicted Value (Correlation: 0.5029)")
p2

The same manual confidence-interval calculation, ggplot version:

require(ggplot2)
library(dplyr)    # load dplyr
library(ggpmisc)  # load ggpmisc
library(ggpubr)
require(gridExtra)
ggplot(data=df, aes(x=yreal, y=ypred)) +
  geom_smooth(formula = y ~ x, color = "blue",
              fill = "grey10", method = "lm")  +
  geom_point() +
  stat_regline_equation(label.x=0.1, label.y=-1.5) +
  stat_cor(aes(label=..rr.label..), label.x=0.1, label.y=-2)

test=df
head(test)
colnames(test)=c("x","y")
model = lm(y ~ x, test)
test$predicted = predict(
  object = model,
  newdata = test)

test$CI = predict(
  object = model,
  newdata = test,
  se.fit = TRUE
)$se.fit * qt(1 - (1 - 0.95)/2, df = model$df.residual)  # half-width of the 95% CI

ggplot(test) +
  aes(x = x, y = y) +
  geom_point(size = 1,colour="grey40") +
  geom_smooth(formula =y ~ x,method = "lm",  fullrange = TRUE, color = "black") +
  geom_line(aes(y = predicted + CI), color = "blue") + # upper
  geom_line(aes(y = predicted - CI), color = "red") + # lower
  theme_classic()
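If a shaded band is preferred over two separate boundary lines, the same predicted and CI columns can be passed to geom_ribbon(); a small variation on the plot above:

ggplot(test, aes(x = x, y = y)) +
  geom_ribbon(aes(ymin = predicted - CI, ymax = predicted + CI),
              fill = "grey80") +                    # shaded 95% band
  geom_line(aes(y = predicted), color = "black") +  # fitted line
  geom_point(size = 1, colour = "grey40") +
  theme_classic()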

References:
https://stackoverflow.com/questions/23519224/extract-r-square-value-with-r-in-linear-models (extracting R-squared)
https://blog.csdn.net/LeaningR/article/details/118971000 (extracting R-squared and related values)
https://stackoverflow.com/questions/45742987/how-is-level-used-to-generate-the-confidence-interval-in-geom-smooth (adding the lm line)
https://zhuanlan.zhihu.com/p/131604431 (Zhihu)
