R Under development (unstable) (2019-04-05 r76323) -- "Unsuffered Consequences"
Copyright (C) 2019 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

  Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> pkgname <- "rpart"
> source(file.path(R.home("share"), "R", "examples-header.R"))
> options(warn = 1)
> library('rpart')
> 
> base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
> base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
> cleanEx()
> nameEx("car.test.frame")
> ### * car.test.frame
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: car.test.frame
> ### Title: Automobile Data from 'Consumer Reports' 1990
> ### Aliases: car.test.frame
> ### Keywords: datasets
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> summary(z.auto)
Call:
rpart(formula = Mileage ~ Weight, data = car.test.frame)
  n= 60 

          CP nsplit rel error    xerror       xstd
1 0.59534912      0 1.0000000 1.0337818 0.18046532
2 0.13452819      1 0.4046509 0.5836606 0.10900973
3 0.01282843      2 0.2701227 0.4409221 0.08652804
4 0.01000000      3 0.2572943 0.4415805 0.08663003

Variable importance
Weight 
   100 

Node number 1: 60 observations,    complexity param=0.5953491
  mean=24.58333, MSE=22.57639 
  left son=2 (45 obs) right son=3 (15 obs)
  Primary splits:
      Weight < 2567.5 to the right, improve=0.5953491, (0 missing)

Node number 2: 45 observations,    complexity param=0.1345282
  mean=22.46667, MSE=8.026667 
  left son=4 (22 obs) right son=5 (23 obs)
  Primary splits:
      Weight < 3087.5 to the right, improve=0.5045118, (0 missing)

Node number 3: 15 observations
  mean=30.93333, MSE=12.46222 

Node number 4: 22 observations
  mean=20.40909, MSE=2.78719 

Node number 5: 23 observations,    complexity param=0.01282843
  mean=24.43478, MSE=5.115312 
  left son=10 (15 obs) right son=11 (8 obs)
  Primary splits:
      Weight < 2747.5 to the right, improve=0.1476996, (0 missing)

Node number 10: 15 observations
  mean=23.8, MSE=4.026667 

Node number 11: 8 observations
  mean=25.625, MSE=4.984375 

> 
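> ## Sketch (not executed here): the same information is also available
> ## programmatically -- the CP table is stored in z.auto$cptable and, in
> ## recent rpart versions, the importances in z.auto$variable.importance:
> ##   z.auto$cptable
> ##   z.auto$variable.importance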
> 
> 
> cleanEx()
> nameEx("car90")
> ### * car90
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: car90
> ### Title: Automobile Data from 'Consumer Reports' 1990
> ### Aliases: car90
> ### Keywords: datasets
> 
> ### ** Examples
> 
> data(car90)
> plot(car90$Price/1000, car90$Weight,
+      xlab = "Price (thousands)", ylab = "Weight (lbs)")
> mlowess <- function(x, y, ...) {
+     keep <- !(is.na(x) | is.na(y))
+     lowess(x[keep], y[keep], ...)
+ }
> with(car90, lines(mlowess(Price/1000, Weight, f = 0.5)))
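> ## The mlowess() wrapper above drops incomplete (x, y) pairs because
> ## lowess() itself does not accept missing values.  An equivalent sketch
> ## using na.omit() on just the two columns involved:
> ##   with(na.omit(car90[, c("Price", "Weight")]),
> ##        lines(lowess(Price/1000, Weight, f = 0.5)))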
> 
> 
> 
> cleanEx()
> nameEx("cu.summary")
> ### * cu.summary
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: cu.summary
> ### Title: Automobile Data from 'Consumer Reports' 1990
> ### Aliases: cu.summary
> ### Keywords: datasets
> 
> ### ** Examples
> 
> fit <- rpart(Price ~ Mileage + Type + Country, cu.summary)
> par(xpd = TRUE)
> plot(fit, compress = TRUE)
> text(fit, use.n = TRUE)
> 
> 
> 
> graphics::par(get("par.postscript", pos = 'CheckExEnv'))
> cleanEx()
> nameEx("kyphosis")
> ### * kyphosis
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: kyphosis
> ### Title: Data on Children who have had Corrective Spinal Surgery
> ### Aliases: kyphosis
> ### Keywords: datasets
> 
> ### ** Examples
> 
> fit <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis)
> fit2 <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis,
+               parms = list(prior = c(0.65, 0.35), split = "information"))
> fit3 <- rpart(Kyphosis ~ Age + Number + Start, data=kyphosis,
+               control = rpart.control(cp = 0.05))
> par(mfrow = c(1,2), xpd = TRUE)
> plot(fit)
> text(fit, use.n = TRUE)
> plot(fit2)
> text(fit2, use.n = TRUE)
> 
> 
> 
> graphics::par(get("par.postscript", pos = 'CheckExEnv'))
> cleanEx()
> nameEx("meanvar.rpart")
> ### * meanvar.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: meanvar.rpart
> ### Title: Mean-Variance Plot for an Rpart Object
> ### Aliases: meanvar meanvar.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> meanvar(z.auto, log = 'xy')
> 
> 
> 
> cleanEx()
> nameEx("path.rpart")
> ### * path.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: path.rpart
> ### Title: Follow Paths to Selected Nodes of an Rpart Object
> ### Aliases: path.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> fit <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis)
> print(fit)
n= 81 

node), split, n, loss, yval, (yprob)
      * denotes terminal node

 1) root 81 17 absent (0.79012346 0.20987654)  
   2) Start>=8.5 62  6 absent (0.90322581 0.09677419)  
     4) Start>=14.5 29  0 absent (1.00000000 0.00000000) *
     5) Start< 14.5 33  6 absent (0.81818182 0.18181818)  
      10) Age< 55 12  0 absent (1.00000000 0.00000000) *
      11) Age>=55 21  6 absent (0.71428571 0.28571429)  
        22) Age>=111 14  2 absent (0.85714286 0.14285714) *
        23) Age< 111 7  3 present (0.42857143 0.57142857) *
   3) Start< 8.5 19  8 present (0.42105263 0.57894737) *
> path.rpart(fit, nodes = c(11, 22))

 node number: 11 
   root
   Start>=8.5
   Start< 14.5
   Age>=55

 node number: 22 
   root
   Start>=8.5
   Start< 14.5
   Age>=55
   Age>=111
> 
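> ## Node numbers follow the usual binary scheme: the children of node k are
> ## 2*k and 2*k + 1, so the path to node 22 is the path to its parent, node
> ## 11, plus one further split.  A quick check of that arithmetic:
> ##   k <- 11; c(2 * k, 2 * k + 1)   # 22 23, as in the printed tree above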
> 
> 
> cleanEx()
> nameEx("plot.rpart")
> ### * plot.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: plot.rpart
> ### Title: Plot an Rpart Object
> ### Aliases: plot.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> fit <- rpart(Price ~ Mileage + Type + Country, cu.summary)
> par(xpd = TRUE)
> plot(fit, compress = TRUE)
> text(fit, use.n = TRUE)
> 
> 
> 
> graphics::par(get("par.postscript", pos = 'CheckExEnv'))
> cleanEx()
> nameEx("post.rpart")
> ### * post.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: post.rpart
> ### Title: PostScript Presentation Plot of an Rpart Object
> ### Aliases: post.rpart post
> ### Keywords: tree
> 
> ### ** Examples
> 
> ## Not run: 
> ##D z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> ##D post(z.auto, file = "")   # display tree on active device
> ##D    # now construct postscript version on file "pretty.ps"
> ##D    # with no title
> ##D post(z.auto, file = "pretty.ps", title = " ")
> ##D z.hp <- rpart(Mileage ~ Weight + HP, car.test.frame)
> ##D post(z.hp)
> ## End(Not run)
> 
> 
> 
> cleanEx()
> nameEx("predict.rpart")
> ### * predict.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: predict.rpart
> ### Title: Predictions from a Fitted Rpart Object
> ### Aliases: predict.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> predict(z.auto)
               Eagle Summit 4               Ford Escort   4 
                     30.93333                      30.93333 
               Ford Festiva 4                 Honda Civic 4 
                     30.93333                      30.93333 
              Mazda Protege 4              Mercury Tracer 4 
                     30.93333                      30.93333 
              Nissan Sentra 4              Pontiac LeMans 4 
                     30.93333                      30.93333 
              Subaru Loyale 4                Subaru Justy 3 
                     30.93333                      30.93333 
             Toyota Corolla 4               Toyota Tercel 4 
                     30.93333                      30.93333 
           Volkswagen Jetta 4           Chevrolet Camaro V8 
                     30.93333                      20.40909 
                Dodge Daytona               Ford Mustang V8 
                     23.80000                      20.40909 
                   Ford Probe          Honda Civic CRX Si 4 
                     25.62500                      30.93333 
       Honda Prelude Si 4WS 4                Nissan 240SX 4 
                     25.62500                      23.80000 
               Plymouth Laser                   Subaru XT 4 
                     23.80000                      30.93333 
                    Audi 80 4               Buick Skylark 4 
                     25.62500                      25.62500 
          Chevrolet Beretta 4          Chrysler Le Baron V6 
                     25.62500                      23.80000 
                 Ford Tempo 4                Honda Accord 4 
                     23.80000                      23.80000 
                  Mazda 626 4           Mitsubishi Galant 4 
                     23.80000                      25.62500 
          Mitsubishi Sigma V6               Nissan Stanza 4 
                     20.40909                      23.80000 
          Oldsmobile Calais 4                 Peugeot 405 4 
                     25.62500                      25.62500 
              Subaru Legacy 4                Toyota Camry 4 
                     23.80000                      23.80000 
                  Volvo 240 4               Acura Legend V6 
                     23.80000                      20.40909 
              Buick Century 4       Chrysler Le Baron Coupe 
                     23.80000                      23.80000 
       Chrysler New Yorker V6              Eagle Premier V6 
                     20.40909                      20.40909 
               Ford Taurus V6           Ford Thunderbird V6 
                     20.40909                      20.40909 
             Hyundai Sonata 4                  Mazda 929 V6 
                     23.80000                      20.40909 
             Nissan Maxima V6    Oldsmobile Cutlass Ciera 4 
                     20.40909                      23.80000 
Oldsmobile Cutlass Supreme V6             Toyota Cressida 6 
                     20.40909                      20.40909 
            Buick Le Sabre V6          Chevrolet Caprice V8 
                     20.40909                      20.40909 
   Ford LTD Crown Victoria V8       Chevrolet Lumina APV V6 
                     20.40909                      20.40909 
       Dodge Grand Caravan V6              Ford Aerostar V6 
                     20.40909                      20.40909 
                 Mazda MPV V6            Mitsubishi Wagon 4 
                     20.40909                      20.40909 
              Nissan Axxess 4                  Nissan Van 4 
                     20.40909                      20.40909 
> 
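> ## For a regression tree every fitted value is the mean of the observation's
> ## terminal node, so the predictions above take only four distinct values.
> ## Sketch of a check against the node means reported by summary(z.auto):
> ##   sort(unique(predict(z.auto)))   # 20.40909 23.80000 25.62500 30.93333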
> fit <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis)
> predict(fit, type = "prob")   # class probabilities (default)
      absent   present
1  0.4210526 0.5789474
2  0.8571429 0.1428571
3  0.4210526 0.5789474
4  0.4210526 0.5789474
5  1.0000000 0.0000000
6  1.0000000 0.0000000
7  1.0000000 0.0000000
8  1.0000000 0.0000000
9  1.0000000 0.0000000
10 0.4285714 0.5714286
11 0.4285714 0.5714286
12 1.0000000 0.0000000
13 0.4210526 0.5789474
14 1.0000000 0.0000000
15 1.0000000 0.0000000
16 1.0000000 0.0000000
17 1.0000000 0.0000000
18 0.8571429 0.1428571
19 1.0000000 0.0000000
20 1.0000000 0.0000000
21 1.0000000 0.0000000
22 0.4210526 0.5789474
23 0.4285714 0.5714286
24 0.4210526 0.5789474
25 0.4210526 0.5789474
26 1.0000000 0.0000000
27 0.4210526 0.5789474
28 0.4285714 0.5714286
29 1.0000000 0.0000000
30 1.0000000 0.0000000
31 1.0000000 0.0000000
32 0.8571429 0.1428571
33 0.8571429 0.1428571
34 1.0000000 0.0000000
35 0.8571429 0.1428571
36 1.0000000 0.0000000
37 1.0000000 0.0000000
38 0.4210526 0.5789474
39 1.0000000 0.0000000
40 0.4285714 0.5714286
41 0.4210526 0.5789474
42 1.0000000 0.0000000
43 0.4210526 0.5789474
44 0.4210526 0.5789474
45 1.0000000 0.0000000
46 0.8571429 0.1428571
47 1.0000000 0.0000000
48 0.8571429 0.1428571
49 0.4210526 0.5789474
50 0.8571429 0.1428571
51 0.4285714 0.5714286
52 1.0000000 0.0000000
53 0.4210526 0.5789474
54 1.0000000 0.0000000
55 1.0000000 0.0000000
56 1.0000000 0.0000000
57 1.0000000 0.0000000
58 0.4210526 0.5789474
59 1.0000000 0.0000000
60 0.4285714 0.5714286
61 0.4210526 0.5789474
62 0.4210526 0.5789474
63 0.4210526 0.5789474
64 1.0000000 0.0000000
65 1.0000000 0.0000000
66 1.0000000 0.0000000
67 1.0000000 0.0000000
68 0.8571429 0.1428571
69 1.0000000 0.0000000
70 1.0000000 0.0000000
71 0.8571429 0.1428571
72 0.8571429 0.1428571
73 1.0000000 0.0000000
74 0.8571429 0.1428571
75 1.0000000 0.0000000
76 1.0000000 0.0000000
77 0.8571429 0.1428571
78 1.0000000 0.0000000
79 0.8571429 0.1428571
80 0.4210526 0.5789474
81 1.0000000 0.0000000
> predict(fit, type = "vector") # level numbers
 1  2  3  4  5  6  7  8  9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 
 2  1  2  2  1  1  1  1  1  2  2  1  2  1  1  1  1  1  1  1  1  2  2  2  2  1 
27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 
 2  2  1  1  1  1  1  1  1  1  1  2  1  2  2  1  2  2  1  1  1  1  2  1  2  1 
53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 
 2  1  1  1  1  2  1  2  2  2  2  1  1  1  1  1  1  1  1  1  1  1  1  1  1  1 
79 80 81 
 1  2  1 
> predict(fit, type = "class")  # factor
      1       2       3       4       5       6       7       8       9      10 
present  absent present present  absent  absent  absent  absent  absent present 
     11      12      13      14      15      16      17      18      19      20 
present  absent present  absent  absent  absent  absent  absent  absent  absent 
     21      22      23      24      25      26      27      28      29      30 
 absent present present present present  absent present present  absent  absent 
     31      32      33      34      35      36      37      38      39      40 
 absent  absent  absent  absent  absent  absent  absent present  absent present 
     41      42      43      44      45      46      47      48      49      50 
present  absent present present  absent  absent  absent  absent present  absent 
     51      52      53      54      55      56      57      58      59      60 
present  absent present  absent  absent  absent  absent present  absent present 
     61      62      63      64      65      66      67      68      69      70 
present present present  absent  absent  absent  absent  absent  absent  absent 
     71      72      73      74      75      76      77      78      79      80 
 absent  absent  absent  absent  absent  absent  absent  absent  absent present 
     81 
 absent 
Levels: absent present
> predict(fit, type = "matrix") # level number, class frequencies, probabilities
   [,1] [,2] [,3]      [,4]      [,5]       [,6]
1     2    8   11 0.4210526 0.5789474 0.23456790
2     1   12    2 0.8571429 0.1428571 0.17283951
3     2    8   11 0.4210526 0.5789474 0.23456790
4     2    8   11 0.4210526 0.5789474 0.23456790
5     1   29    0 1.0000000 0.0000000 0.35802469
6     1   29    0 1.0000000 0.0000000 0.35802469
7     1   29    0 1.0000000 0.0000000 0.35802469
8     1   29    0 1.0000000 0.0000000 0.35802469
9     1   29    0 1.0000000 0.0000000 0.35802469
10    2    3    4 0.4285714 0.5714286 0.08641975
11    2    3    4 0.4285714 0.5714286 0.08641975
12    1   29    0 1.0000000 0.0000000 0.35802469
13    2    8   11 0.4210526 0.5789474 0.23456790
14    1   12    0 1.0000000 0.0000000 0.14814815
15    1   29    0 1.0000000 0.0000000 0.35802469
16    1   29    0 1.0000000 0.0000000 0.35802469
17    1   29    0 1.0000000 0.0000000 0.35802469
18    1   12    2 0.8571429 0.1428571 0.17283951
19    1   29    0 1.0000000 0.0000000 0.35802469
20    1   12    0 1.0000000 0.0000000 0.14814815
21    1   29    0 1.0000000 0.0000000 0.35802469
22    2    8   11 0.4210526 0.5789474 0.23456790
23    2    3    4 0.4285714 0.5714286 0.08641975
24    2    8   11 0.4210526 0.5789474 0.23456790
25    2    8   11 0.4210526 0.5789474 0.23456790
26    1   12    0 1.0000000 0.0000000 0.14814815
27    2    8   11 0.4210526 0.5789474 0.23456790
28    2    3    4 0.4285714 0.5714286 0.08641975
29    1   29    0 1.0000000 0.0000000 0.35802469
30    1   29    0 1.0000000 0.0000000 0.35802469
31    1   29    0 1.0000000 0.0000000 0.35802469
32    1   12    2 0.8571429 0.1428571 0.17283951
33    1   12    2 0.8571429 0.1428571 0.17283951
34    1   29    0 1.0000000 0.0000000 0.35802469
35    1   12    2 0.8571429 0.1428571 0.17283951
36    1   29    0 1.0000000 0.0000000 0.35802469
37    1   12    0 1.0000000 0.0000000 0.14814815
38    2    8   11 0.4210526 0.5789474 0.23456790
39    1   12    0 1.0000000 0.0000000 0.14814815
40    2    3    4 0.4285714 0.5714286 0.08641975
41    2    8   11 0.4210526 0.5789474 0.23456790
42    1   12    0 1.0000000 0.0000000 0.14814815
43    2    8   11 0.4210526 0.5789474 0.23456790
44    2    8   11 0.4210526 0.5789474 0.23456790
45    1   29    0 1.0000000 0.0000000 0.35802469
46    1   12    2 0.8571429 0.1428571 0.17283951
47    1   29    0 1.0000000 0.0000000 0.35802469
48    1   12    2 0.8571429 0.1428571 0.17283951
49    2    8   11 0.4210526 0.5789474 0.23456790
50    1   12    2 0.8571429 0.1428571 0.17283951
51    2    3    4 0.4285714 0.5714286 0.08641975
52    1   29    0 1.0000000 0.0000000 0.35802469
53    2    8   11 0.4210526 0.5789474 0.23456790
54    1   29    0 1.0000000 0.0000000 0.35802469
55    1   29    0 1.0000000 0.0000000 0.35802469
56    1   29    0 1.0000000 0.0000000 0.35802469
57    1   12    0 1.0000000 0.0000000 0.14814815
58    2    8   11 0.4210526 0.5789474 0.23456790
59    1   12    0 1.0000000 0.0000000 0.14814815
60    2    3    4 0.4285714 0.5714286 0.08641975
61    2    8   11 0.4210526 0.5789474 0.23456790
62    2    8   11 0.4210526 0.5789474 0.23456790
63    2    8   11 0.4210526 0.5789474 0.23456790
64    1   29    0 1.0000000 0.0000000 0.35802469
65    1   29    0 1.0000000 0.0000000 0.35802469
66    1   12    0 1.0000000 0.0000000 0.14814815
67    1   29    0 1.0000000 0.0000000 0.35802469
68    1   12    2 0.8571429 0.1428571 0.17283951
69    1   12    0 1.0000000 0.0000000 0.14814815
70    1   29    0 1.0000000 0.0000000 0.35802469
71    1   12    2 0.8571429 0.1428571 0.17283951
72    1   12    2 0.8571429 0.1428571 0.17283951
73    1   29    0 1.0000000 0.0000000 0.35802469
74    1   12    2 0.8571429 0.1428571 0.17283951
75    1   29    0 1.0000000 0.0000000 0.35802469
76    1   29    0 1.0000000 0.0000000 0.35802469
77    1   12    2 0.8571429 0.1428571 0.17283951
78    1   12    0 1.0000000 0.0000000 0.14814815
79    1   12    2 0.8571429 0.1428571 0.17283951
80    2    8   11 0.4210526 0.5789474 0.23456790
81    1   12    0 1.0000000 0.0000000 0.14814815
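> ## Reading the type = "matrix" result: column 1 is the predicted class as a
> ## level number (compare type = "vector"), columns 2-3 are the class counts
> ## in the terminal node, columns 4-5 the class probabilities (compare
> ## type = "prob"), and the last column is the node's share of the data,
> ## e.g. for the 19-observation node:
> ##   19/81   # 0.2345679, matching column 6 in rows 1, 3, 4, ...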
> 
> sub <- c(sample(1:50, 25), sample(51:100, 25), sample(101:150, 25))
> fit <- rpart(Species ~ ., data = iris, subset = sub)
> fit
n= 75 

node), split, n, loss, yval, (yprob)
      * denotes terminal node

1) root 75 50 setosa (0.33333333 0.33333333 0.33333333)  
  2) Petal.Length< 2.5 25  0 setosa (1.00000000 0.00000000 0.00000000) *
  3) Petal.Length>=2.5 50 25 versicolor (0.00000000 0.50000000 0.50000000)  
    6) Petal.Length< 4.85 26  2 versicolor (0.00000000 0.92307692 0.07692308) *
    7) Petal.Length>=4.85 24  1 virginica (0.00000000 0.04166667 0.95833333) *
> table(predict(fit, iris[-sub,], type = "class"), iris[-sub, "Species"])
            
             setosa versicolor virginica
  setosa         25          0         0
  versicolor      0         22         1
  virginica       0          3        24
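> ## Sketch: the corresponding misclassification rate on the held-out rows
> ## (4 off-diagonal cases out of 75 in the table above):
> ##   mean(predict(fit, iris[-sub, ], type = "class") != iris[-sub, "Species"])
> ##   # 4/75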
> 
> 
> 
> cleanEx()
> nameEx("print.rpart")
> ### * print.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: print.rpart
> ### Title: Print an Rpart Object
> ### Aliases: print.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> z.auto
n= 60 

node), split, n, deviance, yval
      * denotes terminal node

 1) root 60 1354.58300 24.58333  
   2) Weight>=2567.5 45  361.20000 22.46667  
     4) Weight>=3087.5 22   61.31818 20.40909 *
     5) Weight< 3087.5 23  117.65220 24.43478  
      10) Weight>=2747.5 15   60.40000 23.80000 *
      11) Weight< 2747.5 8   39.87500 25.62500 *
   3) Weight< 2567.5 15  186.93330 30.93333 *
> ## Not run: 
> ##D node), split, n, deviance, yval
> ##D       * denotes terminal node
> ##D 
> ##D  1) root 60 1354.58300 24.58333  
> ##D    2) Weight>=2567.5 45  361.20000 22.46667  
> ##D      4) Weight>=3087.5 22   61.31818 20.40909 *
> ##D      5) Weight<3087.5 23  117.65220 24.43478  
> ##D       10) Weight>=2747.5 15   60.40000 23.80000 *
> ##D       11) Weight<2747.5 8   39.87500 25.62500 *
> ##D    3) Weight<2567.5 15  186.93330 30.93333 *
> ## End(Not run)
> 
> 
> cleanEx()
> nameEx("printcp")
> ### * printcp
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: printcp
> ### Title: Displays CP table for Fitted Rpart Object
> ### Aliases: printcp
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> printcp(z.auto)

Regression tree:
rpart(formula = Mileage ~ Weight, data = car.test.frame)

Variables actually used in tree construction:
[1] Weight

Root node error: 1354.6/60 = 22.576

n= 60 

        CP nsplit rel error  xerror     xstd
1 0.595349      0   1.00000 1.03378 0.180465
2 0.134528      1   0.40465 0.58366 0.109010
3 0.012828      2   0.27012 0.44092 0.086528
4 0.010000      3   0.25729 0.44158 0.086630
> ## Not run: 
> ##D Regression tree:
> ##D rpart(formula = Mileage ~ Weight, data = car.test.frame)
> ##D 
> ##D Variables actually used in tree construction:
> ##D [1] Weight
> ##D 
> ##D Root node error: 1354.6/60 = 22.576
> ##D 
> ##D         CP nsplit rel error  xerror     xstd 
> ##D 1 0.595349      0   1.00000 1.03436 0.178526
> ##D 2 0.134528      1   0.40465 0.60508 0.105217
> ##D 3 0.012828      2   0.27012 0.45153 0.083330
> ##D 4 0.010000      3   0.25729 0.44826 0.076998
> ## End(Not run)
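> ## A common follow-up (sketch, not run here): choose the CP value with the
> ## smallest cross-validated error and prune the tree to it --
> ##   cp.tab <- z.auto$cptable
> ##   prune(z.auto, cp = cp.tab[which.min(cp.tab[, "xerror"]), "CP"])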
> 
> 
> cleanEx()
> nameEx("prune.rpart")
> ### * prune.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: prune.rpart
> ### Title: Cost-complexity Pruning of an Rpart Object
> ### Aliases: prune.rpart prune
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> zp <- prune(z.auto, cp = 0.1)
> plot(zp)   # plot the smaller rpart object
> 
> 
> 
> cleanEx()
> nameEx("residuals.rpart")
> ### * residuals.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: residuals.rpart
> ### Title: Residuals From a Fitted Rpart Object
> ### Aliases: residuals.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> fit <- rpart(skips ~ Opening + Solder + Mask + PadType + Panel,
+              data = solder.balance, method = "anova")
> summary(residuals(fit))
    Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
-13.8000  -1.0361  -0.6833   0.0000   0.9639  16.2000 
> plot(predict(fit),residuals(fit))
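> ## For method = "anova" the residuals are simply observed minus fitted
> ## values, so (sketch) the summary above should agree with
> ##   summary(solder.balance$skips - predict(fit))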
> 
> 
> 
> cleanEx()
> nameEx("rpart")
> ### * rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: rpart
> ### Title: Recursive Partitioning and Regression Trees
> ### Aliases: rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> fit <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis)
> fit2 <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis,
+               parms = list(prior = c(.65,.35), split = "information"))
> fit3 <- rpart(Kyphosis ~ Age + Number + Start, data = kyphosis,
+               control = rpart.control(cp = 0.05))
> par(mfrow = c(1,2), xpd = NA) # otherwise on some devices the text is clipped
> plot(fit)
> text(fit, use.n = TRUE)
> plot(fit2)
> text(fit2, use.n = TRUE)
> 
> 
> 
> graphics::par(get("par.postscript", pos = 'CheckExEnv'))
> cleanEx()
> nameEx("rsq.rpart")
> ### * rsq.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: rsq.rpart
> ### Title: Plots the Approximate R-Square for the Different Splits
> ### Aliases: rsq.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> rsq.rpart(z.auto)

Regression tree:
rpart(formula = Mileage ~ Weight, data = car.test.frame)

Variables actually used in tree construction:
[1] Weight

Root node error: 1354.6/60 = 22.576

n= 60 

        CP nsplit rel error  xerror     xstd
1 0.595349      0   1.00000 1.03378 0.180465
2 0.134528      1   0.40465 0.58366 0.109010
3 0.012828      2   0.27012 0.44092 0.086528
4 0.010000      3   0.25729 0.44158 0.086630
> 
> 
> 
> cleanEx()
> nameEx("snip.rpart")
> ### * snip.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: snip.rpart
> ### Title: Snip Subtrees of an Rpart Object
> ### Aliases: snip.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> ## dataset not in R
> ## Not run: 
> ##D z.survey <- rpart(market.survey) # grow the rpart object
> ##D plot(z.survey) # plot the tree
> ##D z.survey2 <- snip.rpart(z.survey, toss = 2) # trim subtree at node 2
> ##D plot(z.survey2) # plot new tree
> ##D 
> ##D # can also interactively select the node using the mouse in the
> ##D # graphics window
> ## End(Not run)
> 
> 
> cleanEx()
> nameEx("solder.balance")
> ### * solder.balance
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: solder.balance
> ### Title: Soldering of Components on Printed-Circuit Boards
> ### Aliases: solder.balance solder
> ### Keywords: datasets
> 
> ### ** Examples
> 
> fit <- rpart(skips ~ Opening + Solder + Mask + PadType + Panel,
+              data = solder.balance, method = "anova")
> summary(residuals(fit))
    Min.  1st Qu.   Median     Mean  3rd Qu.     Max. 
-13.8000  -1.0361  -0.6833   0.0000   0.9639  16.2000 
> plot(predict(fit), residuals(fit))
> 
> 
> 
> cleanEx()
> nameEx("stagec")
> ### * stagec
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: stagec
> ### Title: Stage C Prostate Cancer
> ### Aliases: stagec
> ### Keywords: datasets
> 
> ### ** Examples
> 
> require(survival)
Loading required package: survival
> rpart(Surv(pgtime, pgstat) ~ ., stagec)
n= 146 

node), split, n, deviance, yval
      * denotes terminal node

 1) root 146 192.111100 1.0000000  
   2) grade< 2.5 61  44.799010 0.3634439  
     4) g2< 11.36 33   9.117405 0.1229835 *
     5) g2>=11.36 28  27.602190 0.7345610  
      10) gleason< 5.5 20  14.297110 0.5304115 *
      11) gleason>=5.5 8  11.094650 1.3069940 *
   3) grade>=2.5 85 122.441500 1.6148600  
     6) age>=56.5 75 103.062900 1.4255040  
      12) gleason< 7.5 50  66.119800 1.1407320  
        24) g2< 13.475 24  27.197170 0.8007306 *
        25) g2>=13.475 26  36.790960 1.4570210  
          50) g2>=17.915 15  20.332740 0.9789825 *
          51) g2< 17.915 11  13.459010 2.1714480 *
      13) gleason>=7.5 25  33.487250 2.0307290  
        26) g2>=15.29 10  11.588480 1.2156230 *
        27) g2< 15.29 15  18.939150 2.7053610 *
     7) age< 56.5 10  13.769010 3.1822320 *
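> ## With a Surv() response rpart uses its exponential splitting method; as
> ## the printout suggests, yval is an event rate relative to the overall
> ## rate, which is why the root node's yval is exactly 1.  Sketch of the
> ## usual plot, as in the other examples:
> ##   fit <- rpart(Surv(pgtime, pgstat) ~ ., stagec)
> ##   plot(fit); text(fit, use.n = TRUE)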
> 
> 
> 
> cleanEx()

detaching ‘package:survival’

> nameEx("summary.rpart")
> ### * summary.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: summary.rpart
> ### Title: Summarize a Fitted Rpart Object
> ### Aliases: summary.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> ## a regression tree
> z.auto <- rpart(Mileage ~ Weight, car.test.frame)
> summary(z.auto)
Call:
rpart(formula = Mileage ~ Weight, data = car.test.frame)
  n= 60 

          CP nsplit rel error    xerror       xstd
1 0.59534912      0 1.0000000 1.0337818 0.18046532
2 0.13452819      1 0.4046509 0.5836606 0.10900973
3 0.01282843      2 0.2701227 0.4409221 0.08652804
4 0.01000000      3 0.2572943 0.4415805 0.08663003

Variable importance
Weight 
   100 

Node number 1: 60 observations,    complexity param=0.5953491
  mean=24.58333, MSE=22.57639 
  left son=2 (45 obs) right son=3 (15 obs)
  Primary splits:
      Weight < 2567.5 to the right, improve=0.5953491, (0 missing)

Node number 2: 45 observations,    complexity param=0.1345282
  mean=22.46667, MSE=8.026667 
  left son=4 (22 obs) right son=5 (23 obs)
  Primary splits:
      Weight < 3087.5 to the right, improve=0.5045118, (0 missing)

Node number 3: 15 observations
  mean=30.93333, MSE=12.46222 

Node number 4: 22 observations
  mean=20.40909, MSE=2.78719 

Node number 5: 23 observations,    complexity param=0.01282843
  mean=24.43478, MSE=5.115312 
  left son=10 (15 obs) right son=11 (8 obs)
  Primary splits:
      Weight < 2747.5 to the right, improve=0.1476996, (0 missing)

Node number 10: 15 observations
  mean=23.8, MSE=4.026667 

Node number 11: 8 observations
  mean=25.625, MSE=4.984375 

> 
> ## a classification tree with multiple variables and surrogate splits.
> summary(rpart(Kyphosis ~ Age + Number + Start, data = kyphosis))
Call:
rpart(formula = Kyphosis ~ Age + Number + Start, data = kyphosis)
  n= 81 

          CP nsplit rel error   xerror      xstd
1 0.17647059      0 1.0000000 1.000000 0.2155872
2 0.01960784      1 0.8235294 1.058824 0.2200975
3 0.01000000      4 0.7647059 1.058824 0.2200975

Variable importance
 Start    Age Number 
    64     24     12 

Node number 1: 81 observations,    complexity param=0.1764706
  predicted class=absent   expected loss=0.2098765  P(node) =1
    class counts:    64    17
   probabilities: 0.790 0.210 
  left son=2 (62 obs) right son=3 (19 obs)
  Primary splits:
      Start  < 8.5  to the right, improve=6.762330, (0 missing)
      Number < 5.5  to the left,  improve=2.866795, (0 missing)
      Age    < 39.5 to the left,  improve=2.250212, (0 missing)
  Surrogate splits:
      Number < 6.5  to the left,  agree=0.802, adj=0.158, (0 split)

Node number 2: 62 observations,    complexity param=0.01960784
  predicted class=absent   expected loss=0.09677419  P(node) =0.7654321
    class counts:    56     6
   probabilities: 0.903 0.097 
  left son=4 (29 obs) right son=5 (33 obs)
  Primary splits:
      Start  < 14.5 to the right, improve=1.0205280, (0 missing)
      Age    < 55   to the left,  improve=0.6848635, (0 missing)
      Number < 4.5  to the left,  improve=0.2975332, (0 missing)
  Surrogate splits:
      Number < 3.5  to the left,  agree=0.645, adj=0.241, (0 split)
      Age    < 16   to the left,  agree=0.597, adj=0.138, (0 split)

Node number 3: 19 observations
  predicted class=present  expected loss=0.4210526  P(node) =0.2345679
    class counts:     8    11
   probabilities: 0.421 0.579 

Node number 4: 29 observations
  predicted class=absent   expected loss=0  P(node) =0.3580247
    class counts:    29     0
   probabilities: 1.000 0.000 

Node number 5: 33 observations,    complexity param=0.01960784
  predicted class=absent   expected loss=0.1818182  P(node) =0.4074074
    class counts:    27     6
   probabilities: 0.818 0.182 
  left son=10 (12 obs) right son=11 (21 obs)
  Primary splits:
      Age    < 55   to the left,  improve=1.2467530, (0 missing)
      Start  < 12.5 to the right, improve=0.2887701, (0 missing)
      Number < 3.5  to the right, improve=0.1753247, (0 missing)
  Surrogate splits:
      Start  < 9.5  to the left,  agree=0.758, adj=0.333, (0 split)
      Number < 5.5  to the right, agree=0.697, adj=0.167, (0 split)

Node number 10: 12 observations
  predicted class=absent   expected loss=0  P(node) =0.1481481
    class counts:    12     0
   probabilities: 1.000 0.000 

Node number 11: 21 observations,    complexity param=0.01960784
  predicted class=absent   expected loss=0.2857143  P(node) =0.2592593
    class counts:    15     6
   probabilities: 0.714 0.286 
  left son=22 (14 obs) right son=23 (7 obs)
  Primary splits:
      Age    < 111  to the right, improve=1.71428600, (0 missing)
      Start  < 12.5 to the right, improve=0.79365080, (0 missing)
      Number < 3.5  to the right, improve=0.07142857, (0 missing)

Node number 22: 14 observations
  predicted class=absent   expected loss=0.1428571  P(node) =0.1728395
    class counts:    12     2
   probabilities: 0.857 0.143 

Node number 23: 7 observations
  predicted class=present  expected loss=0.4285714  P(node) =0.08641975
    class counts:     3     4
   probabilities: 0.429 0.571 

> 
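> ## In the surrogate-split lines above, "agree" is the fraction of
> ## observations that the surrogate sends the same way as the primary split,
> ## and "adj" measures the improvement over simply sending everyone with the
> ## majority.  E.g. for node 1 the surrogate matches about 65 of 81 cases:
> ##   65/81                 # 0.802 = agree
> ##   (65 - 62) / (81 - 62) # 0.158 = adj; the majority rule alone gets 62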
> 
> 
> cleanEx()
> nameEx("text.rpart")
> ### * text.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: text.rpart
> ### Title: Place Text on a Dendrogram Plot
> ### Aliases: text.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> freen.tr <- rpart(y ~ ., freeny)
> par(xpd = TRUE)
> plot(freen.tr)
> text(freen.tr, use.n = TRUE, all = TRUE)
> 
> 
> 
> graphics::par(get("par.postscript", pos = 'CheckExEnv'))
> cleanEx()
> nameEx("xpred.rpart")
> ### * xpred.rpart
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: xpred.rpart
> ### Title: Return Cross-Validated Predictions
> ### Aliases: xpred.rpart
> ### Keywords: tree
> 
> ### ** Examples
> 
> fit <- rpart(Mileage ~ Weight, car.test.frame)
> xmat <- xpred.rpart(fit)
> xerr <- (xmat - car.test.frame$Mileage)^2
> apply(xerr, 2, sum)   # cross-validated error estimate
0.79767456 0.28300396 0.04154257 0.01132626 
 1396.6687   773.1546   577.8990   594.1341 
> 
> # approx same result as rel. error from printcp(fit)
> apply(xerr, 2, sum)/var(car.test.frame$Mileage) 
0.79767456 0.28300396 0.04154257 0.01132626 
  60.83306   33.67539   25.17087   25.87800 
> printcp(fit)

Regression tree:
rpart(formula = Mileage ~ Weight, data = car.test.frame)

Variables actually used in tree construction:
[1] Weight

Root node error: 1354.6/60 = 22.576

n= 60 

        CP nsplit rel error  xerror     xstd
1 0.595349      0   1.00000 1.03378 0.180465
2 0.134528      1   0.40465 0.58366 0.109010
3 0.012828      2   0.27012 0.44092 0.086528
4 0.010000      3   0.25729 0.44158 0.086630
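> ## Note (sketch): dividing by var() above leaves these sums on roughly
> ## (n - 1) times the "rel error"/"xerror" scale.  Dividing instead by the
> ## root-node sum of squares (the "Root node error" numerator, 1354.6) puts
> ## them directly on the CP-table scale, up to cross-validation randomness:
> ##   apply(xerr, 2, sum) /
> ##     sum((car.test.frame$Mileage - mean(car.test.frame$Mileage))^2)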
> 
> 
> 
> ### * <FOOTER>
> ###
> cleanEx()
> options(digits = 7L)
> base::cat("Time elapsed: ", proc.time() - base::get("ptime", pos = 'CheckExEnv'),"\n")
Time elapsed:  0.777 0.048 0.825 0 0 
> grDevices::dev.off()
null device 
          1 
> ###
> ### Local variables: ***
> ### mode: outline-minor ***
> ### outline-regexp: "\\(> \\)?### [*]+" ***
> ### End: ***
> quit('no')