The model studied in this experiment is a black box, where x1, x2, …, x11 are controllable factors, z1, …, z11 are uncontrollable factors, and y is the output. To approximate this unknown model, we first need to determine which variables have the most significant effect on the response y, using screening designs.
Then we define and fit an analytical model of the response y as a function of the primary factors x, using regression together with LHS and optimal designs.
My first intuition was to run an LHS (Latin hypercube sampling) design over the 11 factors to get a general overview of the response behavior.
library(DoE.wrapper)
set.seed(45)
# Maximin LHS design: 500 candidate points over the 11 factors, each in [0, 1]
design <- lhs.design(type = "maximin", nruns = 500, nfactors = 11, digits = NULL,
                     seed = 20523,
                     factor.names = list(X1 = c(0, 1), X2 = c(0, 1), X3 = c(0, 1),
                                         X4 = c(0, 1), X5 = c(0, 1), X6 = c(0, 1),
                                         X7 = c(0, 1), X8 = c(0, 1), X9 = c(0, 1),
                                         X10 = c(0, 1), X11 = c(0, 1)))

# D-optimal subset of 30 runs selected from the LHS candidates
# (nRepeats = number of random restarts of the Federov search)
design.Dopt <- Dopt.design(30, data = design, nRepeats = 5, randomize = TRUE,
                           seed = 19573)
design.Dopt
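To evaluate these runs I need to feed them to the DoE Shiny app, which means writing the design to disk first. A minimal sketch, assuming the app accepts a plain CSV (the file name is just a placeholder):

# Export the D-optimal design for the app; "design.csv" is a placeholder name
write.csv(design.Dopt, file = "design.csv", row.names = FALSE)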
Once the design was generated, I tested it on the DoE Shiny app and got back a CSV file containing the design points with the corresponding response (each of the 30 points appears twice, as a replicate, giving 60 observations).
# Read the app's output; drop the first (row-index) column, keep x1..x11 and y
df <- read.csv("exp.csv", header = TRUE,
               colClasses = c("NULL", rep(NA, 12)))
df
##            x1         x2         x3         x4          x5         x6
##  1 0.99947246 0.02323586 0.01041495 0.92758385 0.405715999 0.96732174
##  2 0.04848198 0.09642373 0.32764357 0.09601540 0.095546909 0.15338152
##  3 0.94423611 0.39096927 0.11929524 0.40408465 0.934471510 0.37732404
##  4 0.23554733 0.97640699 0.82210862 0.76493711 0.117375133 0.57908885
##  5 0.03450041 0.24211967 0.04198608 0.28516554 0.241993050 0.31437907
##  6 0.90467968 0.97548797 0.55360886 0.17696613 0.867162465 0.48496169
##  7 0.05065056 0.01818887 0.60924413 0.95601520 0.448485376 0.69625769
##  8 0.76961950 0.20821603 0.98666699 0.01839787 0.007111332 0.80098393
##  9 0.99702075 0.92300916 0.94747204 0.49589108 0.744617010 0.02878196
## 10 0.44149406 0.19504098 0.78658170 0.78465226 0.001755782 0.05836366
## 11 0.01803940 0.85952905 0.47851799 0.05199098 0.245971032 0.76737971
## 12 0.79507995 0.12549151 0.86682214 0.23872548 0.034331251 0.27697563
## 13 0.01620349 0.21154352 0.15991234 0.99492808 0.984350656 0.93669917
## 14 0.83452798 0.02420187 0.97402625 0.88712762 0.909108593 0.16447802
## 15 0.26547643 0.87864987 0.12016306 0.95044120 0.013099241 0.83484864
## 16 0.05461335 0.24687019 0.14160278 0.03868864 0.803510236 0.09732755
## 17 0.76072420 0.86439767 0.93660569 0.07267420 0.927266422 0.02473151
## 18 0.91358330 0.15037229 0.84170975 0.02888531 0.896672602 0.77572634
## 19 0.77436158 0.88778810 0.28763308 0.94534297 0.320167265 0.06204175
## 20 0.76260426 0.06148193 0.10968400 0.02090904 0.849605474 0.86390842
## 21 0.03072968 0.66305584 0.90460102 0.87067431 0.951323499 0.29598197
## 22 0.85448616 0.99732905 0.89493945 0.46002165 0.113846141 0.84767510
## 23 0.45856606 0.24450849 0.29138303 0.04447944 0.022780510 0.90731968
## 24 0.98654900 0.95545206 0.05199728 0.50020307 0.912256007 0.25399299
## 25 0.72941264 0.84726694 0.02996853 0.83298626 0.144377987 0.97311178
## 26 0.64733055 0.81697627 0.97715863 0.70086724 0.819694117 0.90442736
## 27 0.97023434 0.33634894 0.08136717 0.95946763 0.080633877 0.10405929
## 28 0.07221227 0.39804243 0.63569917 0.96903787 0.921338058 0.73734465
## 29 0.00275796 0.97003381 0.19024957 0.11480417 0.723956371 0.08387815
## 30 0.96594488 0.42137377 0.05228119 0.84771475 0.147307147 0.12740675
## 31 0.99947246 0.02323586 0.01041495 0.92758385 0.405715999 0.96732174
## 32 0.04848198 0.09642373 0.32764357 0.09601540 0.095546909 0.15338152
## 33 0.94423611 0.39096927 0.11929524 0.40408465 0.934471510 0.37732404
## 34 0.23554733 0.97640699 0.82210862 0.76493711 0.117375133 0.57908885
## 35 0.03450041 0.24211967 0.04198608 0.28516554 0.241993050 0.31437907
## 36 0.90467968 0.97548797 0.55360886 0.17696613 0.867162465 0.48496169
## 37 0.05065056 0.01818887 0.60924413 0.95601520 0.448485376 0.69625769
## 38 0.76961950 0.20821603 0.98666699 0.01839787 0.007111332 0.80098393
## 39 0.99702075 0.92300916 0.94747204 0.49589108 0.744617010 0.02878196
## 40 0.44149406 0.19504098 0.78658170 0.78465226 0.001755782 0.05836366
## 41 0.01803940 0.85952905 0.47851799 0.05199098 0.245971032 0.76737971
## 42 0.79507995 0.12549151 0.86682214 0.23872548 0.034331251 0.27697563
## 43 0.01620349 0.21154352 0.15991234 0.99492808 0.984350656 0.93669917
## 44 0.83452798 0.02420187 0.97402625 0.88712762 0.909108593 0.16447802
## 45 0.26547643 0.87864987 0.12016306 0.95044120 0.013099241 0.83484864
## 46 0.05461335 0.24687019 0.14160278 0.03868864 0.803510236 0.09732755
## 47 0.76072420 0.86439767 0.93660569 0.07267420 0.927266422 0.02473151
## 48 0.91358330 0.15037229 0.84170975 0.02888531 0.896672602 0.77572634
## 49 0.77436158 0.88778810 0.28763308 0.94534297 0.320167265 0.06204175
## 50 0.76260426 0.06148193 0.10968400 0.02090904 0.849605474 0.86390842
## 51 0.03072968 0.66305584 0.90460102 0.87067431 0.951323499 0.29598197
## 52 0.85448616 0.99732905 0.89493945 0.46002165 0.113846141 0.84767510
## 53 0.45856606 0.24450849 0.29138303 0.04447944 0.022780510 0.90731968
## 54 0.98654900 0.95545206 0.05199728 0.50020307 0.912256007 0.25399299
## 55 0.72941264 0.84726694 0.02996853 0.83298626 0.144377987 0.97311178
## 56 0.64733055 0.81697627 0.97715863 0.70086724 0.819694117 0.90442736
## 57 0.97023434 0.33634894 0.08136717 0.95946763 0.080633877 0.10405929
## 58 0.07221227 0.39804243 0.63569917 0.96903787 0.921338058 0.73734465
## 59 0.00275796 0.97003381 0.19024957 0.11480417 0.723956371 0.08387815
## 60 0.96594488 0.42137377 0.05228119 0.84771475 0.147307147 0.12740675
##            x7          x8          x9         x10        x11           y
##  1 0.84503189 0.394353251 0.917277570 0.426533959 0.01447272 -0.71959133
##  2 0.09260289 0.583933518 0.057360351 0.002306756 0.44842306  1.00222100
##  3 0.79539751 0.963142799 0.015531428 0.111805061 0.90193135  1.00733833
##  4 0.06339440 0.244377105 0.131221651 0.119161275 0.05375979  1.60667020
##  5 0.37508288 0.959322703 0.869314007 0.980955361 0.28117288 -0.46736899
##  6 0.99017368 0.117951640 0.956844011 0.121355357 0.88851783 -0.64166381
##  7 0.24199329 0.034833848 0.370941672 0.001192110 0.91462615  1.17554290
##  8 0.98443527 0.202810030 0.280972042 0.703396084 0.82243106  2.09940585
##  9 0.81327254 0.066411848 0.859554949 0.908110495 0.13107158 -0.91633571
## 10 0.09051193 0.819929237 0.994056284 0.991800176 0.30408971  0.24243486
## 11 0.19218221 0.163608765 0.768295896 0.926162640 0.99424095 -0.46534643
## 12 0.85859604 0.789628485 0.767144314 0.048362733 0.05012624  1.13823441
## 13 0.41394232 0.493911991 0.872465127 0.034815668 0.03030974  0.14521729
## 14 0.10997367 0.028360326 0.066462914 0.969474933 0.15750639  2.80919025
## 15 0.95348609 0.713065300 0.786374649 0.011630352 0.90448781  0.25334172
## 16 0.90375913 0.450435088 0.004159208 0.660080503 0.92087664  1.04593518
## 17 0.26711889 0.630150879 0.429992650 0.818564421 0.99997453  1.88363486
## 18 0.60182548 0.970759916 0.608452309 0.101032550 0.02980918 -0.15918283
## 19 0.85129034 0.021645674 0.030618433 0.902453779 0.08730313  3.26934335
## 20 0.05358418 0.135862966 0.481783923 0.386720357 0.87722008  1.73108285
## 21 0.89261596 0.599283924 0.920637280 0.149939033 0.08428514 -0.18385625
## 22 0.60848095 0.905863727 0.695638333 0.069318383 0.89530425  0.80978109
## 23 0.94218233 0.001120312 0.267075388 0.598658408 0.04167006  1.01183223
## 24 0.04277122 0.333192510 0.971492197 0.453813455 0.16506041 -0.98053985
## 25 0.15230159 0.920069823 0.043056139 0.828114612 0.12660472  3.42841616
## 26 0.01832533 0.888808352 0.037813449 0.963918908 0.10416447  3.02941185
## 27 0.15640768 0.683962808 0.834437925 0.243924631 0.81368127 -0.21212084
## 28 0.99991691 0.710155810 0.814793769 0.988010561 0.91993764  0.07912545
## 29 0.72975848 0.099525899 0.113546940 0.238410641 0.29531121  0.87539175
## 30 0.81096716 0.988539539 0.077358901 0.744459103 0.82871555  1.06231366
## 31 0.84503189 0.394353251 0.917277570 0.426533959 0.01447272 -0.71722974
## 32 0.09260289 0.583933518 0.057360351 0.002306756 0.44842306  1.00494328
## 33 0.79539751 0.963142799 0.015531428 0.111805061 0.90193135  1.00938298
## 34 0.06339440 0.244377105 0.131221651 0.119161275 0.05375979  1.60521011
## 35 0.37508288 0.959322703 0.869314007 0.980955361 0.28117288 -0.46177590
## 36 0.99017368 0.117951640 0.956844011 0.121355357 0.88851783 -0.64251023
## 37 0.24199329 0.034833848 0.370941672 0.001192110 0.91462615  1.16741000
## 38 0.98443527 0.202810030 0.280972042 0.703396084 0.82243106  2.09774303
## 39 0.81327254 0.066411848 0.859554949 0.908110495 0.13107158 -0.91786672
## 40 0.09051193 0.819929237 0.994056284 0.991800176 0.30408971  0.24010067
## 41 0.19218221 0.163608765 0.768295896 0.926162640 0.99424095 -0.47362237
## 42 0.85859604 0.789628485 0.767144314 0.048362733 0.05012624  1.13599619
## 43 0.41394232 0.493911991 0.872465127 0.034815668 0.03030974  0.14060717
## 44 0.10997367 0.028360326 0.066462914 0.969474933 0.15750639  2.81151136
## 45 0.95348609 0.713065300 0.786374649 0.011630352 0.90448781  0.25268772
## 46 0.90375913 0.450435088 0.004159208 0.660080503 0.92087664  1.04696958
## 47 0.26711889 0.630150879 0.429992650 0.818564421 0.99997453  1.89070960
## 48 0.60182548 0.970759916 0.608452309 0.101032550 0.02980918 -0.15243809
## 49 0.85129034 0.021645674 0.030618433 0.902453779 0.08730313  3.26912826
## 50 0.05358418 0.135862966 0.481783923 0.386720357 0.87722008  1.72776002
## 51 0.89261596 0.599283924 0.920637280 0.149939033 0.08428514 -0.18233387
## 52 0.60848095 0.905863727 0.695638333 0.069318383 0.89530425  0.80516917
## 53 0.94218233 0.001120312 0.267075388 0.598658408 0.04167006  1.01924947
## 54 0.04277122 0.333192510 0.971492197 0.453813455 0.16506041 -0.98362770
## 55 0.15230159 0.920069823 0.043056139 0.828114612 0.12660472  3.42559363
## 56 0.01832533 0.888808352 0.037813449 0.963918908 0.10416447  3.03522536
## 57 0.15640768 0.683962808 0.834437925 0.243924631 0.81368127 -0.20782722
## 58 0.99991691 0.710155810 0.814793769 0.988010561 0.91993764  0.08086296
## 59 0.72975848 0.099525899 0.113546940 0.238410641 0.29531121  0.87965510
## 60 0.81096716 0.988539539 0.077358901 0.744459103 0.82871555  1.06098869
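Before any formal screening, a quick exploratory check of the marginal correlation of each factor with y already hints at which factors matter. A small sketch on the data frame just loaded (not part of the original workflow):

# Crude first screen: linear correlation of every column of df with y
round(cor(df)["y", ], 2)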
Now we want to examine the factor effects in more detail, so that we can identify which factors most influence the response. Since running a large number of such experiments is tedious, we use a Plackett-Burman design, which screens all 11 main effects with only 12 runs (interactions are aliased with main effects in such designs).
library(FrF2)
# 12-run Plackett-Burman screening design for 11 two-level factors
d <- pb(nruns = 12, n12.taguchi = FALSE, nfactors = 12 - 1, ncenter = 0,
        replications = 1, repeat.only = FALSE, randomize = TRUE, seed = 26654,
        factor.names = list(X1 = c(0, 1), X2 = c(0, 1), X3 = c(0, 1),
                            X4 = c(0, 1), X5 = c(0, 1), X6 = c(0, 1),
                            X7 = c(0, 1), X8 = c(0, 1), X9 = c(0, 1),
                            X10 = c(0, 1), X11 = c(0, 1)))
d
Here are the results:

   X1 X2 X3 X4 X5 X6 X7 X8 X9 X10 X11
 1  1  0  1  1  0  1  1  1  0   0   0
 2  1  0  0  0  1  0  1  1  0   1   1
 3  0  0  1  0  1  1  0  1  1   1   0
 4  0  1  1  0  1  1  1  0  0   0   1
 5  0  1  0  1  1  0  1  1  1   0   0
 6  1  1  0  1  1  1  0  0  0   1   0
 7  1  1  1  0  0  0  1  0  1   1   0
 8  1  0  1  1  1  0  0  0  1   0   1
 9  0  0  0  0  0  0  0  0  0   0   0
10  1  1  0  0  0  1  0  1  1   0   1
11  0  0  0  1  0  1  1  0  1   1   1
12  0  1  1  1  0  0  0  1  0   1   1
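The design matrix by itself tells us nothing until the 12 runs are evaluated. A sketch of the intended screening analysis, assuming the measured responses were saved in a one-column file pb_responses.csv (a hypothetical file name):

# Attach the hypothetical responses of the 12 PB runs to the design
y_pb <- read.csv("pb_responses.csv")$y
pb_df <- cbind(as.data.frame(d), y = y_pb)
# A Plackett-Burman design estimates all 11 main effects at once
summary(lm(y ~ ., data = pb_df))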
To quantify the relationship between the factors and the response, we fit linear regressions on the data generated by the application and analyze the variance to assess each effect.
y <- df$y
summary(lm(y ~ df$x1 + df$x3 + df$x4 + df$x6 + df$x7 + df$x8, data = df))
## 
## Call:
## lm(formula = y ~ df$x1 + df$x3 + df$x4 + df$x6 + df$x7 + df$x8, 
##     data = df)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.0563 -0.9079  0.0845  0.6693  2.6284 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)
## (Intercept)   0.6376     0.5677   1.123    0.266
## df$x1         0.3327     0.4232   0.786    0.435
## df$x3         0.5420     0.4427   1.224    0.226
## df$x4         0.2155     0.4350   0.495    0.622
## df$x6         0.2991     0.4675   0.640    0.525
## df$x7        -0.7373     0.4443  -1.659    0.103
## df$x8        -0.2206     0.4721  -0.467    0.642
## 
## Residual standard error: 1.24 on 53 degrees of freedom
## Multiple R-squared:  0.09633,    Adjusted R-squared:  -0.005968 
## F-statistic: 0.9417 on 6 and 53 DF,  p-value: 0.4734
Nothing interesting :/ every p-value is large and R² is tiny (0.096), so this subset of factors explains almost none of the variation in y.
y <- df$y
summary(lm(y ~ df$x1 + df$x5 + df$x7 + df$x8 + df$x10 + df$x11, data = df))
## 
## Call:
## lm(formula = y ~ df$x1 + df$x5 + df$x7 + df$x8 + df$x10 + df$x11, 
##     data = df)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -2.1328 -0.9092  0.1487  0.6941  2.0707 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)  
## (Intercept)  1.02680    0.54801   1.874   0.0665 .
## df$x1        0.30183    0.41698   0.724   0.4723  
## df$x5       -0.42006    0.42052  -0.999   0.3224  
## df$x7       -0.65771    0.44176  -1.489   0.1425  
## df$x8       -0.22215    0.46419  -0.479   0.6342  
## df$x10       0.71464    0.42558   1.679   0.0990 .
## df$x11      -0.08727    0.41089  -0.212   0.8326  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 1.22 on 53 degrees of freedom
## Multiple R-squared:  0.1252,    Adjusted R-squared:  0.02621 
## F-statistic: 1.265 on 6 and 53 DF,  p-value: 0.2894
A slight improvement in R² (0.13), but the fit is still far too poor.
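Rather than keep picking subsets by hand, we could also let R search for us. A minimal sketch using backward selection by AIC over all 11 main effects (not something run in the original analysis):

# Fit all 11 main effects, then drop terms stepwise by AIC
full <- lm(y ~ ., data = df)
step(full, direction = "backward", trace = 0)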
y <- df$y
summary(lm(y ~ df$x3 + df$x5 + df$x6 + df$x8 + df$x9 + df$x10, data = df))
## 
## Call:
## lm(formula = y ~ df$x3 + df$x5 + df$x6 + df$x8 + df$x9 + df$x10, 
##     data = df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.02781 -0.54536  0.03159  0.34813  1.33800 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   1.3807     0.3086   4.474 4.09e-05 ***
## df$x3         0.7045     0.2414   2.918  0.00515 ** 
## df$x5        -0.2770     0.2337  -1.185  0.24124    
## df$x6         0.5971     0.2577   2.317  0.02440 *  
## df$x8         0.1094     0.2548   0.429  0.66945    
## df$x9        -2.6861     0.2415 -11.125 1.78e-15 ***
## df$x10        0.5314     0.2383   2.230  0.02999 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.6723 on 53 degrees of freedom
## Multiple R-squared:  0.7343,    Adjusted R-squared:  0.7042 
## F-statistic: 24.41 on 6 and 53 DF,  p-value: 1.211e-13
X9 stands out as a highly significant factor (X3, X6 and X10 are significant as well), and the coefficient of determination R² is now 0.73, which is pretty good.
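The dominance of X9 is easy to confirm visually with a quick base-R plot (a sketch, not part of the original report):

# y falls roughly linearly as x9 grows, confirming the dominant effect
plot(df$x9, df$y, xlab = "x9", ylab = "y")
abline(lm(y ~ x9, data = df), col = "red")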
y <- df$y
summary(lm(y ~ df$x3 + df$x9 + df$x6 + df$x3:df$x6, data = df))
## 
## Call:
## lm(formula = y ~ df$x3 + df$x9 + df$x6 + df$x3:df$x6, data = df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -1.18415 -0.57451  0.06863  0.38570  1.55104 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   1.3097     0.2448   5.350 1.76e-06 ***
## df$x3         1.5566     0.4043   3.850  0.00031 ***
## df$x9        -2.9189     0.2445 -11.938  < 2e-16 ***
## df$x6         1.3043     0.3958   3.296  0.00172 ** 
## df$x3:df$x6  -1.7163     0.6824  -2.515  0.01485 *  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.6633 on 55 degrees of freedom
## Multiple R-squared:  0.7316,    Adjusted R-squared:  0.7121 
## F-statistic: 37.48 on 4 and 55 DF,  p-value: 4.142e-15
Not much of an improvement: the X3:X6 interaction barely changes the fit. Let's try another model that keeps X9 and X3 and brings in X1, X4, X5 and X6.
y <- df$y
summary(lm(y ~ df$x1 + df$x3 + df$x4 + df$x5 + df$x6 + df$x9, data = df))
## 
## Call:
## lm(formula = y ~ df$x1 + df$x3 + df$x4 + df$x5 + df$x6 + df$x9, 
##     data = df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.95146 -0.36333 -0.09012  0.39626  1.26471 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   1.2744     0.2824   4.512  3.6e-05 ***
## df$x1         0.3293     0.2199   1.498  0.14017    
## df$x3         0.7894     0.2327   3.393  0.00131 ** 
## df$x4         0.6057     0.2273   2.665  0.01018 *  
## df$x5        -0.2703     0.2243  -1.205  0.23361    
## df$x6         0.5019     0.2460   2.040  0.04632 *  
## df$x9        -2.8278     0.2327 -12.151  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.6487 on 53 degrees of freedom
## Multiple R-squared:  0.7526,    Adjusted R-squared:  0.7246 
## F-statistic: 26.87 on 6 and 53 DF,  p-value: 1.921e-14
Removing X11 has no noticeable effect. We decide to keep X9, X6 and X4, and to replace the single factors X5, X7, X1 and X3 by the interaction terms X5:X7 and X1:X3.
y <- df$y
summary(lm(y ~ df$x3:df$x1 + df$x6 + df$x5:df$x7 + df$x4 + df$x9, data = df))
## 
## Call:
## lm(formula = y ~ df$x3:df$x1 + df$x6 + df$x5:df$x7 + df$x4 + 
##     df$x9, data = df)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.96904 -0.38017 -0.04281  0.40090  1.07561 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)    
## (Intercept)   1.5565     0.2251   6.914 5.67e-09 ***
## df$x6         0.4247     0.2303   1.844 0.070699 .  
## df$x4         0.7251     0.2152   3.369 0.001397 ** 
## df$x9        -2.7265     0.2177 -12.524  < 2e-16 ***
## df$x3:df$x1   0.9932     0.2522   3.938 0.000237 ***
## df$x5:df$x7  -0.7790     0.2618  -2.975 0.004373 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.5996 on 54 degrees of freedom
## Multiple R-squared:  0.7847,    Adjusted R-squared:  0.7647 
## F-statistic: 39.35 on 5 and 54 DF,  p-value: < 2.2e-16
Every term is significant at the 5% level except X6 (p ≈ 0.07), and R² has risen to 0.78. We can therefore state our fitted model as:
y = 1.56 - 2.73*X9 + 0.73*X4 + 0.42*X6 + 0.99*X1*X3 - 0.78*X5*X7
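As a last sanity check, the same model can be refitted with column names and its predictions compared to the observed responses. A short sketch:

# Same terms as the final model above, written via data = df
final <- lm(y ~ x9 + x4 + x6 + x1:x3 + x5:x7, data = df)
plot(fitted(final), df$y, xlab = "fitted y", ylab = "observed y")
abline(0, 1, col = "red")  # points close to this line mean a good fit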