options(contrasts = c("contr.treatment", "contr.poly"))

# Logistic regression ----
# Grouped binomial data: r successes out of n trials at each log-concentration.
r <- c(10, 17, 12, 7, 23, 22, 29, 29, 23)
n <- c(31, 30, 31, 27, 26, 30, 31, 30, 30)
logconc <- c(2.68, 2.76, 2.82, 2.90, 3.02, 3.04, 3.13, 3.20, 3.21)

# For a binomial glm the two-column response is cbind(successes, failures),
# so the second column must be n - r (not n).
counts <- cbind(r, n - r)
result <- glm(counts ~ logconc, family = binomial("logit"))
summary(result, correlation = TRUE, symbolic.cor = TRUE)
result$coefficients

# Plot residuals vs. linear predictor ----
# (linear predictor on x, Pearson residuals on y, matching the description)
plot(result$linear.predictors, residuals(result, type = "pearson"))

# Plot logconc vs. empirical logits ----
# 0.5 added to both counts to avoid log(0) at r = 0 or r = n.
emplogit <- log((r + 0.5) / (n - r + 0.5))
plot(logconc, emplogit)
plot(result)

# Adjusting for overdispersion ----
# This should give you the same model but with adjusted covariance
# matrix, that is SE for your beta's and also changed z-values.
# First estimate the dispersion parameter based on the MAXIMAL model;
# in our example this is simple since we have only one model:
# X^2 / df = 4.08.
# Notice that this does not adjust overall fit statistics.
summary(result, dispersion = 4.08, correlation = TRUE, symbolic.cor = TRUE)

# Notice you can also use the package dispmod; it gives a slightly
# different result because it uses G^2 / df, and it adjusts the overall
# fit statistics too.
# install.packages("dispmod")   # run once if not installed
library(dispmod)
glm.binomial.disp(result)

# ROC curve ----
# Sensitivity vs. 1 - specificity, sweeping the cutoff over (0, 1].
lp <- result$linear.predictors
p <- exp(lp) / (1 + exp(lp))   # fitted probabilities (inverse logit)
cbind(r, n - r, p)

# Start the curve at cutoff 0: everything classified positive,
# so sensitivity = 1 and specificity = 0.
sens <- 1
spec <- 0
total <- 100
for (i in (1:total) / total) {
  # NOTE(review): the tail of this loop was garbled in the source text;
  # reconstructed as the standard 2x2 classification counts at cutoff i —
  # confirm against the original handout.
  yy <- sum(r * (p >= i))         # true positives
  yn <- sum(r * (p < i))          # false negatives
  ny <- sum((n - r) * (p >= i))   # false positives
  nn <- sum((n - r) * (p < i))    # true negatives
  sens <- c(sens, yy / (yy + yn))
  spec <- c(spec, nn / (nn + ny))
}
plot(1 - spec, sens, type = "l")