Investigando o objeto retornado pela função
library(psych)
# Print the full (short = FALSE) report for the scoreItems() result.
# Use the generic print() and let S3 dispatch select print.psych();
# calling the method print.psych() directly bypasses dispatch and is
# non-idiomatic (the object already carries class "psych").
print(psicom, short = FALSE)
## Call: scoreItems(keys = keys, items = data[, rownames(keys)], missing = TRUE,
## impute = "none", digits = 3)
##
## (Standardized) Alpha:
## A C E N O
## alpha 0.86 0.94 0.81 0.88 0.89
##
## Standard errors of unstandardized Alpha:
## A C E N O
## ASE 0.0071 0.0033 0.0097 0.0068 0.0062
##
## Standardized Alpha of observed scales:
## A C E N O
## [1,] 0.86 0.94 0.81 0.88 0.89
##
## Average item correlation:
## A C E N O
## average.r 0.15 0.27 0.14 0.21 0.23
##
## Guttman 6* reliability:
## A C E N O
## Lambda.6 0.91 0.96 0.88 0.92 0.93
##
## Signal/Noise based upon av.r :
## A C E N O
## Signal/Noise 6.1 17 4.2 7.1 8.2
##
## Scale intercorrelations corrected for attenuation
## raw correlations below the diagonal, alpha on the diagonal
## corrected correlations above the diagonal:
##
## Note that these are the correlations of the complete scales based on the correlation matrix,
## not the observed scales based on the raw items.
## A C E N O
## A 0.86 0.75 0.57 0.69 0.68
## C 0.68 0.94 0.58 0.69 0.74
## E 0.48 0.51 0.81 0.58 0.60
## N 0.60 0.63 0.49 0.88 0.62
## O 0.60 0.68 0.51 0.54 0.89
##
## Item by scale correlations:
## corrected for item overlap and scale reliability
## A C E N O
## Sv1.004 0.42 0.30 0.42 0.33 0.33
## sv2.132 0.45 0.30 0.33 0.18 0.33
## sv2.133 0.47 0.29 0.32 0.18 0.27
## sv2.098 -0.34 -0.20 -0.21 -0.18 -0.22
## sv2.137 -0.23 -0.22 -0.23 -0.17 -0.15
## sv2.179 -0.18 -0.14 -0.17 -0.15 -0.16
## sv2.542 0.29 0.13 0.10 0.13 0.16
## sv2.543 0.20 0.04 -0.01 0.06 0.10
## sv2.545 0.38 0.26 0.00 0.19 0.23
## sv2.097 -0.17 -0.01 0.04 0.00 0.00
## sv2.159 -0.18 -0.10 0.22 -0.06 0.05
## sv2.160 -0.28 -0.19 -0.12 -0.13 -0.12
## sv2.166 0.60 0.48 0.39 0.34 0.41
## sv2.170 0.49 0.51 0.23 0.32 0.39
## sv2.642 0.54 0.55 0.34 0.41 0.45
## sv2.151 -0.29 -0.34 0.05 -0.27 -0.19
## sv2.162 -0.38 -0.36 -0.04 -0.31 -0.22
## sv2.178 -0.30 -0.31 -0.05 -0.28 -0.17
## Sv1.034 0.54 0.24 0.33 0.30 0.28
## sv2.191 0.35 0.12 0.20 0.17 0.15
## sv2.533 0.59 0.47 0.44 0.42 0.43
## sv2.534 -0.30 -0.15 -0.19 -0.24 -0.07
## sv2.536 -0.01 -0.01 0.03 -0.05 0.05
## sv2.538 -0.01 0.01 0.05 -0.11 0.09
## sv2.209 0.49 0.69 0.38 0.41 0.50
## sv2.274 0.50 0.69 0.37 0.48 0.51
## sv2.279 0.36 0.55 0.30 0.32 0.40
## sv2.572 -0.24 -0.35 -0.19 -0.20 -0.26
## sv2.644 -0.27 -0.35 -0.23 -0.25 -0.30
## sv2.645 -0.04 -0.09 -0.02 0.00 -0.05
## sv2.222 0.53 0.67 0.44 0.53 0.53
## sv2.223 0.26 0.36 0.22 0.28 0.32
## sv2.648 0.52 0.58 0.31 0.37 0.44
## Sv1.036 -0.12 -0.35 -0.05 -0.25 -0.19
## sv2.221 -0.18 -0.36 -0.15 -0.25 -0.27
## sv2.262 -0.26 -0.41 0.00 -0.28 -0.25
## Sv1.006 0.50 0.70 0.36 0.47 0.51
## sv2.233 0.47 0.67 0.30 0.46 0.47
## sv2.236 0.46 0.62 0.34 0.43 0.43
## Sv1.051 -0.15 -0.38 -0.08 -0.24 -0.18
## sv2.228 -0.14 -0.38 -0.07 -0.21 -0.19
## sv2.229 -0.13 -0.37 -0.08 -0.21 -0.15
## sv2.235 0.48 0.64 0.33 0.40 0.42
## sv2.290 0.38 0.49 0.34 0.34 0.43
## sv2.574 0.41 0.50 0.44 0.44 0.46
## sv2.205 -0.27 -0.38 -0.16 -0.17 -0.28
## sv2.257 -0.19 -0.43 -0.11 -0.15 -0.20
## sv2.261 -0.28 -0.48 -0.14 -0.24 -0.28
## sv2.244 0.53 0.68 0.40 0.41 0.50
## sv2.258 0.42 0.42 0.36 0.30 0.32
## sv2.585 0.47 0.55 0.42 0.36 0.43
## sv2.249 -0.19 -0.34 -0.19 -0.23 -0.20
## sv2.251 -0.22 -0.42 -0.23 -0.21 -0.22
## sv2.256 -0.21 -0.38 -0.18 -0.23 -0.21
## Sv1.055 0.31 0.29 0.58 0.35 0.30
## Sv1.063 0.42 0.43 0.60 0.49 0.42
## sv2.620 0.38 0.31 0.45 0.39 0.31
## sv2.291 0.03 -0.02 -0.08 0.00 -0.04
## sv2.301 -0.08 -0.18 -0.15 -0.09 -0.10
## sv2.652 0.30 0.40 -0.08 0.27 0.30
## sv2.331 0.10 0.08 0.24 0.07 0.10
## sv2.654 0.35 0.39 0.52 0.26 0.47
## sv2.656 0.27 0.37 0.43 0.24 0.41
## sv2.311 0.17 0.02 -0.09 0.02 0.05
## sv2.343 -0.10 -0.21 -0.40 -0.22 -0.19
## sv2.346 0.00 -0.03 -0.36 -0.09 -0.07
## Sv1.071 0.12 -0.04 0.35 -0.02 0.03
## Sv1.075 0.39 0.24 0.47 0.21 0.25
## sv2.353 0.34 0.27 0.48 0.28 0.31
## Sv1.067 0.30 0.25 -0.13 0.19 0.20
## sv2.342 0.03 0.02 -0.25 -0.01 0.01
## sv2.362 0.00 0.02 -0.15 -0.04 0.07
## Sv1.037 0.39 0.39 0.20 0.61 0.38
## sv2.440 0.29 0.24 0.18 0.36 0.29
## sv2.658 0.32 0.25 0.18 0.45 0.26
## Sv1.032 -0.29 -0.29 -0.07 -0.53 -0.23
## sv2.437 -0.10 -0.16 -0.06 -0.33 -0.06
## sv2.438 -0.30 -0.28 -0.02 -0.49 -0.19
## sv2.551 0.37 0.34 0.36 0.44 0.35
## sv2.552 0.35 0.28 0.27 0.44 0.32
## sv2.722 0.46 0.50 0.46 0.53 0.39
## sv2.367 -0.04 -0.17 -0.09 -0.36 -0.06
## sv2.369 0.02 -0.13 -0.04 -0.32 -0.06
## sv2.376 0.15 0.00 -0.14 -0.10 0.00
## sv2.557 0.31 0.23 0.25 0.32 0.24
## sv2.560 0.48 0.48 0.52 0.52 0.42
## sv2.561 0.39 0.36 0.40 0.46 0.34
## sv2.396 -0.16 -0.29 -0.24 -0.37 -0.16
## sv2.402 -0.04 -0.06 -0.05 -0.15 0.04
## sv2.413 -0.14 -0.22 -0.22 -0.39 -0.12
## Sv1.040 0.25 0.33 0.23 0.22 0.55
## sv2.477 0.36 0.39 0.27 0.32 0.61
## sv2.590 0.47 0.41 0.38 0.37 0.52
## sv2.593 -0.17 -0.21 -0.12 -0.19 -0.23
## sv2.662 -0.23 -0.24 -0.13 -0.15 -0.35
## sv2.663 -0.19 -0.26 -0.09 -0.20 -0.33
## Sv1.005 0.34 0.48 0.39 0.40 0.60
## sv2.493 0.28 0.26 0.29 0.24 0.51
## sv2.607 0.38 0.41 0.36 0.36 0.68
## sv2.488 -0.09 -0.08 -0.13 -0.07 -0.19
## sv2.608 -0.04 -0.14 -0.08 -0.08 -0.14
## sv2.610 -0.21 -0.25 -0.17 -0.17 -0.32
## Sv1.066 0.44 0.39 0.38 0.35 0.54
## sv2.507 0.50 0.52 0.39 0.42 0.60
## sv2.508 0.28 0.27 0.29 0.20 0.46
## sv2.613 -0.09 -0.10 -0.07 -0.06 -0.19
## sv2.667 -0.13 -0.11 -0.10 -0.06 -0.15
## sv2.668 -0.22 -0.23 -0.16 -0.19 -0.29
## sv2.116 0.55 0.44 0.45 0.32 0.49
## sv2.148 0.53 0.39 0.45 0.28 0.42
## sv2.149 0.43 0.34 0.39 0.21 0.38
## sv2.672 0.44 0.41 0.36 0.36 0.40
## sv2.674 0.40 0.28 0.06 0.31 0.26
## sv2.676 0.49 0.39 0.31 0.39 0.38
## sv2.174 0.56 0.44 0.22 0.48 0.38
## sv2.177 0.58 0.48 0.27 0.48 0.44
## sv2.678 0.61 0.52 0.37 0.45 0.46
## sv2.202 0.58 0.33 0.36 0.38 0.34
## sv2.203 0.55 0.31 0.35 0.34 0.29
## sv2.680 0.44 0.22 0.30 0.34 0.25
## sv2.683 0.53 0.69 0.40 0.45 0.51
## sv2.684 0.52 0.59 0.54 0.54 0.51
## sv2.685 0.52 0.62 0.49 0.46 0.54
## sv2.283 0.50 0.66 0.40 0.51 0.52
## sv2.284 0.49 0.56 0.33 0.51 0.48
## sv2.286 0.49 0.71 0.41 0.48 0.50
## sv2.238 0.45 0.67 0.33 0.42 0.45
## sv2.240 0.49 0.65 0.40 0.50 0.50
## sv2.686 0.46 0.65 0.32 0.42 0.44
## Sv1.077 0.48 0.68 0.32 0.36 0.45
## Sv1.091 0.47 0.61 0.35 0.43 0.51
## sv2.219 0.44 0.64 0.36 0.37 0.45
## sv2.195 0.48 0.48 0.26 0.41 0.39
## sv2.687 0.52 0.65 0.45 0.43 0.46
## sv2.688 0.54 0.65 0.47 0.44 0.49
## sv2.298 0.45 0.48 0.49 0.46 0.46
## sv2.299 0.47 0.40 0.43 0.35 0.33
## sv2.410 0.44 0.41 0.52 0.55 0.39
## sv2.318 0.38 0.43 0.53 0.36 0.41
## sv2.332 0.42 0.51 0.55 0.39 0.46
## sv2.364 0.41 0.52 0.58 0.43 0.50
## sv2.117 0.55 0.51 0.56 0.46 0.43
## sv2.323 0.25 0.24 0.44 0.23 0.28
## sv2.324 0.23 0.22 0.43 0.22 0.26
## Sv1.079 0.37 0.33 0.25 0.53 0.32
## sv2.126 0.45 0.37 0.25 0.65 0.38
## sv2.447 0.46 0.39 0.24 0.61 0.36
## sv2.381 0.44 0.45 0.36 0.69 0.44
## sv2.690 0.49 0.47 0.34 0.67 0.47
## sv2.692 0.39 0.38 0.33 0.57 0.35
## sv2.411 0.49 0.52 0.46 0.55 0.50
## sv2.417 0.46 0.43 0.39 0.62 0.43
## sv2.429 0.48 0.49 0.56 0.59 0.44
## sv2.482 0.28 0.36 0.22 0.35 0.58
## sv2.695 0.44 0.45 0.31 0.39 0.64
## sv2.696 0.44 0.47 0.33 0.35 0.64
## sv2.701 0.38 0.45 0.32 0.34 0.64
## sv2.702 0.47 0.56 0.48 0.54 0.69
## sv2.703 0.43 0.51 0.39 0.42 0.72
## sv2.706 0.51 0.59 0.52 0.49 0.62
## sv2.707 0.47 0.54 0.47 0.46 0.63
## sv2.708 0.46 0.50 0.37 0.43 0.70
##
## Non missing response frequency for each item
## 1 2 3 4 5 miss
## Sv1.004 0.05 0.16 0.25 0.33 0.22 0
## sv2.132 0.08 0.21 0.25 0.28 0.17 0
## sv2.133 0.05 0.16 0.27 0.34 0.18 0
## sv2.098 0.52 0.22 0.17 0.06 0.03 0
## sv2.137 0.36 0.34 0.23 0.05 0.02 0
## sv2.179 0.46 0.22 0.16 0.12 0.05 0
## sv2.542 0.38 0.19 0.13 0.15 0.15 0
## sv2.543 0.33 0.21 0.20 0.16 0.11 0
## sv2.545 0.18 0.32 0.25 0.15 0.11 0
## sv2.097 0.32 0.26 0.25 0.11 0.06 0
## sv2.159 0.49 0.28 0.13 0.05 0.04 0
## sv2.160 0.74 0.16 0.07 0.02 0.01 0
## sv2.166 0.06 0.16 0.19 0.24 0.36 0
## sv2.170 0.05 0.10 0.12 0.26 0.47 0
## sv2.642 0.05 0.15 0.22 0.27 0.31 0
## sv2.151 0.43 0.28 0.17 0.07 0.04 0
## sv2.162 0.45 0.31 0.14 0.06 0.04 0
## sv2.178 0.65 0.19 0.11 0.03 0.02 0
## Sv1.034 0.14 0.29 0.34 0.15 0.07 0
## sv2.191 0.30 0.35 0.25 0.07 0.03 0
## sv2.533 0.07 0.19 0.27 0.28 0.18 0
## sv2.534 0.30 0.35 0.23 0.06 0.05 0
## sv2.536 0.14 0.32 0.30 0.14 0.10 0
## sv2.538 0.13 0.31 0.34 0.16 0.07 0
## sv2.209 0.07 0.18 0.27 0.27 0.21 0
## sv2.274 0.07 0.23 0.30 0.23 0.18 0
## sv2.279 0.03 0.12 0.25 0.39 0.21 0
## sv2.572 0.33 0.30 0.26 0.07 0.04 0
## sv2.644 0.37 0.30 0.20 0.09 0.04 0
## sv2.645 0.24 0.21 0.23 0.18 0.15 0
## sv2.222 0.05 0.19 0.30 0.28 0.19 0
## sv2.223 0.17 0.37 0.28 0.11 0.06 0
## sv2.648 0.09 0.17 0.24 0.31 0.19 0
## Sv1.036 0.24 0.38 0.25 0.09 0.04 0
## sv2.221 0.32 0.36 0.21 0.07 0.03 0
## sv2.262 0.17 0.36 0.25 0.14 0.08 0
## Sv1.006 0.08 0.21 0.31 0.24 0.16 0
## sv2.233 0.07 0.23 0.31 0.22 0.17 0
## sv2.236 0.05 0.16 0.23 0.27 0.29 0
## Sv1.051 0.31 0.38 0.22 0.07 0.03 0
## sv2.228 0.47 0.28 0.17 0.05 0.03 0
## sv2.229 0.36 0.36 0.18 0.06 0.04 0
## sv2.235 0.10 0.22 0.27 0.24 0.18 0
## sv2.290 0.09 0.21 0.26 0.26 0.18 0
## sv2.574 0.06 0.20 0.26 0.23 0.24 0
## sv2.205 0.53 0.26 0.14 0.05 0.02 0
## sv2.257 0.51 0.30 0.12 0.05 0.03 0
## sv2.261 0.45 0.31 0.18 0.04 0.02 0
## sv2.244 0.06 0.18 0.23 0.31 0.22 0
## sv2.258 0.09 0.18 0.21 0.26 0.26 0
## sv2.585 0.03 0.12 0.23 0.34 0.29 0
## sv2.249 0.35 0.38 0.20 0.05 0.02 0
## sv2.251 0.40 0.33 0.20 0.03 0.03 0
## sv2.256 0.37 0.36 0.21 0.04 0.02 0
## Sv1.055 0.04 0.11 0.18 0.28 0.40 0
## Sv1.063 0.08 0.17 0.22 0.22 0.30 0
## sv2.620 0.12 0.24 0.32 0.18 0.14 0
## sv2.291 0.25 0.28 0.26 0.12 0.09 0
## sv2.301 0.21 0.38 0.28 0.10 0.04 0
## sv2.652 0.16 0.27 0.29 0.15 0.13 0
## sv2.331 0.20 0.33 0.24 0.11 0.12 0
## sv2.654 0.14 0.26 0.28 0.19 0.13 0
## sv2.656 0.32 0.27 0.23 0.10 0.08 0
## sv2.311 0.29 0.35 0.24 0.07 0.05 0
## sv2.343 0.25 0.32 0.18 0.15 0.10 0
## sv2.346 0.27 0.30 0.24 0.12 0.07 0
## Sv1.071 0.05 0.19 0.22 0.26 0.28 0
## Sv1.075 0.04 0.13 0.21 0.30 0.32 0
## sv2.353 0.07 0.19 0.27 0.30 0.18 0
## Sv1.067 0.12 0.25 0.29 0.18 0.15 0
## sv2.342 0.36 0.25 0.22 0.09 0.07 0
## sv2.362 0.39 0.23 0.20 0.08 0.09 0
## Sv1.037 0.12 0.27 0.30 0.18 0.13 0
## sv2.440 0.21 0.30 0.24 0.15 0.10 0
## sv2.658 0.23 0.30 0.24 0.11 0.11 0
## Sv1.032 0.25 0.30 0.22 0.13 0.10 0
## sv2.437 0.32 0.30 0.24 0.08 0.06 0
## sv2.438 0.26 0.32 0.19 0.13 0.10 0
## sv2.551 0.12 0.31 0.32 0.17 0.08 0
## sv2.552 0.19 0.32 0.25 0.13 0.11 0
## sv2.722 0.07 0.24 0.34 0.22 0.14 0
## sv2.367 0.47 0.27 0.17 0.04 0.05 0
## sv2.369 0.21 0.36 0.25 0.11 0.07 0
## sv2.376 0.17 0.30 0.20 0.18 0.16 0
## sv2.557 0.19 0.31 0.23 0.15 0.12 0
## sv2.560 0.10 0.20 0.28 0.24 0.19 0
## sv2.561 0.17 0.25 0.24 0.20 0.14 0
## sv2.396 0.36 0.35 0.18 0.06 0.05 0
## sv2.402 0.27 0.35 0.23 0.10 0.05 0
## sv2.413 0.31 0.27 0.19 0.11 0.10 0
## Sv1.040 0.11 0.27 0.21 0.25 0.16 0
## sv2.477 0.14 0.25 0.25 0.19 0.16 0
## sv2.590 0.12 0.25 0.33 0.19 0.11 0
## sv2.593 0.42 0.31 0.20 0.04 0.02 0
## sv2.662 0.67 0.20 0.09 0.03 0.01 0
## sv2.663 0.50 0.26 0.14 0.05 0.04 0
## Sv1.005 0.12 0.26 0.33 0.20 0.09 0
## sv2.493 0.08 0.18 0.24 0.26 0.24 0
## sv2.607 0.13 0.25 0.25 0.19 0.18 0
## sv2.488 0.41 0.28 0.19 0.08 0.04 0
## sv2.608 0.30 0.35 0.24 0.08 0.04 0
## sv2.610 0.54 0.25 0.14 0.04 0.03 0
## Sv1.066 0.08 0.21 0.29 0.23 0.18 0
## sv2.507 0.06 0.19 0.25 0.28 0.21 0
## sv2.508 0.09 0.22 0.25 0.25 0.19 0
## sv2.613 0.39 0.30 0.18 0.06 0.05 0
## sv2.667 0.49 0.21 0.12 0.09 0.08 0
## sv2.668 0.47 0.26 0.17 0.06 0.04 0
## sv2.116 0.05 0.18 0.28 0.30 0.19 0
## sv2.148 0.05 0.19 0.25 0.30 0.22 0
## sv2.149 0.06 0.20 0.27 0.31 0.17 0
## sv2.672 0.07 0.22 0.37 0.21 0.13 0
## sv2.674 0.17 0.28 0.23 0.17 0.15 0
## sv2.676 0.07 0.22 0.36 0.21 0.13 0
## sv2.174 0.09 0.21 0.27 0.25 0.18 0
## sv2.177 0.12 0.23 0.29 0.21 0.15 0
## sv2.678 0.03 0.15 0.29 0.31 0.22 0
## sv2.202 0.09 0.29 0.39 0.17 0.06 0
## sv2.203 0.09 0.27 0.41 0.15 0.07 0
## sv2.680 0.14 0.31 0.37 0.11 0.06 0
## sv2.683 0.05 0.17 0.31 0.28 0.19 0
## sv2.684 0.04 0.17 0.25 0.33 0.22 0
## sv2.685 0.07 0.17 0.28 0.27 0.21 0
## sv2.283 0.07 0.26 0.34 0.21 0.12 0
## sv2.284 0.10 0.27 0.32 0.20 0.10 0
## sv2.286 0.04 0.19 0.33 0.27 0.16 0
## sv2.238 0.06 0.16 0.28 0.27 0.24 0
## sv2.240 0.05 0.20 0.36 0.24 0.14 0
## sv2.686 0.04 0.16 0.33 0.28 0.20 0
## Sv1.077 0.05 0.14 0.27 0.26 0.28 0
## Sv1.091 0.08 0.21 0.32 0.25 0.15 0
## sv2.219 0.04 0.13 0.32 0.27 0.23 0
## sv2.195 0.10 0.25 0.28 0.24 0.12 0
## sv2.687 0.03 0.13 0.27 0.30 0.28 0
## sv2.688 0.03 0.15 0.30 0.30 0.22 0
## sv2.298 0.08 0.18 0.26 0.27 0.21 0
## sv2.299 0.05 0.10 0.15 0.27 0.43 0
## sv2.410 0.05 0.21 0.29 0.26 0.19 0
## sv2.318 0.11 0.23 0.28 0.22 0.16 0
## sv2.332 0.06 0.22 0.32 0.23 0.18 0
## sv2.364 0.12 0.27 0.30 0.18 0.13 0
## sv2.117 0.02 0.13 0.23 0.32 0.29 0
## sv2.323 0.12 0.23 0.29 0.20 0.15 0
## sv2.324 0.13 0.21 0.28 0.19 0.20 0
## Sv1.079 0.10 0.30 0.32 0.17 0.11 0
## sv2.126 0.14 0.26 0.33 0.16 0.11 0
## sv2.447 0.14 0.28 0.31 0.17 0.10 0
## sv2.381 0.12 0.30 0.32 0.16 0.10 0
## sv2.690 0.11 0.27 0.36 0.16 0.09 0
## sv2.692 0.13 0.32 0.35 0.13 0.07 0
## sv2.411 0.05 0.19 0.32 0.26 0.17 0
## sv2.417 0.11 0.27 0.34 0.18 0.10 0
## sv2.429 0.04 0.16 0.29 0.29 0.21 0
## sv2.482 0.27 0.28 0.26 0.11 0.08 0
## sv2.695 0.17 0.28 0.25 0.17 0.13 0
## sv2.696 0.15 0.28 0.26 0.18 0.14 0
## sv2.701 0.16 0.27 0.29 0.16 0.13 0
## sv2.702 0.06 0.22 0.31 0.26 0.14 0
## sv2.703 0.12 0.24 0.28 0.20 0.15 0
## sv2.706 0.03 0.14 0.25 0.32 0.26 0
## sv2.707 0.05 0.19 0.33 0.25 0.19 0
## sv2.708 0.09 0.21 0.26 0.25 0.19 0
# Descriptive statistics (n, mean, sd, median, skew, kurtosis, se) for
# the five scale scores (A, C, E, N, O) produced by scoreItems()
describe(psicom$scores)
## vars n mean sd median trimmed mad min max range skew kurtosis
## A 1 1135 3.39 0.47 3.36 3.38 0.49 2.06 4.83 2.78 0.12 -0.24
## C 2 1135 3.52 0.61 3.49 3.51 0.66 1.58 5.00 3.42 0.15 -0.55
## E 3 1135 3.34 0.49 3.33 3.34 0.49 1.41 4.78 3.37 -0.08 0.12
## N 4 1135 3.14 0.58 3.07 3.12 0.55 1.59 4.93 3.33 0.30 -0.12
## O 5 1135 3.41 0.60 3.33 3.39 0.60 1.74 4.96 3.22 0.31 -0.44
## se
## A 0.01
## C 0.02
## E 0.01
## N 0.02
## O 0.02
Modelo de Samejima resposta graduada via mirt
- Nesse exercício note o uso do dplyr
- Note também as discriminações dos itens! Por que há itens com discriminação negativa?
# Select the items belonging to a single factor (done just below)
library(dplyr)
##
## Attaching package: 'dplyr'
## The following objects are masked from 'package:stats':
##
## filter, lag
## The following objects are masked from 'package:base':
##
## intersect, setdiff, setequal, union
# From the item dictionary, keep the rows for factor "C" and retain
# only the item-code column
items <-
  item_dic2 %>%
  filter(factor == "C") %>%
  select(coditem)
library(mirt)
## Loading required package: stats4
## Loading required package: lattice
# Fit a unidimensional model (mirt's default for ordered polytomous
# data is Samejima's graded response model) to the factor-C items.
# all_of() makes the external-vector column selection explicit;
# passing a bare external vector to select() is deprecated in tidyselect
# and emits ambiguity warnings in current dplyr.
mod_graded <-
df %>% select(all_of(items$coditem)) %>%
mirt(1, TOL = .001)
##
Iteration: 1, Log-Lik: -71225.895, Max-Change: 2.03329
Iteration: 2, Log-Lik: -65545.590, Max-Change: 1.81945
Iteration: 3, Log-Lik: -65103.250, Max-Change: 1.15219
Iteration: 4, Log-Lik: -65013.049, Max-Change: 0.41521
Iteration: 5, Log-Lik: -64991.783, Max-Change: 0.12837
Iteration: 6, Log-Lik: -64987.941, Max-Change: 0.04723
Iteration: 7, Log-Lik: -64987.476, Max-Change: 0.01490
Iteration: 8, Log-Lik: -64986.226, Max-Change: 0.01639
Iteration: 9, Log-Lik: -64985.263, Max-Change: 0.01356
Iteration: 10, Log-Lik: -64984.366, Max-Change: 0.01175
Iteration: 11, Log-Lik: -64983.731, Max-Change: 0.01022
Iteration: 12, Log-Lik: -64983.209, Max-Change: 0.00879
Iteration: 13, Log-Lik: -64981.840, Max-Change: 0.00985
Iteration: 14, Log-Lik: -64981.614, Max-Change: 0.00567
Iteration: 15, Log-Lik: -64981.442, Max-Change: 0.00526
Iteration: 16, Log-Lik: -64981.018, Max-Change: 0.00445
Iteration: 17, Log-Lik: -64980.928, Max-Change: 0.00309
Iteration: 18, Log-Lik: -64980.878, Max-Change: 0.00345
Iteration: 19, Log-Lik: -64980.755, Max-Change: 0.00220
Iteration: 20, Log-Lik: -64980.724, Max-Change: 0.00241
Iteration: 21, Log-Lik: -64980.697, Max-Change: 0.00189
Iteration: 22, Log-Lik: -64980.637, Max-Change: 0.00154
Iteration: 23, Log-Lik: -64980.624, Max-Change: 0.00096
# Item parameters in classic IRT parameterization (a, b1-b4);
# simplify = TRUE returns matrices instead of a per-item list
coef(mod_graded, simplify=TRUE, IRTpars = TRUE)
## $items
## a b1 b2 b3 b4
## sv2.209 2.134 -1.843 -0.872 0.031 1.022
## sv2.274 2.013 -1.921 -0.717 0.268 1.208
## sv2.279 1.363 -3.028 -1.622 -0.365 1.329
## sv2.572 -0.733 1.001 -0.935 -3.131 -4.618
## sv2.644 -0.735 0.758 -1.151 -2.824 -4.640
## sv2.645 -0.084 13.429 2.285 -8.886 -21.117
## sv2.222 1.980 -2.188 -0.938 0.089 1.176
## sv2.223 0.770 -2.237 0.219 2.193 3.936
## sv2.648 1.431 -2.089 -0.979 -0.012 1.365
## Sv1.036 -0.652 1.914 -0.837 -3.061 -5.033
## sv2.221 -0.664 1.190 -1.263 -3.396 -5.346
## sv2.262 -0.750 2.335 -0.165 -1.858 -3.508
## Sv1.006 2.063 -1.805 -0.760 0.257 1.267
## sv2.233 1.872 -1.981 -0.741 0.316 1.275
## sv2.236 1.650 -2.410 -1.192 -0.252 0.760
## Sv1.051 -0.715 1.245 -1.210 -3.319 -5.138
## sv2.228 -0.718 0.162 -1.664 -3.638 -5.019
## sv2.229 -0.669 0.891 -1.583 -3.601 -5.082
## sv2.235 1.633 -1.825 -0.679 0.285 1.346
## sv2.290 1.183 -2.326 -0.918 0.244 1.615
## sv2.574 1.208 -2.728 -1.095 0.089 1.184
## sv2.205 -0.887 -0.182 -1.730 -3.203 -4.484
## sv2.257 -0.903 -0.070 -1.810 -3.096 -4.300
## sv2.261 -1.048 0.186 -1.325 -2.913 -3.893
## sv2.244 2.111 -2.007 -0.945 -0.167 0.951
## sv2.258 0.934 -2.819 -1.248 -0.095 1.300
## sv2.585 1.303 -3.146 -1.683 -0.515 0.907
## sv2.249 -0.694 0.946 -1.637 -4.001 -5.847
## sv2.251 -0.877 0.474 -1.379 -3.446 -4.481
## sv2.256 -0.697 0.851 -1.552 -4.189 -5.650
## sv2.683 2.136 -2.035 -0.990 0.032 1.096
## sv2.684 1.539 -2.677 -1.211 -0.183 1.150
## sv2.685 1.675 -2.146 -1.020 0.020 1.115
## sv2.283 1.890 -1.966 -0.635 0.530 1.630
## sv2.284 1.380 -2.031 -0.573 0.736 2.053
## sv2.286 2.297 -2.115 -0.929 0.151 1.218
## sv2.238 1.934 -2.070 -1.011 -0.029 0.938
## sv2.240 1.868 -2.226 -0.915 0.356 1.464
## sv2.686 1.845 -2.434 -1.134 0.048 1.128
## Sv1.077 2.120 -2.092 -1.113 -0.160 0.725
## Sv1.091 1.568 -2.065 -0.851 0.347 1.554
## sv2.219 1.824 -2.357 -1.266 -0.050 0.973
## sv2.195 1.132 -2.329 -0.702 0.566 2.128
## sv2.687 1.866 -2.584 -1.360 -0.266 0.763
## sv2.688 1.900 -2.562 -1.221 -0.075 1.024
##
## $means
## F1
## 0
##
## $cov
## F1
## F1 1
# Category response curves for item 4 (sv2.572) — one of the items with
# a negative discrimination in the coefficient table above
itemplot(mod_graded, 4, type = 'trace')

# Item 1 (sv2.209): a typical positively discriminating item
itemplot(mod_graded, 1, type = 'trace')

# Item 6 (sv2.645): near-zero discrimination and extreme thresholds
itemplot(mod_graded, 6, type = 'trace')

# Trace lines for the first eight items in a single panel
plot(mod_graded, type = "trace", which.items = c(1:8))

# Test information function of the graded model
plot(mod_graded, type = "info")

Modelo “rating scale” de Andrich
- Qual o problema de rodar as análises sem inverter os itens?
- Note a versatilidade da linguagem vetorial do R!
# Reverse-score the negatively keyed items (pole == 0): on the 1-5
# response scale the reversed value is 6 - x, applied column-wise to
# every negative item at once (vectorized, no loop needed)
itens_negativos <-
  item_dic2 %>%
  filter(pole == 0) %>%
  select(coditem)

df2 <- df
df2[, itens_negativos$coditem] <- 6 - df2[, itens_negativos$coditem]
# Fit Andrich's rating scale model (itemtype = 'rsm') to the factor-C
# items: category thresholds are shared across items and each item gets
# its own location shift (see the identical b1-b4 rows in the output).
# all_of() makes the external-vector column selection explicit;
# a bare external vector in select() is deprecated in tidyselect.
mod_rsmIRT <-
df2 %>% select(all_of(items$coditem)) %>%
mirt(1, itemtype = 'rsm', TOL = .001)
##
Iteration: 1, Log-Lik: -75472.432, Max-Change: 1.19263
Iteration: 2, Log-Lik: -68011.511, Max-Change: 0.35536
Iteration: 3, Log-Lik: -67798.718, Max-Change: 0.13651
Iteration: 4, Log-Lik: -67772.432, Max-Change: 0.05519
Iteration: 5, Log-Lik: -67749.446, Max-Change: 0.04334
Iteration: 6, Log-Lik: -67735.632, Max-Change: 0.03149
Iteration: 7, Log-Lik: -67722.831, Max-Change: 0.02526
Iteration: 8, Log-Lik: -67710.196, Max-Change: 0.02369
Iteration: 9, Log-Lik: -67697.690, Max-Change: 0.02244
Iteration: 10, Log-Lik: -67685.276, Max-Change: 0.02160
Iteration: 11, Log-Lik: -67672.963, Max-Change: 0.02221
Iteration: 12, Log-Lik: -67660.699, Max-Change: 0.02065
Iteration: 13, Log-Lik: -67648.587, Max-Change: 0.01968
Iteration: 14, Log-Lik: -67636.634, Max-Change: 0.01897
Iteration: 15, Log-Lik: -67624.876, Max-Change: 0.01830
Iteration: 16, Log-Lik: -67613.308, Max-Change: 0.02110
Iteration: 17, Log-Lik: -67601.916, Max-Change: 0.01745
Iteration: 18, Log-Lik: -67590.771, Max-Change: 0.01637
Iteration: 19, Log-Lik: -67579.876, Max-Change: 0.01565
Iteration: 20, Log-Lik: -67569.248, Max-Change: 0.01926
Iteration: 21, Log-Lik: -67558.894, Max-Change: 0.01438
Iteration: 22, Log-Lik: -67548.833, Max-Change: 0.01419
Iteration: 23, Log-Lik: -67539.095, Max-Change: 0.01399
Iteration: 24, Log-Lik: -67529.693, Max-Change: 0.01377
Iteration: 25, Log-Lik: -67520.638, Max-Change: 0.01888
Iteration: 26, Log-Lik: -67511.924, Max-Change: 0.01309
Iteration: 27, Log-Lik: -67503.581, Max-Change: 0.01284
Iteration: 28, Log-Lik: -67495.610, Max-Change: 0.01256
Iteration: 29, Log-Lik: -67488.014, Max-Change: 0.01228
Iteration: 30, Log-Lik: -67480.795, Max-Change: 0.01698
Iteration: 31, Log-Lik: -67473.923, Max-Change: 0.01151
Iteration: 32, Log-Lik: -67467.446, Max-Change: 0.01120
Iteration: 33, Log-Lik: -67461.338, Max-Change: 0.01089
Iteration: 34, Log-Lik: -67455.592, Max-Change: 0.01057
Iteration: 35, Log-Lik: -67450.199, Max-Change: 0.01453
Iteration: 36, Log-Lik: -67445.129, Max-Change: 0.00996
Iteration: 37, Log-Lik: -67440.405, Max-Change: 0.00949
Iteration: 38, Log-Lik: -67436.003, Max-Change: 0.00901
Iteration: 39, Log-Lik: -67431.911, Max-Change: 0.00880
Iteration: 40, Log-Lik: -67411.778, Max-Change: 0.01806
Iteration: 41, Log-Lik: -67409.221, Max-Change: 0.00730
Iteration: 42, Log-Lik: -67407.157, Max-Change: 0.00649
Iteration: 43, Log-Lik: -67397.274, Max-Change: 0.00991
Iteration: 44, Log-Lik: -67396.129, Max-Change: 0.00446
Iteration: 45, Log-Lik: -67395.196, Max-Change: 0.00410
Iteration: 46, Log-Lik: -67390.723, Max-Change: 0.00371
Iteration: 47, Log-Lik: -67390.264, Max-Change: 0.00294
Iteration: 48, Log-Lik: -67389.861, Max-Change: 0.00556
Iteration: 49, Log-Lik: -67389.179, Max-Change: 0.00245
Iteration: 50, Log-Lik: -67388.874, Max-Change: 0.00224
Iteration: 51, Log-Lik: -67388.596, Max-Change: 0.00225
Iteration: 52, Log-Lik: -67387.270, Max-Change: 0.00258
Iteration: 53, Log-Lik: -67387.139, Max-Change: 0.00158
Iteration: 54, Log-Lik: -67387.022, Max-Change: 0.00142
Iteration: 55, Log-Lik: -67386.463, Max-Change: 0.00108
Iteration: 56, Log-Lik: -67386.409, Max-Change: 0.00100
# NOTE(review): 'irt.parms' is not an argument of mirt's coef() — the
# correct name is IRTpars (compare the coef() calls for the other
# models in this document). Because coef() accepts '...', the
# misspelled argument is silently ignored, so this actually prints the
# default slope-intercept parameterization (a1, shared b1-b4, per-item
# c), not traditional IRT parameters. The thresholds code further down
# depends on exactly this default layout, so fix with care.
coef(mod_rsmIRT , simplify=TRUE, irt.parms = TRUE)
## $items
## a1 b1 b2 b3 b4 c
## sv2.209 1 -1.499 -0.737 0.033 0.711 0.000
## sv2.274 1 -1.499 -0.737 0.033 0.711 -0.148
## sv2.279 1 -1.499 -0.737 0.033 0.711 0.279
## sv2.572 1 -1.499 -0.737 0.033 0.711 0.499
## sv2.644 1 -1.499 -0.737 0.033 0.711 0.556
## sv2.645 1 -1.499 -0.737 0.033 0.711 -0.141
## sv2.222 1 -1.499 -0.737 0.033 0.711 0.001
## sv2.223 1 -1.499 -0.737 0.033 0.711 -0.877
## sv2.648 1 -1.499 -0.737 0.033 0.711 -0.025
## Sv1.036 1 -1.499 -0.737 0.033 0.711 0.350
## sv2.221 1 -1.499 -0.737 0.033 0.711 0.550
## sv2.262 1 -1.499 -0.737 0.033 0.711 0.045
## Sv1.006 1 -1.499 -0.737 0.033 0.711 -0.155
## sv2.233 1 -1.499 -0.737 0.033 0.711 -0.167
## sv2.236 1 -1.499 -0.737 0.033 0.711 0.264
## Sv1.051 1 -1.499 -0.737 0.033 0.711 0.550
## sv2.228 1 -1.499 -0.737 0.033 0.711 0.867
## sv2.229 1 -1.499 -0.737 0.033 0.711 0.670
## sv2.235 1 -1.499 -0.737 0.033 0.711 -0.201
## sv2.290 1 -1.499 -0.737 0.033 0.711 -0.153
## sv2.574 1 -1.499 -0.737 0.033 0.711 0.033
## sv2.205 1 -1.499 -0.737 0.033 0.711 1.027
## sv2.257 1 -1.499 -0.737 0.033 0.711 1.031
## sv2.261 1 -1.499 -0.737 0.033 0.711 0.890
## sv2.244 1 -1.499 -0.737 0.033 0.711 0.110
## sv2.258 1 -1.499 -0.737 0.033 0.711 0.059
## sv2.585 1 -1.499 -0.737 0.033 0.711 0.403
## sv2.249 1 -1.499 -0.737 0.033 0.711 0.709
## sv2.251 1 -1.499 -0.737 0.033 0.711 0.792
## sv2.256 1 -1.499 -0.737 0.033 0.711 0.739
## sv2.683 1 -1.499 -0.737 0.033 0.711 0.026
## sv2.684 1 -1.499 -0.737 0.033 0.711 0.168
## sv2.685 1 -1.499 -0.737 0.033 0.711 0.037
## sv2.283 1 -1.499 -0.737 0.033 0.711 -0.317
## sv2.284 1 -1.499 -0.737 0.033 0.711 -0.435
## sv2.286 1 -1.499 -0.737 0.033 0.711 -0.035
## sv2.238 1 -1.499 -0.737 0.033 0.711 0.103
## sv2.240 1 -1.499 -0.737 0.033 0.711 -0.134
## sv2.686 1 -1.499 -0.737 0.033 0.711 0.078
## Sv1.077 1 -1.499 -0.737 0.033 0.711 0.235
## Sv1.091 1 -1.499 -0.737 0.033 0.711 -0.187
## sv2.219 1 -1.499 -0.737 0.033 0.711 0.166
## sv2.195 1 -1.499 -0.737 0.033 0.711 -0.338
## sv2.687 1 -1.499 -0.737 0.033 0.711 0.340
## sv2.688 1 -1.499 -0.737 0.033 0.711 0.181
##
## $means
## F1
## 0
##
## $cov
## F1
## F1 0.49
# Trace lines for the first eight items under the rating scale model
plot(mod_rsmIRT, type = "trace", which.items = c(1:8))

Mapas de construto com o pacote WrightMap
# Load the WrightMap package for construct (Wright) maps
library(WrightMap)
# Person ability (theta) estimates via maximum likelihood.
# NOTE(review): ML scoring returns -Inf/Inf for all-minimum/all-maximum
# response patterns; the PCM section later replaces infinite thetas
# with NA before plotting, but that is not done here — confirm extreme
# patterns are absent or handle them the same way.
thetas <- fscores(mod_rsmIRT, method = "ML")
# Build a data frame of item thresholds from the rating scale model.
# NOTE(review): 'irt.parms' is not a coef() argument in mirt (it is
# IRTpars) and is silently ignored via '...', so $items holds the
# DEFAULT parameterization: columns a1, b1-b4 (identical across items)
# and a per-item location c. The arithmetic below relies on this layout.
thresholds <- coef(mod_rsmIRT , simplify=TRUE, irt.parms = TRUE)$items %>%
as.data.frame()
# Expose the item code (stored in rownames) as a regular column for joining
thresholds$coditem <- rownames(thresholds)
# Item-specific category thresholds: shared thresholds (cols 2:5, b1-b4)
# shifted by each item's location (col 6, c)
thresholds[ , 2:5] <- thresholds[ , 2:5] + thresholds[ , 6]
# Attach item metadata from the dictionary (columns 1, 3, 5, 9:11 —
# presumably including coditem and item_text; confirm against item_dic2)
var_itens <- names(item_dic2)[c(1,3,5, 9:11)]
thresholds <- left_join(thresholds, item_dic2[ , var_itens], by="coditem")
# Order the items by their location c so the map reads easiest-to-hardest
thresholds <- thresholds %>% arrange(c)
# Classic Wright map: person theta distribution alongside the item
# category thresholds (columns 2:5 of the thresholds data frame)
wrightMap(thetas, thresholds[ , 2:5], item.side = itemClassic,
return.thresholds = FALSE)

# A more elaborate Wright map
# One color per threshold category (4 colors), recycled over 45 items
library(RColorBrewer)
cores <- rep(brewer.pal(4, "Set1"), 45)
threshold_col <- matrix(cores, byrow = TRUE, ncol = 4)
library(stringr)
# Construct map
wrightMap(thetas, thresholds[ , 2:5],
main.title = "",
item.prop = 0.75, # proportion of item panel vs. theta panel
show.thr.lab= FALSE, # hide threshold labels
thr.sym.col.fg = threshold_col, # threshold symbol foreground colors
thr.sym.col.bg = threshold_col, # threshold symbol background colors
thr.sym.cex = .8, # threshold symbol size
axis.items="", # suppress the item-axis label
label.items.srt=90, # rotate item labels to vertical
label.items.cex = .4, # item label font size
label.items = str_sub(thresholds$item_text, 1, 20), # first 20 chars of item text
return.thresholds = FALSE, cutpoints = -1)

Mapa de construto baseado no modelo “Partial Credit” de Masters
# Load a helper that draws a construct map for partial credit models.
# NOTE(review): sourcing remote code over plain http is insecure and
# non-reproducible — consider vendoring the script or at least https.
source("http://www.labape.com.br/rprimi/R/make_construct_map5.R")
# Item dictionary rows for factor C — all columns this time, because
# the helper below receives the full dictionary via 'dic ='
items <- item_dic2 %>% filter(factor == "C")
# Calibrate the items with Masters' partial credit model
# (in mirt, itemtype = 'Rasch' with polytomous data fits the PCM).
# all_of() makes the external-vector column selection explicit;
# a bare external vector in select() is deprecated in tidyselect.
mod_pcmIRT <-
df2 %>% select(all_of(items$coditem)) %>%
mirt(1, itemtype = 'Rasch', TOL = .001)
##
Iteration: 1, Log-Lik: -67603.268, Max-Change: 1.02227
Iteration: 2, Log-Lik: -66835.082, Max-Change: 0.16355
Iteration: 3, Log-Lik: -66824.495, Max-Change: 0.04569
Iteration: 4, Log-Lik: -66823.470, Max-Change: 0.03424
Iteration: 5, Log-Lik: -66822.645, Max-Change: 0.01408
Iteration: 6, Log-Lik: -66822.485, Max-Change: 0.00596
Iteration: 7, Log-Lik: -66822.440, Max-Change: 0.00575
Iteration: 8, Log-Lik: -66822.401, Max-Change: 0.00394
Iteration: 9, Log-Lik: -66822.372, Max-Change: 0.00336
Iteration: 10, Log-Lik: -66822.240, Max-Change: 0.00555
Iteration: 11, Log-Lik: -66822.221, Max-Change: 0.00125
Iteration: 12, Log-Lik: -66822.210, Max-Change: 0.00374
Iteration: 13, Log-Lik: -66822.198, Max-Change: 0.00083
# Item parameters in IRT parameterization: a is fixed at 1 for the
# Rasch/PCM family; b1-b4 are the item-specific category thresholds
coef(mod_pcmIRT , simplify=TRUE, IRTpars = TRUE)
## $items
## a b1 b2 b3 b4
## sv2.209 1 -1.453 -0.695 -0.006 0.633
## sv2.274 1 -1.711 -0.527 0.315 0.698
## sv2.279 1 -1.912 -1.138 -0.523 0.948
## sv2.572 1 -1.156 -1.720 -0.331 0.124
## sv2.644 1 -1.466 -1.159 -0.599 -0.024
## sv2.645 1 -0.648 -0.496 0.106 0.204
## sv2.222 1 -1.962 -0.717 0.062 0.793
## sv2.223 1 -1.089 0.245 1.228 1.474
## sv2.648 1 -1.174 -0.590 -0.286 0.881
## Sv1.036 1 -1.361 -1.389 -0.552 0.740
## sv2.221 1 -1.451 -1.466 -0.680 0.293
## sv2.262 1 -1.065 -0.930 -0.369 1.119
## Sv1.006 1 -1.462 -0.653 0.280 0.837
## sv2.233 1 -1.681 -0.551 0.379 0.705
## sv2.236 1 -1.756 -0.755 -0.260 0.199
## Sv1.051 1 -1.453 -1.570 -0.722 0.412
## sv2.228 1 -1.067 -1.762 -0.711 -0.437
## sv2.229 1 -1.017 -1.621 -0.890 0.148
## sv2.235 1 -1.257 -0.423 0.170 0.737
## sv2.290 1 -1.286 -0.450 0.032 0.810
## sv2.574 1 -1.734 -0.554 0.101 0.300
## sv2.205 1 -1.323 -1.598 -0.902 -0.632
## sv2.257 1 -1.222 -1.421 -1.198 -0.491
## sv2.261 1 -1.125 -2.001 -0.792 -0.293
## sv2.244 1 -1.714 -0.545 -0.365 0.668
## sv2.258 1 -1.211 -0.452 -0.239 0.306
## sv2.585 1 -1.963 -1.039 -0.526 0.394
## sv2.249 1 -1.527 -1.873 -0.854 0.255
## sv2.251 1 -0.959 -2.250 -0.708 -0.067
## sv2.256 1 -1.138 -2.203 -0.746 0.131
## sv2.683 1 -1.715 -0.878 0.076 0.756
## sv2.684 1 -2.062 -0.729 -0.333 0.732
## sv2.685 1 -1.485 -0.779 -0.013 0.612
## sv2.283 1 -1.783 -0.486 0.583 1.144
## sv2.284 1 -1.413 -0.346 0.612 1.266
## sv2.286 1 -2.064 -0.843 0.215 0.920
## sv2.238 1 -1.554 -0.849 -0.021 0.470
## sv2.240 1 -1.920 -0.849 0.437 1.020
## sv2.686 1 -2.077 -0.998 0.136 0.702
## Sv1.077 1 -1.646 -0.980 -0.076 0.234
## Sv1.091 1 -1.427 -0.693 0.312 0.994
## sv2.219 1 -1.729 -1.196 0.098 0.486
## sv2.195 1 -1.381 -0.314 0.262 1.241
## sv2.687 1 -2.109 -1.132 -0.205 0.305
## sv2.688 1 -2.263 -1.042 -0.039 0.626
##
## $means
## F1
## 0
##
## $cov
## F1
## F1 0.499
# Estimate person scores; ML scoring yields -Inf/Inf for all-minimum/
# all-maximum response patterns, so replace infinite estimates with NA
# before plotting
thetas <- fscores(mod_pcmIRT, method = "ML")
thetas[is.infinite(thetas)] <- NA
# Response-category labels (Portuguese: nothing / a little /
# more-or-less / a lot / everything)
categ_label <- c("nada", "pouco", "+ou-", "muito", "tudo")
# Draw the construct map over a theta range of [-4, 4]
make_construct_map5(mirtObj = mod_pcmIRT,
dic = items,
data = df2,
min = -4, max=4,
categ_label = categ_label)
##
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
##
## combine
##
## Attaching package: 'ggplot2'
## The following objects are masked from 'package:psych':
##
## %+%, alpha
## Loading required package: xtable
##
## Attaching package: 'likert'
## The following object is masked from 'package:dplyr':
##
## recode
## Loading required package: grid
## Warning: Removed 3 rows containing non-finite values (stat_bin).

# Reliability (rxx) as a function of theta for the PCM
plot(mod_pcmIRT, type = 'rxx')
