7

Given is the following data frame:

structure(list(UH6401 = c(1, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 
1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 
0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 
1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 
1, 0, 1, 1), UH6402 = c(1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 
0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 
1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 
0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 
1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 
0, 1, 1), UH6403 = c(1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 
1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 
1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 
1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 
0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 
1, 1), UH6404 = c(0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 
0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 
1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 
1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 
0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 
1), UH6409 = c(1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 0, 
1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 
0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 
1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 
1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0
), UH6410 = c(1, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 
1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 0, 1, 
1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 
1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 
0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0
), UH6411 = c(0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 
1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 
0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 
1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1
), UH6412 = c(1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 
1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1
), UH6503 = c(1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 
1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 
1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1
), UH66 = c(1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), 
    UH68 = c(0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 
    0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 
    0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
    0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 
    0, 0, 0, 0, 0, 0, 0, 0), UH6501a = c(1, 1, 1, 1, 1, 1, 1, 
    1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
    1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
    1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), UH6405a = c(1, 
    0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 
    0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 
    0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 1, 
    1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 
    1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 
    1, 0, 1, 1), UH6407a = c(1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 
    1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 
    0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 
    1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 
    1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 
    0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1), weight = c(405.002592353822, 
    479.360356183825, 526.548105855472, 810.005184707644, 312.321528531308, 
    930.961115757095, 567.383058387095, 475.323944260643, 1226.91439266118, 
    517.086839792615, 1200.2669656949, 810.005184707644, 656.723784884795, 
    605.370463928298, 668.467435759576, 558.112457492436, 793.751055244424, 
    479.360356183825, 1226.91439266118, 1606.54816212786, 1657.48609449633, 
    300.803580980276, 605.370463928298, 1140.55078447979, 669.102760422943, 
    810.005184707644, 1657.48609449633, 305.569853371963, 2994.30343152033, 
    762.922030382216, 479.360356183825, 1147.36030437824, 668.467435759576, 
    517.086839792615, 479.360356183825, 399.141865860217, 656.723784884795, 
    913.364738988386, 312.321528531308, 569.10576379231, 775.630259688922, 
    1207.22952429547, 1053.09621171094, 1140.55078447979, 314.857225320909, 
    668.467435759576, 2416.57081451012, 573.680152189121, 396.875527622212, 
    605.370463928298, 1036.3159447043, 3088.62283807823, 569.10576379231, 
    1140.55078447979, 2416.57081451012, 1147.36030437824, 762.922030382216, 
    702.064141140629, 351.032070570315, 629.714450641817, 517.086839792615, 
    1996.20228768022, 828.743047248167, 475.323944260643, 920.185794495882, 
    793.751055244424, 796.08788273764, 1197.42559758065, 405.002592353822, 
    418.584343119327, 300.803580980276, 654.76828203733, 2740.09421696516, 
    351.032070570315, 1069.6202614693, 2094.91447516374, 399.141865860217, 
    654.76828203733, 1003.65414063441, 573.680152189121, 851.074587580641, 
    913.364738988386, 762.922030382216, 1034.17367958523, 573.680152189121, 
    479.360356183825, 3208.8607844079, 654.76828203733, 908.055695892447, 
    328.361892442398, 1036.3159447043, 702.064141140629, 613.457196330588, 
    601.607161960551, 567.383058387095, 479.360356183825, 306.261087672466, 
    920.185794495882, 654.76828203733, 828.743047248167)), .Names = c("UH6401", 
"UH6402", "UH6403", "UH6404", "UH6409", "UH6410", "UH6411", "UH6412", 
"UH6503", "UH66", "UH68", "UH6501a", "UH6405a", "UH6407a", "weight"
), row.names = c(NA, 100L), class = "data.frame")

In social science we often have a weight variable that weights each case (row) by the factor given in that variable, to correct the sample so that it matches, e.g., the population's age distribution. If the weight variable of a row is "1.6", it means that this row needs to be counted 1.6 times to represent the underlying population.

In SPSS I would write

WEIGHT BY weight. 

and all procedures after that command will weight the data accordingly.

In R I can do that with xtabs using the command

xtabs(weight ~ UH6401, data=df)
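
For comparison, a minimal sketch (assuming the data frame above is stored as df): table() counts every row once, while xtabs(weight ~ ...) sums the weights per category, i.e. the weighted frequency table.

table(df$UH6401)                  # unweighted counts: every row counts once
xtabs(weight ~ UH6401, data = df) # weighted counts: every row counts 'weight' times
sum(df$weight)                    # total weight carried by the sample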

But what if I want to do an SVD or a PCA? Those functions have no weighting argument like xtabs does.

So the question is: is there a way to weight data in R like it is possible in SPSS? Whole-number weights would be easy (with a factor of "2" we would just duplicate the row), but what about all the weights that are fractional?


UPDATE:

The SVD or PCA was just an example! Take any other statistical procedure. In social science the samples are never perfect; to do a statistical analysis with sample data, the sample needs to represent the underlying population, but it usually doesn't. So we try to fix that deficit with weights, so that the sample represents the population!

Roman Luštrik
Dominik
  • For different procedures there is more than one way to weight data! E.g., in a maximum likelihood setting I've seen both the likelihoods and the log-likelihoods weighted, which can give quite different results! So there is no 'general' way of specifying weights, and in a lot of functions the concept of weighted data is not even considered (even though it would be valid). Tough luck, I guess. – Nick Sabbe Aug 11 '11 at 14:31

4 Answers

8

First of all, doing PCA on this data doesn't make sense. Second, SPSS does not perform PCA but factor analysis, which is something else. I know they call it PCA, but it isn't.

The WEIGHT BY in SPSS is nothing more than a replication weight, and is exactly the same as doing your analysis after repeating your cases using rep(): complete madness. To link to your example: in SPSS, FACTOR (which is used for the so-called PCA) does not take fractional weights.
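
To make that equivalence concrete (as an illustration of what SPSS effectively does, not a recommendation), a minimal sketch, assuming the question's data frame is called df and rescaling the weights to a mean of 1 so the replicated data stays roughly the same size:

w <- df$weight / mean(df$weight)                 # rescale so the average weight is 1
idx <- rep(seq_len(nrow(df)), times = round(w))  # fractional weights are rounded, just like FACTOR does
df_rep <- df[idx, ]                              # rows with round(w) == 0 are silently dropped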

If you want to perform weighted procedures, the only sensible way of doing that is using the correct method/function/package for that. In statistics, there is no one-size-fits-all weight procedure, contrary to what SPSS likes to make you believe.

In your example: weighted PCA in R is available in FactoMineR and aroma.light. But I strongly suggest you also take a look at the vegan package, as it contains a lot more useful ordination methods for the kind of data you're describing.
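
For illustration, a sketch of the FactoMineR route via its row.w argument (the variable selection is just an assumption; check ?PCA for the exact argument semantics):

library(FactoMineR)
vars <- setdiff(names(df), "weight")                      # use everything except the weight column
res  <- PCA(df[, vars], row.w = df$weight, graph = FALSE) # row weights enter the PCA directly
res$eig                                                   # weighted eigenvalues / percentage of variance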

Joris Meys
  • The PCA was just an example. Take a different procedure, like regression analysis (also not with this data, I know). The question is just how to weight data in R generally. Even if you think that the weight procedure in SPSS is madness, it is still the standard procedure in social science if you work with samples of a population, e.g. the socio-economic panel... – Dominik Aug 11 '11 at 14:14
  • @Dominik: It's in the answer. For weighted regression, use the specific procedures for weighted regression. For mimicking what SPSS does, use `rep()`. It's that easy. But whatever SPSS likes to make you believe, **there is no one-size-fits-all weight procedure in statistics** – Joris Meys Aug 11 '11 at 14:31
  • You deserve a +42 (in the Douglas Adams sense) for the repeated cluebat waving in this. I like the hint about madness too. – Dirk Eddelbuettel Aug 11 '11 at 14:34
  • @Dominik: I edited my answer to make it less aggressive (I'm not known for my feeling of tact, but no harm meant). – Joris Meys Aug 11 '11 at 14:40
  • @Joris Meys: never mind, I didn't take it that way. I just had the feeling of not being understood, but that is more my fault... ;-) But if I understood rep() correctly, you can't repeat a row in a df .8 times, can you? – Dominik Aug 11 '11 at 15:05
  • @Dominik: Nope. But you can't use fractional weights in SPSS with every command either. As said, even though you give fractional weights to FACTOR, they will be interpreted as replication weights and rounded. But SPSS won't tell you that unless you dig out their complete manual. – Joris Meys Aug 11 '11 at 15:09
  • @Joris +1 for pointing out that PCA and Factor analysis aren't the same thing. I've run into this before as well when using SPSS. There's a decent discussion of it here: http://stats.stackexchange.com/questions/1576/what-are-the-differences-between-factor-analysis-and-principal-component-analysis – Brandon Bertelsen Aug 11 '11 at 15:15
  • @Brandon, it doesn't end there. I've tackled the FA/PCA problem before. While it was enlightening to, in a sense, learn what SPSS is doing, I can't say I found it entertaining. Here's a post on the problem: http://stats.stackexchange.com/questions/612/is-psychprincipal-function-still-pca-when-using-rotation – Roman Luštrik Aug 17 '11 at 06:56
5

You probably need to get acquainted with the search engines for R: Baron's RSiteSearch and Rseek. This is one of the first hits for "weighted PCA" at Baron's site:

http://finzi.psych.upenn.edu/R/library/aroma.light/html/wpca.matrix.html

With the clarification in the comment to Joris Meys's response, the answer is often that one needs to be clear whether one desires sample weights versus other types of weighting. Regression weighting is done with the survey package. Lumley's book on survey methods distinguishes among three types of weights. (The "weights" in the lm function are variance weights, NOT sample weights.)
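
A minimal sketch of that sampling-weight workflow with the survey package (the model variables are chosen purely for illustration):

library(survey)
des <- svydesign(ids = ~1, weights = ~weight, data = df)  # no clustering, sampling weights only
svymean(~UH6401, des)                                     # weighted proportion
fit <- svyglm(UH6401 ~ UH6402 + UH6403, design = des, family = quasibinomial())
summary(fit)                                              # design-based standard errors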

Note: both PCA and factor analysis (experimental) are included in the survey package. So maybe Dominik's question requesting a unified approach to weighting in regression methods has a single "answer".
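
Continuing the sketch above, the design-based PCA is svyprcomp() (function name and arguments quoted from memory, so verify them against the survey documentation):

pc <- svyprcomp(~UH6401 + UH6402 + UH6403 + UH6404, design = des,
                scale. = TRUE, scores = TRUE)  # PCA using the sampling weights stored in 'des'
pc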

IRTFM
  • this is iteratively reweighted PCA. That's not a weighted PCA, but something completely different. – Joris Meys Aug 11 '11 at 13:46
  • I don't think that's what he's looking for. Dominik is asking about sample weights, and this seems to be something different. – Fojtasek Aug 11 '11 at 13:49
  • @Joris Meys: The advice was to learn appropriate search methods. The second hit was to William Revelle's tutorial that included weighted factor analysis and a comparison with SPSS methods, and the third hit was the same function you linked to in your reply. – IRTFM Aug 11 '11 at 14:28
  • @DWin : yeah, but link to the correct one. You can even take the link from aroma.light in my answer, I'll give you the upvote. – Joris Meys Aug 11 '11 at 14:29
  • It was the third hit and required only deleting a single character in the edit. – IRTFM Aug 11 '11 at 14:32
  • @Dwin I see. So if I understand you correctly, there is nothing like a global sample weight? – Dominik Aug 11 '11 at 14:33
  • If by "global sample weight" you mean a sample weight option to all regression type procedures, then yes, there is nothing like that. – IRTFM Aug 11 '11 at 14:36
  • @DWin your link to the survey package seems to be a very good solution for most cases of survey analysis. For all others there seems to be no general solution, if there is a solution at all. Thanks. – Dominik Aug 11 '11 at 15:03
0

I have just found a post on R-Bloggers which introduces the svydesign() function. As far as I know, this function from the 'survey' package works like the SPSS command, allowing you to create weighted data to use in further analysis. I find it more useful than using different functions from several packages in order to do multivariable analysis.

Note to @djhurio: The answer would have been better with code. It does seem a bit duplicative of my answer which pointed to the survey package that contains 'svydesign'. The cited webpage is still there 4 years later, but that might not always be the case.

IRTFM
Tito Sanz
0

I am not sure if this would suit you. See the R package weights.
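
For example, it offers weighted versions of some basic descriptives (function names as I recall them from that package, so double-check its manual):

library(weights)
wpct(df$UH6401, weight = df$weight)                 # weighted percentages of one variable
wtd.cor(df$UH6401, df$UH6402, weight = df$weight)   # weighted correlation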

djhurio