Is there a more elegant (less code) way of finding a matrix OUT with colSums(OUT) <= a and rowSums(OUT) <= b, given ORD (the order in which the cells are filled), such that sum(OUT) is maximised?
It is a Sudoku-like problem (the numbers are not unique and the filling order is given, so not really Sudoku). I feel there is a simpler solution to this problem.
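To make the goal concrete, for the example data in the code below, the filling order and the maximising OUT look like this (written out by hand; it matches what the code returns):
# ORD (filling order)     OUT (sum = 7, colSums = a, rowSums = b)
# 1 8 7                   3 0 0
# 5 2 4                   0 2 0
# 6 9 3                   1 0 1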
a <- c(4,2,1)                                   # column sum limits
b <- c(3,2,2)                                   # row sum limits
ORD <- matrix(c(1,5,6,8,2,9,7,4,3), ncol = 3)   # order in which cells are filled
MIN <- outer(a, b, pmin)                        # not used further down
OUT <- matrix(0, ncol = ncol(ORD), nrow = nrow(ORD))
L <- cbind(as.vector(row(ORD)), as.vector(col(ORD)))[order(ORD), ]  # (row, col) pairs in filling order
for (i in 1:nrow(L)) {
  r <- L[i, 1]
  c <- L[i, 2]
  OUT[r, c] <- min(a[c], b[r])                  # fill as much as both remaining limits allow
  a[c] <- max(a[c] - OUT[r, c], 0)              # update remaining column capacity
  b[r] <- max(b[r] - OUT[r, c], 0)              # update remaining row capacity
}
OUT
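A quick sanity check of the result (the limits are written out literally, since a and b get overwritten inside the loop):
all(colSums(OUT) <= c(4,2,1))   # TRUE: the original column limits a are respected
all(rowSums(OUT) <= c(3,2,2))   # TRUE: the original row limits b are respected
sum(OUT)                        # 7 = min(sum(a), sum(b)), so no larger total is possible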
Edit: Thanks! I've finally ended up with this (quite a lot of code for a super simple problem ;) ):
cs <- c(4,2,1)                                  # remaining column capacities
rs <- c(3,3,2)                                  # remaining row capacities
ORD <- matrix(c(1,5,6,8,2,9,7,4,3), ncol = length(cs), byrow = TRUE)
OUT <- matrix(0, nrow = length(rs), ncol = length(cs))
ROW <- row(OUT)
COL <- col(OUT)
for (i in order(ORD)) {                         # visit cells (as linear indices) in filling order
  r <- ROW[i]
  c <- COL[i]
  k <- min(cs[c], rs[r])                        # the most this cell can take
  if (k > 0) {
    OUT[i] <- k
    cs[c] <- cs[c] - k
    rs[r] <- rs[r] - k
  }
  if (all(cs == 0) || all(rs == 0))             # stop once all columns or all rows are exhausted
    break
}
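For reference, this version happens to give the same matrix as the first one for this data (output as obtained from a run; the original cs and rs are written out literally for the check, as they are consumed by the loop):
OUT
#      [,1] [,2] [,3]
# [1,]    3    0    0
# [2,]    0    2    0
# [3,]    1    0    1
all(colSums(OUT) <= c(4,2,1))   # TRUE
all(rowSums(OUT) <= c(3,3,2))   # TRUE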