diff --git a/src/TR.jl b/src/TR.jl
index 72aa2e5..dc49563 100644
--- a/src/TR.jl
+++ b/src/TR.jl
@@ -462,3 +462,75 @@
 R = Matrix(factorisation.R);
 return Q, R
 end
+
+"""
+"Manual" k-fold cross-validation for the ridge regression problem.
+The LS problem is solved explicitly; no shortcuts are used.
+"""
+function TRSegCVNaive(X, y, lambdas, cvfolds)
+
+n, p = size(X);
+rmsecvman = zeros(length(lambdas));
+nfolds = length(unique(cvfolds));
+
+for j = 1:length(lambdas)
+    for i = 1:nfolds
+        # Hold out fold i and fit on the remaining observations.
+        inds = (cvfolds .== i);
+        Xdata = X[vec(.!inds), :];
+        ydata = y[vec(.!inds)];
+
+        # Centre the training data; the intercept is recovered from the means.
+        mX = mean(Xdata, dims=1);
+        my = mean(ydata);
+        Xs = Xdata .- mX;
+        ys = ydata .- my;
+
+        # Ridge regression as an augmented LS problem:
+        # min ||Xs*b - ys||^2 + lambdas[j]*||b||^2.
+        betas = [Xs; sqrt(lambdas[j]) * I(p)] \ [ys; zeros(p)];
+        rmsecvman[j] += sum((y[vec(inds)] - ((X[vec(inds), :] .- mX) * betas .+ my)).^2);
+    end
+end
+
+rmsecvman = sqrt.(1/n .* rmsecvman);
+
+return rmsecvman
+end
+
+"""
+"Manual" k-fold cross-validation for the ridge regression update problem,
+regularising the solution towards a previous estimate `bOld`.
+The LS problem is solved explicitly; no shortcuts are used.
+"""
+function TRSegCVUpdateNaive(X, y, lambdas, cvfolds, bOld)
+
+n, p = size(X);
+rmsecvman = zeros(length(lambdas));
+nfolds = length(unique(cvfolds));
+
+for j = 1:length(lambdas)
+    for i = 1:nfolds
+        # Hold out fold i and fit on the remaining observations.
+        inds = (cvfolds .== i);
+        Xdata = X[vec(.!inds), :];
+        ydata = y[vec(.!inds)];
+
+        # Centre the training data; the intercept is recovered from the means.
+        mX = mean(Xdata, dims=1);
+        my = mean(ydata);
+        Xs = Xdata .- mX;
+        ys = ydata .- my;
+
+        # Augmented LS with the penalty shifted towards the previous solution:
+        # min ||Xs*b - ys||^2 + lambdas[j]*||b - bOld||^2.
+        betas = [Xs; sqrt(lambdas[j]) * I(p)] \ [ys; sqrt(lambdas[j]) * vec(bOld)];
+        rmsecvman[j] += sum((y[vec(inds)] - ((X[vec(inds), :] .- mX) * betas .+ my)).^2);
+    end
+end
+
+rmsecvman = sqrt.(1/n .* rmsecvman);
+
+return rmsecvman
+end
+
diff --git a/src/Ting.jl b/src/Ting.jl
index fe9e3c9..2141e49 100644
--- a/src/Ting.jl
+++ b/src/Ting.jl
@@ -25,6 +25,7 @@ export TRSegCVUpdate
 export plegendre
 export TRLooCVUpdateFair
 export TRLooCVUpdateNaive
+export TRSegCVUpdateNaive
 
 include("convenience.jl")
 include("TR.jl")
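
For reference, a minimal usage sketch of the new functions. The data, fold assignment, lambda grid, and the previous solution bOld below are all hypothetical, and the package's functions are assumed to be in scope (Statistics and LinearAlgebra are needed for mean and I inside them):

using Random
Random.seed!(1)
n, p = 100, 10
X = randn(n, p)                            # hypothetical predictor matrix
y = X * randn(p) .+ 0.1 .* randn(n)        # hypothetical response
lambdas = 10.0 .^ range(-3, 3, length=25)  # candidate ridge penalties
cvfolds = rand(1:5, n)                     # random assignment to 5 folds

rmsecv = TRSegCVNaive(X, y, lambdas, cvfolds)
bOld = zeros(p)                            # hypothetical previous solution
rmsecvupd = TRSegCVUpdateNaive(X, y, lambdas, cvfolds, bOld)
println("Best lambda: ", lambdas[argmin(rmsecv)])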