# Hi all,
#
# consider the following code (please, run it:
# it's fully working and requires just a few minutes
# to finish):

# -- Package setup ------------------------------------------------------------
# Install the R-Forge solvers only when missing (an unconditional
# install.packages() would re-install on every run of the script).
if (!requireNamespace("Rdonlp2", quietly = TRUE)) {
  install.packages(
    "Rdonlp2",
    repos = c("http://R-Forge.R-project.org", getOption("repos"))
  )
}
if (!requireNamespace("Rsolnp2", quietly = TRUE)) {
  install.packages(
    "Rsolnp2",
    repos = c("http://R-Forge.R-project.org", getOption("repos"))
  )
}

# library() errors immediately if a package is missing, unlike require(),
# which only returns FALSE and lets the script fail later.
library(CreditMetrics)
library(clusterGeneration)
library(Rdonlp2)
library(Rsolnp)
library(Rsolnp2)

# -- Problem data -------------------------------------------------------------
N <- 3                  # number of firms (= number of portfolio weights)
n <- 100000             # Monte Carlo scenarios used inside cm.CVaR()
r <- 0.0025             # risk-free rate
ead <- rep(1 / 3, 3)    # exposure at default, one per firm
lgd <- 0.99             # loss given default
rating <- c("BB", "BB", "BBB")
firmnames <- c("firm 1", "firm 2", "firm 3")
alpha <- 0.99           # confidence level for the Credit VaR

# One-year empirical migration matrix from Standard & Poor's website
rc <- c("AAA", "AA", "A", "BBB", "BB", "B", "CCC", "D")
M <- matrix(c(90.81, 8.33, 0.68, 0.06, 0.08, 0.02, 0.01, 0.01,
              0.70, 90.65, 7.79, 0.64, 0.06, 0.13, 0.02, 0.01,
              0.09, 2.27, 91.05, 5.52, 0.74, 0.26, 0.01, 0.06,
              0.02, 0.33, 5.95, 85.93, 5.30, 1.17, 1.12, 0.18,
              0.03, 0.14, 0.67, 7.73, 80.53, 8.84, 1.00, 1.06,
              0.01, 0.11, 0.24, 0.43, 6.48, 83.46, 4.07, 5.20,
              0.21, 0, 0.22, 1.30, 2.38, 11.24, 64.86, 19.79,
              0, 0, 0, 0, 0, 0, 0, 100
              ) / 100, 8, 8, dimnames = list(rc, rc), byrow = TRUE)

# Correlation matrix (random, so results differ between runs)
rho <- rcorrmatrix(N)
dimnames(rho) <- list(firmnames, firmnames)

# Credit Value at Risk of the equally-weighted portfolio
cm.CVaR(M, lgd, ead, N, n, r, rho, alpha, rating)

# Risk-neutral yield rates, one per firm, looked up by rating
# (vectorized match() replaces the three element-wise lookups)
Y <- cm.cs(M, lgd)
y <- Y[match(rating, rc)]
y

# The function to be minimized: negative reward/risk ratio.
# BUG FIX: the risk in the denominator must depend on the candidate
# weights 'w' being optimized, not on the fixed exposure vector 'ead'
# (see the follow-up message correcting this).
sharpe <- function(w) {
  -(t(w) %*% y) / cm.CVaR(M, lgd, w, N, n, r, rho, alpha, rating)
}

# The linear (budget) constraint: the weights must sum to one
constr <- function(w) {
  sum(w)
}

# Results matrix (empty for now); c() is the conventional way to set
# dimnames (list() also works, but character vectors are idiomatic)
Results <- matrix(NA, nrow = 3, ncol = 4)
rownames(Results) <- c("donlp2", "solnp", "solnp2")
colnames(Results) <- c("w_1", "w_2", "w_3", "Sharpe")

# See the differences between different solvers.
# SYNTAX FIX: the original post lost several '=' signs when the mail was
# wrapped (par.lower, lin.upper and both eqfun arguments below).
rho
Results[1, 1:3] <- round(
  donlp2(fn = sharpe, par = rep(1 / N, N),
         par.lower = rep(0, N), par.upper = rep(1, N),
         A = t(rep(1, N)), lin.lower = 1, lin.upper = 1)$par, 2)
Results[2, 1:3] <- round(
  solnp(pars = rep(1 / N, N), fun = sharpe, eqfun = constr, eqB = 1,
        LB = rep(0, N), UB = rep(1, N))$pars, 2)
Results[3, 1:3] <- round(
  solnp2(par = rep(1 / N, N), fun = sharpe, eqfun = constr, eqB = 1,
         LB = rep(0, N), UB = rep(1, N))$pars, 2)
for (i in seq_len(nrow(Results))) {
  Results[i, 4] <- abs(sharpe(Results[i, 1:3]))
}
Results

# In fact the "sharpe" function I previously defined
# is not smooth because of the cm.CVaR function.
# If you change the correlation matrix, ratings or yields
# you see how different solvers produce different
# parameter estimates.

# Then the main issue is: how may I know which is the
# best solver at all to deal with non-smooth functions
# such as this one?

# --
# View this message in context:
# http://r.789695.n4.nabble.com/The-best-solver-for-non-smooth-functions-tp4636934.html
# Sent from the R help mailing list archive at Nabble.com.
# Whoops! I have just seen there's a little mistake
# in the 'sharpe' function, because I had to use
# the 'w' array instead of 'ead' in the cm.CVaR function!
# This does not change the main point of my question,
# but you should be aware of it.
#
# ---
#
# The function to be minimized was:
#
#   sharpe <- function(w) {
#     -(t(w) %*% y) / cm.CVaR(M, lgd, ead, N, n, r, rho, alpha, rating)
#   }
#
# ...this becomes (substituting 'ead' with 'w'), so the Credit VaR in the
# denominator now depends on the weights being optimized:

# Negative reward/risk ratio of the portfolio with weights 'w':
# expected yield t(w) %*% y divided by the Monte Carlo Credit VaR.
# Relies on M, lgd, N, n, r, rho, alpha, rating and y from the
# enclosing script.
sharpe <- function(w) {
  -(t(w) %*% y) / cm.CVaR(M, lgd, w, N, n, r, rho, alpha, rating)
}

# --
# View this message in context:
# http://r.789695.n4.nabble.com/The-best-solver-for-non-smooth-functions-tp4636934p4636936.html
# Sent from the R help mailing list archive at Nabble.com.
There are obviously a large variety of non-smooth problems; for CVAR problems, if by this you mean conditional value at risk portfolio problems, you can use modern interior point linear programming methods. Further details are here: http://www.econ.uiuc.edu/~roger/research/risk/risk.html Roger Koenker rkoenker at illinois.edu On Jul 18, 2012, at 3:09 PM, Cren wrote:> # Whoops! I have just seen there's a little mistake > # in the 'sharpe' function, because I had to use > # 'w' array instead of 'ead' in the cm.CVaR function! > # This does not change the main features of my, > # but you should be aware of it > > --- > > # The function to be minimized > > sharpe <- function(w) { > - (t(w) %*% y) / cm.CVaR(M, lgd, ead, N, n, r, rho, alpha, rating) > } > > # This becomes... > > sharpe <- function(w) { > - (t(w) %*% y) / cm.CVaR(M, lgd, w, N, n, r, rho, alpha, rating) > } > > # ...substituting 'ead' with 'w'. > > -- > View this message in context: http://r.789695.n4.nabble.com/The-best-solver-for-non-smooth-functions-tp4636934p4636936.html > Sent from the R help mailing list archive at Nabble.com. > > ______________________________________________ > R-help at r-project.org mailing list > https://stat.ethz.ch/mailman/listinfo/r-help > PLEASE do read the posting guide http://www.R-project.org/posting-guide.html > and provide commented, minimal, self-contained, reproducible code.
Roger Koenker-3 wrote:> > There are obviously a large variety of non-smooth problems; > for CVAR problems, if by this you mean conditional value at > risk portfolio problems, you can use modern interior point > linear programming methods. Further details are here: > > http://www.econ.uiuc.edu/~roger/research/risk/risk.html > > Roger Koenker > rkoenker at illinois.edu

# Hi, Roger. # Unfortunately that "C" does not stand for # "Conditional" but "Credit"... which means that # the risk measure is obtained via Monte Carlo # simulated scenarios in order to quantify the # credit loss according to the empirical transition # matrix. Then I am afraid of every solver finding # local maxima (or minima) because of some # "jump" in the Credit VaR surface as a function of # the portfolio weights :( -- View this message in context: http://r.789695.n4.nabble.com/The-best-solver-for-non-smooth-functions-tp4636934p4637002.html Sent from the R help mailing list archive at Nabble.com.
There is a new blog post that is pertinent to this question: http://www.portfolioprobe.com/2012/07/23/a-comparison-of-some-heuristic-optimization-methods/ Pat On 18/07/2012 21:00, Cren wrote:> # Hi all, > > # consider the following code (please, run it: > # it's fully working and requires just few minutes > # to finish): > > require(CreditMetrics) > require(clusterGeneration) > install.packages("Rdonlp2", repos= c("http://R-Forge.R-project.org", > getOption("repos"))) > install.packages("Rsolnp2", repos= c("http://R-Forge.R-project.org", > getOption("repos"))) > require(Rdonlp2) > require(Rsolnp) > require(Rsolnp2) > > N <- 3 > n <- 100000 > r <- 0.0025 > ead <- rep(1/3,3) > rc <- c("AAA", "AA", "A", "BBB", "BB", "B", "CCC", "D") > lgd <- 0.99 > rating <- c("BB", "BB", "BBB") > firmnames <- c("firm 1", "firm 2", "firm 3") > alpha <- 0.99 > > # One year empirical migration matrix from Standard & Poor's website > > rc <- c("AAA", "AA", "A", "BBB", "BB", "B", "CCC", "D") > M <- matrix(c(90.81, 8.33, 0.68, 0.06, 0.08, 0.02, 0.01, 0.01, > 0.70, 90.65, 7.79, 0.64, 0.06, 0.13, 0.02, 0.01, > 0.09, 2.27, 91.05, 5.52, 0.74, 0.26, 0.01, 0.06, > 0.02, 0.33, 5.95, 85.93, 5.30, 1.17, 1.12, 0.18, > 0.03, 0.14, 0.67, 7.73, 80.53, 8.84, 1.00, 1.06, > 0.01, 0.11, 0.24, 0.43, 6.48, 83.46, 4.07, 5.20, > 0.21, 0, 0.22, 1.30, 2.38, 11.24, 64.86, 19.79, > 0, 0, 0, 0, 0, 0, 0, 100 > )/100, 8, 8, dimnames = list(rc, rc), byrow = TRUE) > > # Correlation matrix > > rho <- rcorrmatrix(N) ; dimnames(rho) = list(firmnames, firmnames) > > # Credit Value at Risk > > cm.CVaR(M, lgd, ead, N, n, r, rho, alpha, rating) > > # Risk neutral yield rates > > Y <- cm.cs(M, lgd) > y <- c(Y[match(rating[1],rc)], Y[match(rating[2],rc)], > Y[match(rating[3],rc)]) ; y > > # The function to be minimized > > sharpe <- function(w) { > - (t(w) %*% y) / cm.CVaR(M, lgd, ead, N, n, r, rho, alpha, rating) > } > > # The linear constraints > > constr <- function(w) { > sum(w) > } > > # Results' matrix (it's empty by now) 
> > Results <- matrix(NA, nrow = 3, ncol = 4) > rownames(Results) <- list('donlp2', 'solnp', 'solnp2') > colnames(Results) <- list('w_1', 'w_2', 'w_3', 'Sharpe') > > # See the differences between different solvers > > rho > Results[1,1:3] <- round(donlp2(fn = sharpe, par = rep(1/N,N), par.lower > rep(0,N), par.upper = rep(1,N), A = t(rep(1,N)), lin.lower = 1, lin.upper > 1)$par, 2) > Results[2,1:3] <- round(solnp(pars = rep(1/N,N), fun = sharpe, eqfun > constr, eqB = 1, LB = rep(0,N), UB = rep(1,N))$pars, 2) > Results[3,1:3] <- round(solnp2(par = rep(1/N,N), fun = sharpe, eqfun > constr, eqB = 1, LB = rep(0,N), UB = rep(1,N))$pars, 2) > for(i in 1:3) { > Results[i,4] <- abs(sharpe(Results[i,1:3])) > } > Results > > # In fact the "sharpe" function I previously defined > # is not smooth because of the cm.CVaR function. > # If you change correlation matrix, ratings or yields > # you see how different solvers produce different > # parameters estimation. > > # Then the main issue is: how may I know which is the > # best solver at all to deal with non-smooth functions > # such as this one? > > -- > View this message in context: http://r.789695.n4.nabble.com/The-best-solver-for-non-smooth-functions-tp4636934.html > Sent from the R help mailing list archive at Nabble.com. > > ______________________________________________ > R-help at r-project.org mailing list > https://stat.ethz.ch/mailman/listinfo/r-help > PLEASE do read the posting guide http://www.R-project.org/posting-guide.html > and provide commented, minimal, self-contained, reproducible code. >-- Patrick Burns pburns at pburns.seanet.com twitter: @portfolioprobe http://www.portfolioprobe.com/blog http://www.burns-stat.com (home of 'Some hints for the R beginner' and 'The R Inferno')