import numpy as np

N = 100000
d = 1000
Xs = np.random.normal(size=(N, d))

# We demonstrate that X0 is optimal: build the key vector k as the unit vector
# along X0's positive part.
X0 = Xs[0]
X0p = X0.clip(min=0)
L2_X0p = np.linalg.norm(X0p)
X0p_hat = X0p / L2_X0p
X0_score = np.dot(X0p_hat, X0)  # equals L2_X0p
print("X0_score: %.4f" % X0_score)

# Score every candidate against X0p_hat; np.partition places the two largest
# scores at the end of the array.
scores = Xs @ X0p_hat
scores = np.partition(scores, -2)
print("1st score: %.4f, squared = %.4f ~ d/2 = %.2f"
      % (scores[-1], scores[-1]**2, d / 2))
print("2nd score: %.4f, squared = %.4f < bd, bd ~ 2 ln(N) = %.4f"
      % (scores[-2], scores[-2]**2, 2 * np.log(N)))

# Now let the other candidates drop the negative contributions to their scores.
print("If we allow other candidates to drop the negative contributions to their scores")
Xs = Xs.clip(min=0)
scores = Xs @ X0p_hat
scores = np.partition(scores, -2)
dpart = np.sqrt(d / 2) / np.pi
npart = np.sqrt((1 - 1 / np.pi) * np.log(N))
print("2nd score: %.4f ~ sqrt{d/2pi^2} + sqrt{(1-1/pi)ln(N)} = %.4f"
      % (scores[-2], dpart + npart))
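For reference, here is a short sketch (not part of the script itself) of where the three printed benchmarks come from, writing $X_0^+$ for `X0p` and $\hat X_0^+$ for `X0p_hat`, and assuming the half-normal moments $\mathbb{E}[\max(g,0)] = 1/\sqrt{2\pi}$ and $\operatorname{Var}[\max(g,0)] = \tfrac12 - \tfrac{1}{2\pi}$ for $g \sim \mathcal{N}(0,1)$, together with the usual "mean plus $\sigma\sqrt{2\ln N}$" approximation for the maximum of $N$ i.i.d. draws:

$$
\begin{aligned}
\hat X_0^+ \cdot X_0 &= \frac{X_0^+ \cdot X_0}{\lVert X_0^+\rVert} = \lVert X_0^+\rVert \approx \sqrt{d/2}
&&\text{(so its square is } \approx d/2\text{)},\\
\hat X_0^+ \cdot X_j &\sim \mathcal{N}(0,1) \ \text{ for } j \neq 0
&&\Rightarrow\ \max_{j\neq 0}\,\bigl(\hat X_0^+ \cdot X_j\bigr)^2 \lesssim 2\ln N,\\
\mathbb{E}\bigl[\hat X_0^+ \cdot X_j^+\bigr] &= \frac{\sum_i (\hat X_0^+)_i}{\sqrt{2\pi}}
\approx \frac{\sqrt{d/\pi}}{\sqrt{2\pi}} = \sqrt{\frac{d}{2\pi^2}}
&&\Rightarrow\ \max_{j\neq 0}\,\hat X_0^+ \cdot X_j^+ \approx \sqrt{\frac{d}{2\pi^2}} + \sqrt{\Bigl(1-\frac1\pi\Bigr)\ln N},
\end{aligned}
$$

where the last fluctuation term uses $\sqrt{2\sigma^2\ln N}$ with $\sigma^2 = \tfrac12 - \tfrac{1}{2\pi}$. These are exactly the quantities `d/2`, `2*np.log(N)`, and `dpart + npart` that the prints compare against.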