From 03e9166e94ebfd3a6bd3bc121b792d1dab0b7368 Mon Sep 17 00:00:00 2001
From: xkszltl
Date: Thu, 7 Nov 2024 01:50:52 -0800
Subject: [PATCH] Bugfix for `argsort` ordering.

Numpy uses quicksort by default, which is not stable, so the relative order of
tied fitness values in `argsort` is not guaranteed. This caused a unit test
failure when numpy 2.0 was released, possibly due to an internal behavior
change in the default sort. We should always use stable sorting for
deterministic behavior.

This change also implies `numpy>=1.15.0`, the first release to support
`kind="stable"`. That should be OK, since numpy 1.15.0 is from mid 2018 and
the earliest supported Python listed is 3.8, from late 2019.

A minimal sketch of the tie-ordering behavior appears after the diff below.

Some intermediate results:

```
fitness = [ 0.33974567 0.81895563 0.94070405 0.52993818 0.88379917 0.77904015 0.57553742 0.47380936
 0.37427307 0.51682958 0.44839885 0.31961157 0.61859446 0.81008902 0.92151043 0.09568099
 0.94070405 0.27372766 0.90532631 0.77971909 0.         0.35179379 0.67697571 0.75771999
 0.38116613 0.72893838 0.66027406 0.94070405 0.94070405 0.57791134 0.81895563 0.6465814
 0.61902008 0.78928374 0.77971909 0.33974567 0.928166   0.50965352 0.72893838 0.96131336
 0.63307761 0.81008902 0.87809592 0.94070405 0.85015997 0.5519724  0.88508586 0.66670118
 0.53261073 0.88464403 0.92151043 0.94070405 0.84466016 0.92151043 0.67346263 0.64351788
 0.88046279 0.74943025 0.85015997 0.94070405 0.92251378 0.79115731 0.322811   0.94070405
 0.8047925  0.86139315 0.88508586 0.94070405 0.69791616 0.93857883 0.89034748 0.23008709
 0.8074698  0.67242776 0.27152052 0.94070405 0.92151043 0.24725724 0.88508586 0.8074698
 0.46557846 0.70198849 0.51334045 0.53751801 0.83399953 0.74622591 0.78625696 0.94070405
 0.44174531 0.88508586 0.57743352 0.94070405 0.28999104 0.37486687 0.84896842 0.92151043
 0.09305982 0.8074698  0.06718238 0.61554918 0.40841508 0.88382682 0.92151043 0.90532631
 0.84458929 0.07197116 0.03002106 0.89591207 0.77971909 0.57791134 0.76741657 0.82206575
 0.94070405 0.91762347 0.86139315 0.77971909 0.37066566 0.82901212 0.94070405 0.51739491
 0.94070405 0.83795664 0.20879317 0.84058307 0.62029227 0.04250433 0.88382682 0.64843241
 0.         0.85015997 0.6218872  0.94070405 0.53976123 0.91762347 0.08466398 0.42431066
 0.44839885 0.89641524 0.85706889 0.76292463 0.26448191 0.         0.60351109 0.7122769
 0.62225002 0.72038895 0.85015997 0.94070405 0.94070405 0.12910986 0.88379917 0.94070405
 0.5181633  0.88508586 0.94070405 0.92151043 0.92158215 0.4051425  0.53836192 0.60054088
 0.77971909 0.44839885 0.88508586 0.56913785 0.44376123 0.94070405 0.38552886 0.88508586
 0.88508586 0.88508586 0.94070405 0.09222356 0.8096277  0.92151043 0.81051991 0.02988189
 0.90532631 0.28390354 0.77971909 0.29585511 0.37720316 0.8074698  0.85015997 0.38552886
 0.8074698  0.76741657 0.77971909 0.27008694 0.62396678 0.94070405 0.2849127  0.68423676
 0.90532631 0.90459952 0.92151043 0.59974342 0.45411693 0.11342193 0.70516271 0.88219066
 0.72821262 0.94070405 0.09465967 0.94070405 0.60690495 0.02250968 0.32600829 0.69264124
 0.88219066 0.79500924 0.52118044 0.77971909 0.94070405 0.86466465 0.88508586 0.08784893
 0.6424183  0.78384819 0.77971909 0.82418994 0.89034748 0.         0.81854996 0.29415009
 0.09222356 0.50276137 0.48749366 0.58708714 0.84058307 0.77953462 0.7713532  0.92151043
 0.88313334 0.49627758 0.10918025 0.80488051 0.65845542 0.44254166 0.94070405 0.91762347
 0.92151043 0.38884114 0.94070405 0.92151043 0.88508586 0.94070405 0.11971749 0.59217191
 0.67585517 0.94070405 0.77971909 0.72775919 0.83407586 0.94070405 0.23008709 0.83514714
 0.77971909 0.93533783 0.3714695  0.88464403 0.
 0.49601688 0.51164985 0.6218872  0.86139315 0.57225205 0.41146609 0.07472116 0.94070405
 0.88382682 0.18041259 0.79395903 0.43869894 0.38552886 0.57570353 0.41347527 0.94070405
 0.94070405 0.88508586 0.86113562 0.89034748 0.88046279 0.94070405 0.52723207 0.89768744
 0.58341587 0.77971909 0.51751904 0.80843058 0.2851571  0.94070405 0.6459937  0.94070405
 0.76741657 0.06516033 0.95802584 0.92151043 0.84440653 0.77971909 0.82783373 0.82163262
 0.04811703 0.88508586 0.94070405 0.79516061 0.81895563 0.34463159 0.92683427 0.94070405
 0.88508586 0.87309917 0.26448191 0.6424183  0.73195805 0.77971909 0.92367782 0.26448191
 0.94070405 0.81479518 0.4204927  0.88508586 0.92151043 0.88382682 0.26576284 0.88508586
 0.90532631 0.94070405 0.77971909 0.25035915 0.53850526 0.61554918 0.90532631 0.55879117
 0.75153108 0.757016   0.39199112 0.77421967 0.8074698  0.29623808 0.23232798 0.77971909
 0.38309104 0.0122373  0.94070405 0.77971909 0.95428278 0.93541914 0.09465967 0.97737236
 0.83195164 0.77894074 0.88382682 0.90532631 0.81895563 0.48658361 0.72276988 0.83514714
 0.92151043 0.94070405 0.81008902 0.94070405 0.76741657 0.16055894 0.67687135 0.77971909
 0.73955796 0.83514714 0.73377648 0.13088784 0.77971909 0.69120136 0.48487633 0.92367782
 0.63169887 0.7253329  0.75366789 0.87644307 0.87309917 0.36724386 0.35503614 0.
 0.83582908 0.94070405 0.92151043 0.87498367 0.41575278 0.83514714 0.61554918 0.77971909
 0.25573618 0.04811703 0.77971909 0.34609843 0.92151043 0.89641524 0.65840902 0.92151043
 0.94070405 0.82411846 0.94070405 0.16146892 0.88508586 0.45403708 0.94070405 0.8096277
 0.63700282 0.88046279 0.78280947 0.79525698 0.92151043 0.31825026 0.90789505 0.14922644
 0.57109064 0.85494232 0.8096277  0.64919865 0.77971909 0.08605848 0.84791636 0.80513213
 0.4156438  0.88508586 0.66074833 0.89034748 0.11146298 0.88508586 0.76554458 0.92151043
 0.94070405 0.94070405 0.90532631 0.77971909 0.64380586 0.70972035 0.90532631 0.04811703
 0.92151043 0.33974567 0.58031222 0.92151043 0.92151043 0.88508586 0.94070405 0.84039895
 0.88508586 0.88508586 0.84058307 0.88379917 0.76070421 0.88508586 0.26063012 0.88508586
 0.92367782 0.09465967 0.74943025 0.85092669 0.14726108 0.67306094 0.51975967 0.38116613
 0.88508586 0.79115731 0.04811703 0.82370975 0.91762347 0.64919865 0.94070405 0.24079654
 0.94070405 0.76047747 0.7971817  0.77971909 0.73373088 0.35719023 0.37021036 0.77679554
 0.50839417 0.90551043 0.89641524 0.88508586 0.82227722 0.92151043 0.2379788  0.84198115
 0.94070405 0.94070405 0.04899257 0.25385147 0.40621502 0.10429103 0.74449425 0.94070405
 0.60250829 0.88508586 0.71577686 0.94070405 0.94070405 0.31440268 0.83514714 ]
```

With numpy 1:

```
hall_of_fame = [ 348 39 295 345 276 268 443 253 87 91 120 429 242 343 112 238 118 277 326 430
 249 67 131 189 165 201 382 203 360 154 358 151 148 397 212 399 303 403 75 245
 292 467 497 496 2 492 16 27 28 486 485 308 43 290 51 469 170 317 63 282
 59 147 69 346 257 36 307 372 453 315 60 156 321 396 53 409 441 231 155 482
 14 357 437 76 102 428 383 296 95 173 240 194 393 50 243 440 113 133 239 465 ]

est = [
    max(sub(abs(X0), X6), min(X0, X6)),
    sub(min(sub(min(sqrt(X9), X6), neg(X0)), X6), neg(X0)),
    neg(X0),
    div(sub(X0, X2), mul(add(mul(inv(X9), min(X2, 0.107)), inv(inv(X1))), div(max(max(X7, X8), sub(X6, X6)), sub(sqrt(X9), sub(X0, -0.548))))),
    sub(sqrt(sqrt(abs(add(X5, X1)))), neg(X0)),
    log(add(min(mul(add(max(0.405, X3), log(X5)), inv(min(X7, X9))), abs(min(neg(X6), inv(X8)))), neg(mul(mul(add(X5, X6), add(X8, X3)), log(sub(X1, -0.516)))))),
    sub(min(sqrt(X9), X6), mul(X2, log(X4))),
    log(add(min(X2, X6), mul(X8, X1))),
    min(sqrt(X9), X6),
    mul(max(mul(abs(0.308), mul(X6, X0)), add(neg(X9), min(X4, -0.368))), sub(abs(max(X8, X1)), abs(log(X0))))
]
```

With numpy 2:

```
hall_of_fame = [ 348 39 295 345 2 27 486 469 467 28 63 43 51 59 120 91 112 118 131 67
 75 87 212 238 242 165 170 189 201 203 249 253 443 245 268 154 151 148 496 492
 358 360 282 290 277 276 308 317 430 429 326 343 292 303 397 497 16 485 403 382
 399 147 69 346 257 36 307 453 315 372 60 156 53 76 50 396 409 95 482 102
 383 393 155 321 296 14 440 231 240 437 243 441 357 194 173 428 465 239 113 133 ]

est = [
    max(sub(abs(X0), X6), min(X0, X6)),
    sub(min(sub(min(sqrt(X9), X6), neg(X0)), X6), neg(X0)),
    X0,
    div(sub(X0, X2), mul(add(mul(inv(X9), min(X2, 0.107)), inv(inv(X1))), div(max(max(X7, X8), sub(X6, X6)), sub(sqrt(X9), sub(X0, -0.548))))),
    sub(sqrt(sqrt(abs(add(X5, X1)))), neg(X0)),
    log(add(min(mul(add(max(0.405, X3), log(X5)), inv(min(X7, X9))), abs(min(neg(X6), inv(X8)))), neg(mul(mul(add(X5, X6), add(X8, X3)), log(sub(X1, -0.516)))))),
    sub(min(sqrt(X9), X6), mul(X2, log(X4))),
    log(add(min(X2, X6), mul(X8, X1))),
    min(sqrt(X9), X6),
    mul(max(mul(abs(0.308), mul(X6, X0)), add(neg(X9), min(X4, -0.368))), sub(abs(max(X8, X1)), abs(log(X0))))
]
```
---
 gplearn/genetic.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/gplearn/genetic.py b/gplearn/genetic.py
index 54a90ee4..5d32e1f2 100644
--- a/gplearn/genetic.py
+++ b/gplearn/genetic.py
@@ -551,9 +551,9 @@ def fit(self, X, y, sample_weight=None):
         # Find the best individuals in the final generation
         fitness = np.array(fitness)
         if self._metric.greater_is_better:
-            hall_of_fame = fitness.argsort()[::-1][:self.hall_of_fame]
+            hall_of_fame = fitness.argsort(kind="stable")[::-1][:self.hall_of_fame]
         else:
-            hall_of_fame = fitness.argsort()[:self.hall_of_fame]
+            hall_of_fame = fitness.argsort(kind="stable")[:self.hall_of_fame]
         evaluation = np.array([gp.execute(X) for gp in
                                [self._programs[-1][i] for
                                 i in hall_of_fame]])
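Minimal sketch of the tie-ordering behavior (for illustration only, not part of
the patch): the toy `fitness` values and hall-of-fame size below are made up,
and only `numpy.ndarray.argsort` with its `kind` argument is assumed. With many
tied fitness values (like the repeated 0.94070405 above), only a stable sort
makes the selected indices reproducible across numpy versions.

```
import numpy as np

# Toy fitness values with ties, mimicking the repeated top scores above.
fitness = np.array([0.5, 0.94, 0.3, 0.94, 0.7, 0.94])
hall_of_fame = 4

# Default quicksort (an introsort) gives no guarantee on the relative order
# of the three programs tied at 0.94, so this selection may differ between
# numpy versions.
unstable_pick = fitness.argsort()[::-1][:hall_of_fame]

# A stable sort keeps tied values in their original index order; after the
# [::-1] reversal, ties come out in reverse index order -- but always the
# same way, on every numpy version.
stable_pick = fitness.argsort(kind="stable")[::-1][:hall_of_fame]

print(unstable_pick)  # tie order among indices 1, 3, 5 is unspecified
print(stable_pick)    # [5 3 1 4], deterministically
```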