Quicksort pivoting strategy changes when a slowdown is recognized (works well against worst cases)
This commit is contained in:
parent
83c79f4832
commit
4436c79821
37
thiersort.h
37
thiersort.h
@ -234,6 +234,12 @@
|
||||
*/
|
||||
|
||||
#include <stdint.h>
|
||||
#ifndef TS_NO_RAND
|
||||
#include <stdlib.h> /* For rand() call for pivoting in quicksort */
|
||||
#define TS_RAND 1 /* For use in branches */
|
||||
#else
|
||||
#define TS_RAND 0 /* For use in branches */
|
||||
#endif /* TS_NO_RAND */
|
||||
|
||||
#ifdef TS_SSE2
|
||||
#include <emmintrin.h>
|
||||
@ -539,7 +545,7 @@ static inline void ts_quicksort_inplace_impl(
|
||||
const TSU32 bi,
|
||||
void *reent_data),
|
||||
void *reent_data,
|
||||
TSBOOL const_check);
|
||||
TSBOOL slowdown_detected);
|
||||
|
||||
/** Simple inplace quicksort for tselems */
|
||||
static inline void ts_quicksort_inplace(
|
||||
@ -575,7 +581,7 @@ static inline void ts_quicksort_inplace_impl(
|
||||
const TSU32 bi,
|
||||
void *reent_data),
|
||||
void *reent_data,
|
||||
TSBOOL const_check) {
|
||||
TSBOOL slowdown_detected) {
|
||||
/* Must do this early exit! */
|
||||
// TODO: TS_UNLIKELY
|
||||
if(to == 0) return;
|
||||
@ -586,9 +592,18 @@ static inline void ts_quicksort_inplace_impl(
|
||||
|
||||
/* Pivoting */
|
||||
TSU32 len = (to - from);
|
||||
TSU32 mid = from + len / 2;
|
||||
const union tskey pivotkey = arr[mid].key;
|
||||
TSU32 pivoti = arr[mid].i;
|
||||
TSU32 pivi;
|
||||
/* Compiler should optimize this branch out compile time! */
|
||||
if(!slowdown_detected || !TS_RAND) {
|
||||
/* Also if TS_NO_RAND */
|
||||
pivi = from + len / 2;
|
||||
} else {
|
||||
/* Only when TS_NO_RAND is not defined, and only while a slowdown is in progress */
|
||||
/* This can help a bit to alleviate the worst case scenarios */
|
||||
pivi = from + (rand() % len);
|
||||
}
|
||||
const union tskey pivotkey = arr[pivi].key;
|
||||
TSU32 pivoti = arr[pivi].i;
|
||||
|
||||
/* Main loop */
|
||||
TSU32 left = from;
|
||||
@ -609,7 +624,7 @@ static inline void ts_quicksort_inplace_impl(
|
||||
TSU32 righti = arr[right].i;
|
||||
while((left < right) && !lt(rightkey, pivotkey, righti, pivoti, reent_data)) {
|
||||
/* Compiler should optimize this branch out compile time! */
|
||||
if(const_check) {
|
||||
if(slowdown_detected) {
|
||||
/* (**): Check for stepping over everything because it's all the same.. */
|
||||
/* This with the above if gives two GE so an equality check */
|
||||
if(!lt(pivotkey, rightkey, pivoti, righti, reent_data)) {
|
||||
@ -658,8 +673,8 @@ static inline void ts_quicksort_inplace_impl(
|
||||
|
||||
/* Swap */
|
||||
const struct tselem tmp = arr[left];
|
||||
arr[left] = arr[mid];
|
||||
arr[mid] = tmp;
|
||||
arr[left] = arr[pivi];
|
||||
arr[pivi] = tmp;
|
||||
|
||||
/* XXX: Instead of (**) above, we could do some other sort here as a fallback (like a heapsort or merge) */
|
||||
/* This ignores constantly the same arrays */
|
||||
@ -691,9 +706,9 @@ static inline void ts_quicksort_fromto(
|
||||
if(from >= to) return;
|
||||
|
||||
TSU32 len = (to - from);
|
||||
TSU32 mid = from + len / 2;
|
||||
const union tskey pivotkey = src[mid].key;
|
||||
TSU32 pivoti = src[mid].i;
|
||||
TSU32 pivi = from + len / 2;
|
||||
const union tskey pivotkey = src[pivi].key;
|
||||
TSU32 pivoti = src[pivi].i;
|
||||
|
||||
TSU32 li = from;
|
||||
TSU32 ri = to - 1;
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user