-
-
Save bmitchelmore/1929681 to your computer and use it in GitHub Desktop.
// Binary LSD radix sort: stable-partition the array on each bit,
// least significant bit first.
var nums = [35, 25, 53, 3, 102, 203, 230, 1005];
// Number of binary digits we're dealing with: the bit length of the
// largest value. The original Math.ceil(Math.log(i)/Math.log(2))
// undercounts exact powers of two (8 -> 3 instead of 4, 1 -> 0), so
// the top bit was never examined; toString(2).length is exact.
var k = Math.max.apply(null, nums.map(function (i) {
    return i.toString(2).length;
}));
for (var d = 0; d < k; ++d) {
    // Stable partition on bit d: move every value whose d-th bit is 0
    // ahead of the values whose d-th bit is 1, preserving relative order.
    for (var i = 0, p = 0, b = 1 << d, n = nums.length; i < n; ++i) {
        var o = nums[i];
        if ((o & b) == 0) {
            // This number is a 0 for this digit: remove it at i and
            // reinsert it at the partition point p (p <= i, so the
            // index i remains valid after the pair of splices).
            nums.splice(p++, 0, nums.splice(i, 1)[0]);
        }
    }
}
var nums = [1,3,2,3,2,4,2,1,3,2,1,3,4,2,6,4,2,6,8,3,1,7,4,3,2,4,1,8,8,8];
var nums = [1,3,2,3,2,4,2,1,3,2];
var nums = [1,3,2,3,2,4,2,1,3,2,1,3,4,2,6,4,2,6,8,3,1,7,4,3,2,4,1];
Doesn't work for such scenarios: `Math.ceil(Math.log(i)/Math.log(2))` undercounts the bit length for exact powers of two (e.g. 4 needs 3 bits but yields 2, and 8 needs 4 bits but yields 3), so the sort never partitions on the top bit and those values end up misplaced.
My version is much more verbose, but it seems to execute very quickly for 100,000+ items:
` var testArray = [ 331, 454, 230, 34, 343, 45, 59, 453, 345, 231, 9 ];
function radixBucketSort (arr) {
    // LSD radix sort (base 10) for non-negative integers; sorts arr in place.
    //
    // BUG FIX: the original collected only the digit-lengths that actually
    // occur in the input ("relevant radices") and iterated those object
    // keys. That skips any digit position for which no key has exactly
    // that many digits — e.g. [150, 140] was returned unsorted, because
    // with only 3-digit keys the tens digit was never examined. Every
    // position from 1 up to the longest key must be processed.
    var idx1, idx2, idx3, len1, radix, radixKey;
    var curr, currStr, currBucket, buckets, maxLen;
    // Find the number of digits in the longest key.
    maxLen = 0;
    len1 = arr.length;
    for (idx1 = 0; idx1 < len1; idx1++) {
        if (arr[idx1].toString().length > maxLen) {
            maxLen = arr[idx1].toString().length;
        }
    }
    // One pass per digit position, least significant (radix 1) first.
    for (radix = 1; radix <= maxLen; radix++) {
        buckets = {};
        // Put each array item in a bucket based on its digit at this
        // position, counted from the right end of the stringified key.
        for (idx1 = 0; idx1 < len1; idx1++) {
            curr = arr[idx1];
            currStr = curr.toString();
            // Keys shorter than the current position have an implicit
            // leading zero at this digit.
            radixKey = currStr.length >= radix ? currStr[currStr.length - radix] : '0';
            if (!buckets.hasOwnProperty(radixKey)) {
                buckets[radixKey] = [];
            }
            buckets[radixKey].push(curr);
        }
        // Drain buckets 0-9 in digit order back into the array; within a
        // bucket the scan order is preserved, which keeps the sort stable.
        idx1 = 0;
        for (idx2 = 0; idx2 < 10; idx2++) {
            if (buckets[idx2] != null) {
                currBucket = buckets[idx2];
                for (idx3 = 0; idx3 < currBucket.length; idx3++) {
                    arr[idx1++] = currBucket[idx3];
                }
            }
        }
    }
}
radixBucketSort(testArray);
console.dir(testArray); `
Here is a version I came up with which seems pretty stable while sorting in place on the original input data. So far I've only tested it up to about 500,000 integers. This version sorts positive integer values ranging from 0-99, but it is easily scalable once you recognize the repetitive pattern: the two blocks differ only in the enqueue step, where data[i]'s divisor grows in multiples of ten. For example, to support integers ranging from 0-999, you would repeat the code block again but divide data[i] by 100 in the enqueue step.
// Simple FIFO queue backed by an array.
// The original assigned four global helper functions onto every instance
// in the constructor; defining the methods on the prototype keeps one
// shared copy and leaks nothing into the global scope.
function Queue() {
    this.dataStore = [];
}
// Append an element at the tail.
Queue.prototype.enqueue = function (element) {
    this.dataStore.push(element);
};
// Remove and return the head element, or false when the queue is empty
// (preserves the original contract; callers check isEmpty() first).
Queue.prototype.dequeue = function () {
    if (this.dataStore.length == 0) {
        return false;
    }
    return this.dataStore.shift();
};
// True when the queue holds no elements.
Queue.prototype.isEmpty = function () {
    return this.dataStore.length == 0;
};
// LSD radix sort for non-negative integers of any magnitude. The
// original handled only 0-99 via two hand-copied per-digit blocks and
// told the reader to paste another block per extra digit; this
// generalizes that pattern into one loop whose divisor grows by a
// factor of ten per pass. Sorts `data` in place and also returns it.
function radix(data) {
    // The largest value determines how many digit passes are needed.
    var max = 0;
    for (var i = 0; i < data.length; i++) {
        if (data[i] > max) {
            max = data[i];
        }
    }
    // One bucketing pass per decimal digit, least significant first.
    for (var divisor = 1; Math.floor(max / divisor) > 0; divisor *= 10) {
        var bins = [[], [], [], [], [], [], [], [], [], []];
        for (var i = 0; i < data.length; i++) {
            bins[Math.floor(data[i] / divisor) % 10].push(data[i]);
        }
        // Read the bins back in digit order; within a bin the original
        // scan order is preserved, which keeps the sort stable.
        var k = 0;
        for (var b = 0; b < 10; b++) {
            for (var j = 0; j < bins[b].length; j++) {
                data[k++] = bins[b][j];
            }
        }
    }
    return data;
}
var test = [1, 5, 22, 67, 88, 12, 99, 4, 68, 71, 0];
radix(test);
console.log(test);
I believe splicing can be very expensive — each splice can cost O(n) in the worst case, so the doubly-spliced inner loop is O(n²) overall. I tried running this with a large array of 100,000 items, and it basically never finished (I killed it after a minute). Value swapping would have been a more efficient choice.