Skip to content

Instantly share code, notes, and snippets.

@VisualMelon
Last active August 7, 2020 21:25
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save VisualMelon/0ac1a1fd6e2db1273fb1d49a32d234ce to your computer and use it in GitHub Desktop.
Save VisualMelon/0ac1a1fd6e2db1273fb1d49a32d234ce to your computer and use it in GitHub Desktop.
MultiDictionaryReversal
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Running;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Threading.Tasks;
namespace MultiDictionaryInversion
{
class Program
{
    /// <summary>An immutable pair whose key and value share one type.</summary>
    public struct Kv<T>
    {
        public readonly T Key;
        public readonly T Value;

        public Kv(T key, T value)
        {
            Key = key;
            Value = value;
        }
    }

    /// <summary>Orders Kv&lt;int&gt; pairs by Key alone; Value is ignored.</summary>
    public class KvComparer : IComparer<Kv<int>>
    {
        public int Compare(Kv<int> x, Kv<int> y) => x.Key.CompareTo(y.Key);
    }

    static void Main(string[] args)
    {
        // Ad-hoc timing harness kept around for quick sanity checks before a full
        // BenchmarkDotNet run — e.g. rock("Serial", bench.SerialLoop1); or
        // rock("ParallelSorts4", bench.ParallelSorts4);
        var sw = new System.Diagnostics.Stopwatch();
        var bench = new Benchmark();
        bench.Problem = bench.Problems.Last();

        void rock(string name, Action a)
        {
            // Time one serial run of the method.
            Console.WriteLine(name);
            sw.Restart();
            a();
            sw.Stop();
            Console.WriteLine(sw.ElapsedMilliseconds);

            // Then time two copies running side by side, to see how it
            // behaves when competing for cores.
            sw.Restart();
            var pair = new Action[] { a, a };
            Parallel.Invoke(new ParallelOptions() { MaxDegreeOfParallelism = 10 }, pair);
            sw.Stop();
            Console.WriteLine(sw.ElapsedMilliseconds);
        }

        BenchmarkRunner.Run<Benchmark>();
    }
}
/// <summary>
/// A List&lt;T&gt; whose <see cref="SafeAdd"/> is guarded by a spin lock so multiple
/// threads can append concurrently. Only SafeAdd is synchronised — every inherited
/// member (including plain Add, Count, the indexer) remains non-thread-safe and must
/// not race with SafeAdd.
/// </summary>
public class ListWithConcurrentAdd<T> : List<T>
{
    const int StateFree = 0;
    const int StateTaken = 1;
    int state = StateFree;

    /// <summary>Appends <paramref name="thing"/> under a spin lock.</summary>
    public void SafeAdd(T thing)
    {
        // Spin until we flip the flag Free -> Taken. CompareExchange is a full
        // barrier, so it also provides the acquire semantics of taking the lock.
        while (System.Threading.Interlocked.CompareExchange(ref state, StateTaken, StateFree) == StateTaken) ;
        this.Add(thing);
        // BUG FIX: the release was a plain store (`state = StateFree;`). Use a
        // volatile write so the Add above cannot be reordered past the unlock on
        // weak memory models, and the waiting thread promptly observes the release.
        System.Threading.Volatile.Write(ref state, StateFree);
    }
}
public static class Methods
{
/// <summary>
/// Inverts a multi-dictionary with LINQ: every value in <paramref name="data"/>
/// becomes a key of the result, mapped to the list of original keys it appeared under.
/// </summary>
public static Dictionary<T, List<T>> OpLinq<T>(Dictionary<T, List<T>> data)
{
    // BUG FIX: the inner ToDictionary projection reused the lambda parameter name
    // `x` (`x => x.Select(x => x.Item1)`), which shadows the enclosing lambda
    // parameter and is a compile error (CS0136). Renamed to `g`/`t`.
    return data
        .SelectMany(x => x.Value.Select(y => Tuple.Create(x.Key, y)))
        .GroupBy(x => x.Item2)
        .ToDictionary(g => g.Key, g => g.Select(t => t.Item1).ToList());
}
/// <summary>
/// Inverts the dictionary into an ILookup using PLINQ: (value, key) pairs are built
/// sequentially, then grouped by value in parallel. (Despite the name, the pairs are
/// Tuples, matching the original implementation.)
/// </summary>
public static ILookup<T, T> BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel<T>(Dictionary<T, List<T>> data)
{
    var pairs = data.SelectMany(entry => entry.Value, (entry, value) => Tuple.Create(value, entry.Key));
    return pairs.AsParallel().ToLookup(pair => pair.Item1, pair => pair.Item2);
}
/// <summary>
/// Inverts the dictionary into an ILookup sequentially: each (value, key) pair is
/// grouped by value, preserving encounter order within each group.
/// </summary>
public static ILookup<T, T> BionicCodeLinqWithoutDictionaryWithValueTuple<T>(Dictionary<T, List<T>> data)
{
    return data
        .SelectMany(entry => entry.Value, (entry, value) => Tuple.Create(value, entry.Key))
        .ToLookup(pair => pair.Item1, pair => pair.Item2);
}
/// <summary>
/// Inverts the dictionary via an intermediate ILookup built from ValueTuples, then
/// materialises each group into a List to produce a Dictionary result.
/// </summary>
public static Dictionary<T, List<T>> BionicCodeLinq<T>(Dictionary<T, List<T>> data)
{
    var lookup = data
        .SelectMany(entry => entry.Value, (entry, value) => ValueTuple.Create(value, entry.Key))
        .ToLookup(pair => pair.Item1, pair => pair.Item2);
    return lookup.ToDictionary(group => group.Key, group => group.ToList());
}
/// <summary>
/// Inverts the dictionary with a plain single-threaded double loop: for every
/// (key, value) pair, appends key to the result list stored under value.
/// </summary>
public static Dictionary<T, List<T>> SerialLoop<T>(Dictionary<T, List<T>> data)
{
    var inverted = new Dictionary<T, List<T>>();
    foreach (var entry in data)
    {
        foreach (var value in entry.Value)
        {
            // Create the bucket lazily on first sight of this value.
            if (!inverted.TryGetValue(value, out var bucket))
            {
                bucket = new List<T>();
                inverted.Add(value, bucket);
            }
            bucket.Add(entry.Key);
        }
    }
    return inverted;
}
/// <summary>
/// Stable two-way merge of the key-sorted arrays <paramref name="a"/> and
/// <paramref name="b"/> into <paramref name="res"/> (which must have length
/// a.Length + b.Length). On equal keys, elements of <paramref name="a"/> come first.
/// </summary>
public static void Merge<T>(KeyValuePair<T, T>[] a, KeyValuePair<T, T>[] b, KeyValuePair<T, T>[] res) where T : IComparable<T>
{
    int ia = 0, ib = 0, io = 0;
    // Take from whichever side has the smaller key; ties favour `a` for stability.
    while (ia < a.Length && ib < b.Length)
    {
        res[io++] = a[ia].Key.CompareTo(b[ib].Key) > 0 ? b[ib++] : a[ia++];
    }
    // Exactly one of these drains the leftover tail.
    while (ia < a.Length)
    {
        res[io++] = a[ia++];
    }
    while (ib < b.Length)
    {
        res[io++] = b[ib++];
    }
}
/// <summary>
/// Inverts <paramref name="data"/> by flattening it to (value, key) pairs, sorting
/// 2^<paramref name="threadNumber"/> partitions of the pair list in parallel,
/// pairwise-merging the sorted runs (also in parallel), and finally grouping the
/// single sorted run into a dictionary in one linear scan.
/// NOTE: threadNumber is the log2 of the partition count, not the count itself.
/// </summary>
public static Dictionary<T, List<T>> ParallelSorts<T>(Dictionary<T, List<T>> data, int threadNumber) where T : IComparable<T>
{
    // Flatten to (value, key) pairs — the inversion happens here.
    var kvs = new List<KeyValuePair<T, T>>();
    foreach (var kv in data)
    {
        var k = kv.Key;
        foreach (var v in kv.Value)
        {
            kvs.Add(new KeyValuePair<T, T>(v, k));
        }
    }
    // The grouping scan below reads kvs2[0], so bail out early on empty input.
    if (kvs.Count == 0)
    {
        return new Dictionary<T, List<T>>();
    }
    int threads = 1 << threadNumber;
    // partitions[p]..partitions[p+1] is the half-open slice handled by worker p.
    int[] partitions = new int[threads + 1];
    for (int pi = 0; pi < threads; pi++)
    {
        partitions[pi] = (kvs.Count * pi) / threads;
    }
    partitions[threads] = kvs.Count;
    // Copy each slice into its own array and sort it by key, one task per slice.
    var subLists = new KeyValuePair<T, T>[threads][];
    var tasks = new Action[threads];
    for (int pi = 0; pi < threads; pi++)
    {
        var _pi = pi; // capture a fresh copy for the closure
        var sl = subLists[pi] = new KeyValuePair<T, T>[partitions[_pi + 1] - partitions[_pi]];
        tasks[_pi] = () =>
        {
            kvs.CopyTo(partitions[_pi], sl, 0, sl.Length);
            Array.Sort(sl, (a, b) => a.Key.CompareTo(b.Key));
        };
    }
    Parallel.Invoke(tasks);
    // Pairwise merge passes: after each pass only every (2*stride)-th slot holds a
    // live run; after the last pass subLists[0] is the fully sorted pair array.
    for (int stride = 1; stride < threads; stride *= 2)
    {
        tasks = new Action[threads / (stride * 2)];
        for (int pi = 0; pi < threads; pi += stride * 2)
        {
            var a = subLists[pi];
            var b = subLists[pi + stride];
            var res = subLists[pi] = new KeyValuePair<T, T>[a.Length + b.Length];
            subLists[pi + stride] = null; // allow the consumed run to be collected
            tasks[pi / (stride * 2)] = () => Merge(a, b, res);
        }
        Parallel.Invoke(tasks);
    }
    // Group the sorted run: equal keys are adjacent, so a single scan builds each
    // list and flushes it when the key changes (plus a final flush at the end).
    var dictionary = new Dictionary<T, List<T>>();
    var kvs2 = subLists[0];
    var l = new List<T>();
    T lastKey = kvs2[0].Key;
    for (int i = 0; i < kvs2.Length; i++)
    {
        var next = kvs2[i];
        if (next.Key.CompareTo(lastKey) != 0)
        {
            dictionary.Add(lastKey, l);
            lastKey = next.Key;
            l = new List<T>() { next.Value };
        }
        else
        {
            l.Add(next.Value);
        }
    }
    dictionary.Add(lastKey, l);
    return dictionary;
}
/// <summary>
/// Inverts <paramref name="data"/> into one shared dictionary: input partitions are
/// processed in parallel, a ReaderWriterLockSlim guards creation of result lists,
/// and each list synchronises its own appends via ListWithConcurrentAdd.SafeAdd.
/// The result's values are ListWithConcurrentAdd instances (still List&lt;T&gt;s).
/// </summary>
public static Dictionary<T, List<T>> ParallelLoop<T>(Dictionary<T, List<T>> data, int threads)
{
    var res = new Dictionary<T, List<T>>();
    var kvs = data.ToArray();
    // partitions[p]..partitions[p+1] is the half-open slice handled by worker p.
    int[] partitions = new int[threads + 1];
    for (int pi = 0; pi < threads; pi++)
    {
        partitions[pi] = (kvs.Length * pi) / threads;
    }
    partitions[threads] = kvs.Length;
    var rwl = new System.Threading.ReaderWriterLockSlim();
    void RunPartition(int start, int end)
    {
        for (int i = start; i < end; i++)
        {
            var kv = kvs[i];
            foreach (var v in kv.Value)
            {
                // perform a low-contention check (read lock only)
                rwl.EnterReadLock();
                bool found = res.TryGetValue(v, out var list);
                rwl.ExitReadLock();
                if (!found)
                {
                    // check again under the write lock, and add a new list if we
                    // still need one (another thread may have won the race)
                    rwl.EnterWriteLock();
                    if (!res.TryGetValue(v, out list))
                        res.Add(v, list = new ListWithConcurrentAdd<T>());
                    rwl.ExitWriteLock();
                }
                // this seems to be no better than using a simple lock
                ((ListWithConcurrentAdd<T>)list).SafeAdd(kv.Key);
            }
        }
    }
    var tasks = new Action[threads];
    for (int pi = 0; pi < threads; pi++)
    {
        int _pi = pi; // capture a fresh copy for the closure
        tasks[_pi] = (() => RunPartition(partitions[_pi], partitions[_pi + 1]));
    }
    Parallel.Invoke(tasks);
    return res;
}
/// <summary>
/// Inverts <paramref name="data"/> using a ConcurrentDictionary for the shared
/// result: partitions of the input run in parallel, GetOrAdd creates buckets, and a
/// per-bucket lock serialises appends. The order of values within each result list
/// is therefore nondeterministic across threads.
/// </summary>
public static Dictionary<T, List<T>> ConcurrentDictionary<T>(Dictionary<T, List<T>> data, int threads)
{
    var result = new ConcurrentDictionary<T, List<T>>();
    var entries = data.ToList();
    // bounds[p]..bounds[p+1] is the half-open slice handled by worker p.
    var bounds = new int[threads + 1];
    for (int p = 0; p < threads; p++)
    {
        bounds[p] = (entries.Count * p) / threads;
    }
    bounds[threads] = entries.Count;
    void InvertRange(int from, int to)
    {
        for (int i = from; i < to; i++)
        {
            var entry = entries[i];
            foreach (var value in entry.Value)
            {
                var bucket = result.GetOrAdd(value, _ => new List<T>());
                lock (bucket)
                {
                    bucket.Add(entry.Key);
                }
            }
        }
    }
    var workers = new Action[threads];
    for (int p = 0; p < threads; p++)
    {
        int local = p; // fresh copy so each closure sees its own index
        workers[local] = () => InvertRange(bounds[local], bounds[local + 1]);
    }
    Parallel.Invoke(workers);
    return result.ToDictionary(kv => kv.Key, kv => kv.Value);
}
/// <summary>
/// Like the ConcurrentDictionary inversion, but partitions only the key array and
/// fetches each key's value list from <paramref name="data"/> on demand. Concurrent
/// reads of the source dictionary are safe because it is never mutated here.
/// </summary>
public static Dictionary<T, List<T>> ConcurrentDictionaryDeferredValues<T>(Dictionary<T, List<T>> data, int threads)
{
    var result = new ConcurrentDictionary<T, List<T>>();
    var keys = data.Keys.ToArray();
    // bounds[p]..bounds[p+1] is the half-open slice of keys handled by worker p.
    var bounds = new int[threads + 1];
    for (int p = 0; p < threads; p++)
    {
        bounds[p] = (keys.Length * p) / threads;
    }
    bounds[threads] = keys.Length;
    void InvertRange(int from, int to)
    {
        for (int i = from; i < to; i++)
        {
            var key = keys[i];
            foreach (var value in data[key])
            {
                var bucket = result.GetOrAdd(value, _ => new List<T>());
                lock (bucket)
                {
                    bucket.Add(key);
                }
            }
        }
    }
    var workers = new Action[threads];
    for (int p = 0; p < threads; p++)
    {
        int local = p; // fresh copy so each closure sees its own index
        workers[local] = () => InvertRange(bounds[local], bounds[local + 1]);
    }
    Parallel.Invoke(workers);
    return result.ToDictionary(kv => kv.Key, kv => kv.Value);
}
/// <summary>
/// Inverts <paramref name="data"/> by giving each worker its own private dictionary
/// (no synchronisation while inverting), then serially folding the partial results
/// into the first worker's dictionary.
/// </summary>
public static Dictionary<T, List<T>> MergingDictionaries<T>(Dictionary<T, List<T>> data, int threads)
{
    var entries = data.ToArray();
    // bounds[p]..bounds[p+1] is the half-open slice handled by worker p, which
    // writes only into partial[p].
    var bounds = new int[threads + 1];
    var partial = new Dictionary<T, List<T>>[threads];
    for (int p = 0; p < threads; p++)
    {
        bounds[p] = (entries.Length * p) / threads;
        partial[p] = new Dictionary<T, List<T>>();
    }
    bounds[threads] = entries.Length;
    void InvertRange(int from, int to, Dictionary<T, List<T>> into)
    {
        for (int i = from; i < to; i++)
        {
            var entry = entries[i];
            foreach (var value in entry.Value)
            {
                if (!into.TryGetValue(value, out var bucket))
                    into.Add(value, bucket = new List<T>());
                bucket.Add(entry.Key);
            }
        }
    }
    var workers = new Action[threads];
    for (int p = 0; p < threads; p++)
    {
        int local = p; // fresh copy so each closure sees its own index
        workers[local] = () => InvertRange(bounds[local], bounds[local + 1], partial[local]);
    }
    Parallel.Invoke(workers);
    // merge (parallelise?) — fold partial[1..] into partial[0] on this thread.
    var merged = partial[0];
    for (int p = 1; p < threads; p++)
    {
        foreach (var kv in partial[p])
        {
            if (merged.TryGetValue(kv.Key, out var bucket))
            {
                bucket.AddRange(kv.Value);
            }
            else
            {
                merged.Add(kv.Key, kv.Value);
            }
        }
    }
    return merged;
}
/// <summary>
/// Inverts <paramref name="data"/> by building one private dictionary per partition
/// in parallel, then merging the partials into the shared result concurrently: a
/// ReaderWriterLockSlim guards key insertion, and a per-list lock guards appends.
/// The first worker to insert a key donates its whole temp list to the result;
/// later workers append to that list under its lock.
/// </summary>
public static Dictionary<T, List<T>> ParallelMergingDictionaries<T>(Dictionary<T, List<T>> data, int threads)
{
    var kvs = data.ToArray();
    // partitions[p]..partitions[p+1] is the half-open slice handled by worker p.
    var partitions = new int[threads + 1];
    var res = new Dictionary<T, List<T>>();
    for (int pi = 0; pi < threads; pi++)
    {
        partitions[pi] = (kvs.Length * pi) / threads;
    }
    partitions[threads] = kvs.Length;
    var rwl = new System.Threading.ReaderWriterLockSlim();
    void RunPartition(int start, int end)
    {
        // fill temp — private to this worker, so no locking needed here
        var temp = new Dictionary<T, List<T>>();
        for (int i = start; i < end; i++)
        {
            var kv = kvs[i];
            foreach (var v in kv.Value)
            {
                if (!temp.TryGetValue(v, out var list))
                    temp.Add(v, list = new List<T>());
                list.Add(kv.Key);
            }
        }
        // parallel merge of temp into the shared result
        foreach (var kv in temp)
        {
            // perform a low-contention check (read lock only)
            rwl.EnterReadLock();
            bool found = res.TryGetValue(kv.Key, out var list);
            rwl.ExitReadLock();
            if (!found)
            {
                // check again, and add us if there is nothing already
                rwl.EnterWriteLock();
                if (!res.TryGetValue(kv.Key, out list))
                {
                    // our temp list becomes the shared list; nothing left to append,
                    // so null out `list` to skip the AddRange below
                    res.Add(kv.Key, kv.Value);
                    list = null;
                }
                rwl.ExitWriteLock();
            }
            if (list != null)
            {
                // another worker owns the shared list; append under its lock
                lock (list)
                {
                    list.AddRange(kv.Value);
                }
            }
        }
    }
    var tasks = new Action[threads];
    for (int pi = 0; pi < threads; pi++)
    {
        int _pi = pi; // capture a fresh copy for the closure
        tasks[_pi] = (() => RunPartition(partitions[_pi], partitions[_pi + 1]));
    }
    Parallel.Invoke(tasks);
    return res;
}
}
/// <summary>A named benchmark input: a multi-dictionary to be inverted.</summary>
public class Problem
{
    public Problem(string name, Dictionary<int, List<int>> data)
    {
        Name = name ?? throw new ArgumentNullException(nameof(name));
        Data = data ?? throw new ArgumentNullException(nameof(data));
    }

    public string Name { get; }
    public Dictionary<int, List<int>> Data { get; }

    public override string ToString() => Name;

    /// <summary>
    /// Builds a random multi-dictionary over keys 0..n-1. Each key's list length is
    /// a rounded exponential variate with rate <paramref name="µ"/>; keys whose
    /// length rounds to zero are omitted. Values are uniform in [0, n).
    /// </summary>
    public static Dictionary<int, List<int>> Random(Random rnd, int n, double µ)
    {
        var res = new Dictionary<int, List<int>>();
        for (int i = 0; i < n; i++)
        {
            // Inverse-transform sample of an exponential distribution.
            int count = (int)Math.Round(Math.Log(1 - rnd.NextDouble()) / -µ);
            if (count <= 0)
                continue;
            var values = new List<int>(count);
            for (int j = 0; j < count; j++)
                values.Add(rnd.Next(n));
            res.Add(i, values);
        }
        return res;
    }
}
/// <summary>
/// BenchmarkDotNet harness: each method runs one inversion strategy against the
/// current <see cref="Problem"/>. Trailing digits name the thread count (for
/// ParallelSorts they map to log2: 1→0, 2→1, 4→2, 8→3).
/// </summary>
public class Benchmark
{
    /// <summary>
    /// Six problems: n in { 10^4, 10^5, 10^6 } crossed with rate µ in { 0.1, 0.01 },
    /// all generated from the same seed for repeatability.
    /// </summary>
    public Problem[] Problems { get; set; } = BuildProblems();

    private static Problem[] BuildProblems()
    {
        var sizes = new int[] { 10000, 100000, 1000000 };
        var dense = sizes.Select(n => new Problem($"n={n} µ=0.1", Problem.Random(new Random(1), n, 0.1)));
        var sparse = sizes.Select(n => new Problem($"n={n} µ=0.01", Problem.Random(new Random(1), n, 0.01)));
        return dense.Concat(sparse).ToArray();
    }

    [ParamsSource(nameof(Problems))]
    public Problem Problem { get; set; }

    // Only one benchmark is currently active; restore [Benchmark] on others to run them.
    //[Benchmark]
    public void OpLinq1() => _ = Methods.OpLinq(Problem.Data);
    //[Benchmark]
    public void BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel() => _ = Methods.BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel(Problem.Data);
    [Benchmark]
    public void BionicCodeLinqWithoutDictionaryWithValueTuple1() => _ = Methods.BionicCodeLinqWithoutDictionaryWithValueTuple(Problem.Data);
    //[Benchmark]
    public void BionicCodeLinq1() => _ = Methods.BionicCodeLinq(Problem.Data);
    //[Benchmark]
    public void SerialLoop1() => _ = Methods.SerialLoop(Problem.Data);
    //[Benchmark]
    public void ParallelLoop1() => _ = Methods.ParallelLoop(Problem.Data, 1);
    //[Benchmark]
    public void ParallelLoop2() => _ = Methods.ParallelLoop(Problem.Data, 2);
    //[Benchmark]
    public void ParallelLoop4() => _ = Methods.ParallelLoop(Problem.Data, 4);
    //[Benchmark]
    public void ParallelLoop8() => _ = Methods.ParallelLoop(Problem.Data, 8);
    //[Benchmark]
    public void ConcurrentDictionary1() => _ = Methods.ConcurrentDictionary(Problem.Data, 1);
    //[Benchmark]
    public void ConcurrentDictionary2() => _ = Methods.ConcurrentDictionary(Problem.Data, 2);
    //[Benchmark]
    public void ConcurrentDictionary4() => _ = Methods.ConcurrentDictionary(Problem.Data, 4);
    //[Benchmark]
    public void ConcurrentDictionary8() => _ = Methods.ConcurrentDictionary(Problem.Data, 8);
    //[Benchmark]
    public void ConcurrentDictionaryDeferredValues1() => _ = Methods.ConcurrentDictionaryDeferredValues(Problem.Data, 1);
    //[Benchmark]
    public void ConcurrentDictionaryDeferredValues2() => _ = Methods.ConcurrentDictionaryDeferredValues(Problem.Data, 2);
    //[Benchmark]
    public void ConcurrentDictionaryDeferredValues4() => _ = Methods.ConcurrentDictionaryDeferredValues(Problem.Data, 4);
    //[Benchmark]
    public void ConcurrentDictionaryDeferredValues8() => _ = Methods.ConcurrentDictionaryDeferredValues(Problem.Data, 8);
    //[Benchmark]
    public void MergingDictionaries1() => _ = Methods.MergingDictionaries(Problem.Data, 1);
    //[Benchmark]
    public void MergingDictionaries2() => _ = Methods.MergingDictionaries(Problem.Data, 2);
    //[Benchmark]
    public void MergingDictionaries4() => _ = Methods.MergingDictionaries(Problem.Data, 4);
    //[Benchmark]
    public void MergingDictionaries8() => _ = Methods.MergingDictionaries(Problem.Data, 8);
    //[Benchmark]
    public void ParallelMergingDictionaries1() => _ = Methods.ParallelMergingDictionaries(Problem.Data, 1);
    //[Benchmark]
    public void ParallelMergingDictionaries2() => _ = Methods.ParallelMergingDictionaries(Problem.Data, 2);
    //[Benchmark]
    public void ParallelMergingDictionaries4() => _ = Methods.ParallelMergingDictionaries(Problem.Data, 4);
    //[Benchmark]
    public void ParallelMergingDictionaries8() => _ = Methods.ParallelMergingDictionaries(Problem.Data, 8);
    //[Benchmark]
    public void ParallelSorts1() => _ = Methods.ParallelSorts(Problem.Data, 0);
    //[Benchmark]
    public void ParallelSorts2() => _ = Methods.ParallelSorts(Problem.Data, 1);
    //[Benchmark]
    public void ParallelSorts4() => _ = Methods.ParallelSorts(Problem.Data, 2);
    //[Benchmark]
    public void ParallelSorts8() => _ = Methods.ParallelSorts(Problem.Data, 3);
}
}
BenchmarkDotNet=v0.12.1, OS=Windows 10.0.19041.388 (2004/?/20H1)
AMD Ryzen 7 3700X, 1 CPU, 16 logical and 8 physical cores
.NET Core SDK=5.0.100-preview.5.20279.10
  [Host]     : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT
  DefaultJob : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT

Method Problem Mean Error StdDev Median
Huh n=10000 µ=0.01 31.363 ms 0.3386 ms 0.3168 ms 31.361 ms
SerialLoop1 n=10000 µ=0.01 35.931 ms 0.7069 ms 0.8415 ms 36.088 ms
ParallelLoop2 n=10000 µ=0.01 61.540 ms 1.2070 ms 1.4368 ms 61.249 ms
ParallelLoop4 n=10000 µ=0.01 71.858 ms 2.2479 ms 6.5572 ms 70.871 ms
ConcurrentDictionary2 n=10000 µ=0.01 31.982 ms 0.4894 ms 0.4087 ms 31.874 ms
ConcurrentDictionary4 n=10000 µ=0.01 31.897 ms 0.6335 ms 1.5056 ms 31.992 ms
ConcurrentDictionaryDeferredValues2 n=10000 µ=0.01 32.182 ms 0.3783 ms 0.2953 ms 32.113 ms
ConcurrentDictionaryDeferredValues4 n=10000 µ=0.01 33.390 ms 0.6605 ms 1.1035 ms 33.361 ms
MergingDictionaries2 n=10000 µ=0.01 34.960 ms 0.6230 ms 0.5523 ms 34.959 ms
MergingDictionaries4 n=10000 µ=0.01 47.630 ms 0.5009 ms 0.4440 ms 47.596 ms
ParallelMergingDictionaries2 n=10000 µ=0.01 37.900 ms 0.1228 ms 0.2695 ms 37.932 ms
ParallelMergingDictionaries4 n=10000 µ=0.01 50.938 ms 1.0066 ms 1.1189 ms 50.826 ms
ParallelSorts2 n=10000 µ=0.01 99.040 ms 0.3797 ms 0.3552 ms 99.054 ms
ParallelSorts4 n=10000 µ=0.01 62.200 ms 0.1710 ms 0.1599 ms 62.158 ms
ParallelSorts8 n=10000 µ=0.01 45.941 ms 0.7675 ms 0.7179 ms 45.659 ms
Huh n=10000 µ=0.1 10.259 ms 0.1689 ms 0.1580 ms 10.306 ms
SerialLoop1 n=10000 µ=0.1 8.095 ms 0.0639 ms 0.0566 ms 8.079 ms
ParallelLoop2 n=10000 µ=0.1 11.593 ms 0.2413 ms 0.6885 ms 11.410 ms
ParallelLoop4 n=10000 µ=0.1 13.015 ms 0.2571 ms 0.5590 ms 13.013 ms
ConcurrentDictionary2 n=10000 µ=0.1 10.122 ms 0.1893 ms 0.1771 ms 10.129 ms
ConcurrentDictionary4 n=10000 µ=0.1 9.653 ms 0.1874 ms 0.2370 ms 9.674 ms
ConcurrentDictionaryDeferredValues2 n=10000 µ=0.1 10.067 ms 0.1915 ms 0.2049 ms 10.033 ms
ConcurrentDictionaryDeferredValues4 n=10000 µ=0.1 9.370 ms 0.1791 ms 0.2993 ms 9.341 ms
MergingDictionaries2 n=10000 µ=0.1 10.842 ms 0.2134 ms 0.3902 ms 10.712 ms
MergingDictionaries4 n=10000 µ=0.1 15.169 ms 0.2318 ms 0.2670 ms 15.159 ms
ParallelMergingDictionaries2 n=10000 µ=0.1 13.496 ms 0.1942 ms 0.1722 ms 13.423 ms
ParallelMergingDictionaries4 n=10000 µ=0.1 18.477 ms 0.3556 ms 0.4233 ms 18.416 ms
ParallelSorts2 n=10000 µ=0.1 12.054 ms 0.0342 ms 0.0320 ms 12.040 ms
ParallelSorts4 n=10000 µ=0.1 8.974 ms 0.0411 ms 0.0364 ms 8.967 ms
ParallelSorts8 n=10000 µ=0.1 9.987 ms 0.0696 ms 0.0651 ms 9.981 ms
Huh n=100000 µ=0.01 518.729 ms 7.1735 ms 5.9902 ms 516.710 ms
SerialLoop1 n=100000 µ=0.01 1,199.785 ms 14.9534 ms 13.9874 ms 1,193.700 ms
ParallelLoop2 n=100000 µ=0.01 1,541.562 ms 30.4498 ms 56.4407 ms 1,530.400 ms
ParallelLoop4 n=100000 µ=0.01 1,408.749 ms 64.9087 ms 191.3846 ms 1,421.528 ms
ConcurrentDictionary2 n=100000 µ=0.01 1,008.784 ms 5.5819 ms 4.3580 ms 1,007.749 ms
ConcurrentDictionary4 n=100000 µ=0.01 730.784 ms 14.4512 ms 25.6870 ms 728.527 ms
ConcurrentDictionaryDeferredValues2 n=100000 µ=0.01 1,095.557 ms 21.6359 ms 31.7137 ms 1,099.020 ms
ConcurrentDictionaryDeferredValues4 n=100000 µ=0.01 725.664 ms 14.1405 ms 15.1301 ms 721.591 ms
MergingDictionaries2 n=100000 µ=0.01 1,409.597 ms 9.5850 ms 8.4968 ms 1,407.724 ms
MergingDictionaries4 n=100000 µ=0.01 1,548.207 ms 30.5889 ms 39.7742 ms 1,564.697 ms
ParallelMergingDictionaries2 n=100000 µ=0.01 1,359.468 ms 26.4681 ms 55.2488 ms 1,360.113 ms
ParallelMergingDictionaries4 n=100000 µ=0.01 1,393.863 ms 11.4453 ms 10.1460 ms 1,394.967 ms
ParallelSorts2 n=100000 µ=0.01 1,205.158 ms 21.9159 ms 20.5002 ms 1,200.800 ms
ParallelSorts4 n=100000 µ=0.01 727.963 ms 1.9906 ms 1.5542 ms 728.385 ms
ParallelSorts8 n=100000 µ=0.01 516.042 ms 5.9154 ms 5.2439 ms 513.913 ms
Huh n=100000 µ=0.1 149.347 ms 2.3247 ms 2.0608 ms 149.543 ms
SerialLoop1 n=100000 µ=0.1 149.383 ms 2.9734 ms 5.1289 ms 147.335 ms
ParallelLoop2 n=100000 µ=0.1 159.384 ms 3.1397 ms 6.6227 ms 157.770 ms
ParallelLoop4 n=100000 µ=0.1 189.243 ms 4.4015 ms 12.9088 ms 188.084 ms
ConcurrentDictionary2 n=100000 µ=0.1 168.204 ms 3.2387 ms 4.2113 ms 166.493 ms
ConcurrentDictionary4 n=100000 µ=0.1 153.573 ms 2.9840 ms 6.6124 ms 153.941 ms
ConcurrentDictionaryDeferredValues2 n=100000 µ=0.1 169.814 ms 3.3162 ms 6.7742 ms 168.744 ms
ConcurrentDictionaryDeferredValues4 n=100000 µ=0.1 171.568 ms 3.4097 ms 7.2663 ms 170.455 ms
MergingDictionaries2 n=100000 µ=0.1 192.586 ms 1.3396 ms 1.1875 ms 192.780 ms
MergingDictionaries4 n=100000 µ=0.1 210.484 ms 4.0299 ms 3.9579 ms 210.291 ms
ParallelMergingDictionaries2 n=100000 µ=0.1 204.659 ms 2.9166 ms 2.4355 ms 205.400 ms
ParallelMergingDictionaries4 n=100000 µ=0.1 232.555 ms 4.9688 ms 14.6505 ms 232.649 ms
ParallelSorts2 n=100000 µ=0.1 142.611 ms 0.5190 ms 0.4334 ms 142.622 ms
ParallelSorts4 n=100000 µ=0.1 109.110 ms 2.1503 ms 1.9062 ms 109.033 ms
ParallelSorts8 n=100000 µ=0.1 85.055 ms 1.6262 ms 1.5971 ms 84.649 ms
Huh n=1000000 µ=0.01 11,406.109 ms 225.6709 ms 308.9008 ms 11,365.703 ms
SerialLoop1 n=1000000 µ=0.01 27,513.045 ms 456.2912 ms 426.8151 ms 27,363.127 ms
ParallelLoop2 n=1000000 µ=0.01 27,646.116 ms 551.0908 ms 1,209.6576 ms 27,059.960 ms
ParallelLoop4 n=1000000 µ=0.01 19,883.677 ms 902.2539 ms 2,544.8231 ms 18,874.036 ms
ConcurrentDictionary2 n=1000000 µ=0.01 20,331.969 ms 231.1296 ms 216.1987 ms 20,395.356 ms
ConcurrentDictionary4 n=1000000 µ=0.01 13,605.628 ms 271.1457 ms 437.8498 ms 13,453.914 ms
ConcurrentDictionaryDeferredValues2 n=1000000 µ=0.01 20,591.583 ms 121.7534 ms 101.6696 ms 20,618.593 ms
ConcurrentDictionaryDeferredValues4 n=1000000 µ=0.01 14,001.509 ms 269.2695 ms 359.4670 ms 14,123.263 ms
MergingDictionaries2 n=1000000 µ=0.01 22,944.087 ms 444.1345 ms 691.4639 ms 22,719.739 ms
MergingDictionaries4 n=1000000 µ=0.01 26,448.207 ms 511.0787 ms 984.6752 ms 26,300.232 ms
ParallelMergingDictionaries2 n=1000000 µ=0.01 25,022.089 ms 259.9266 ms 243.1355 ms 25,111.304 ms
ParallelMergingDictionaries4 n=1000000 µ=0.01 25,255.132 ms 133.8377 ms 125.1919 ms 25,242.958 ms
ParallelSorts2 n=1000000 µ=0.01 13,695.407 ms 94.1859 ms 88.1016 ms 13,669.899 ms
ParallelSorts4 n=1000000 µ=0.01 8,919.799 ms 51.6437 ms 48.3075 ms 8,927.933 ms
ParallelSorts8 n=1000000 µ=0.01 6,579.977 ms 25.9267 ms 24.2518 ms 6,578.922 ms
Huh n=1000000 µ=0.1 2,206.787 ms 7.4515 ms 6.2223 ms 2,207.031 ms
SerialLoop1 n=1000000 µ=0.1 2,801.615 ms 27.8231 ms 26.0257 ms 2,809.148 ms
ParallelLoop2 n=1000000 µ=0.1 2,940.443 ms 31.8582 ms 63.6242 ms 2,920.038 ms
ParallelLoop4 n=1000000 µ=0.1 2,556.818 ms 50.8591 ms 113.7537 ms 2,586.819 ms
ConcurrentDictionary2 n=1000000 µ=0.1 3,104.147 ms 14.4913 ms 11.3139 ms 3,103.152 ms
ConcurrentDictionary4 n=1000000 µ=0.1 2,489.005 ms 9.8713 ms 9.2336 ms 2,487.631 ms
ConcurrentDictionaryDeferredValues2 n=1000000 µ=0.1 3,137.481 ms 38.2311 ms 35.7614 ms 3,116.020 ms
ConcurrentDictionaryDeferredValues4 n=1000000 µ=0.1 2,515.881 ms 19.1950 ms 17.9550 ms 2,511.293 ms
MergingDictionaries2 n=1000000 µ=0.1 2,910.784 ms 11.9097 ms 9.9452 ms 2,910.627 ms
MergingDictionaries4 n=1000000 µ=0.1 3,292.523 ms 64.8974 ms 60.7050 ms 3,297.549 ms
ParallelMergingDictionaries2 n=1000000 µ=0.1 3,190.220 ms 22.1962 ms 20.7624 ms 3,182.783 ms
ParallelMergingDictionaries4 n=1000000 µ=0.1 3,009.543 ms 56.6593 ms 77.5559 ms 2,989.288 ms
ParallelSorts2 n=1000000 µ=0.1 1,581.943 ms 9.1477 ms 8.1092 ms 1,582.990 ms
ParallelSorts4 n=1000000 µ=0.1 1,101.037 ms 5.3140 ms 4.9707 ms 1,099.699 ms
ParallelSorts8 n=1000000 µ=0.1 1,163.204 ms 6.7435 ms 5.9779 ms 1,162.661 ms

“Huh” is ConcurrentDictionary8 (included in the run by accident).

BenchmarkDotNet=v0.12.1, OS=Windows 10.0.19041.388 (2004/?/20H1)
AMD Ryzen 7 3700X, 1 CPU, 16 logical and 8 physical cores
.NET Core SDK=5.0.100-preview.5.20279.10
  [Host]     : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT
  DefaultJob : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT

Method Problem Mean Error StdDev
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=1000000 µ=0.1 4.282 s 0.0841 s 0.1621 s
SerialLoop1 n=1000000 µ=0.1 2.889 s 0.0174 s 0.0163 s
ConcurrentDictionary4 n=1000000 µ=0.1 2.338 s 0.0247 s 0.0219 s
ParallelSorts4 n=1000000 µ=0.1 1.141 s 0.0111 s 0.0104 s
BenchmarkDotNet=v0.12.1, OS=Windows 10.0.19041.388 (2004/?/20H1)
AMD Ryzen 7 3700X, 1 CPU, 16 logical and 8 physical cores
.NET Core SDK=5.0.100-preview.5.20279.10
  [Host]     : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT
  DefaultJob : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT

Method Problem Mean Error StdDev
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=10000 µ=0.01 146.28 ms 2.913 ms 4.787 ms
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=10000 µ=0.1 21.38 ms 0.426 ms 0.553 ms
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=100000 µ=0.01 1,606.17 ms 12.583 ms 11.154 ms
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=100000 µ=0.1 277.13 ms 5.378 ms 5.282 ms
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=1000000 µ=0.01 25,142.36 ms 478.130 ms 531.440 ms
BionicCodeLinqWithoutDictionaryWithValueTupleWithParallel1 n=1000000 µ=0.1 4,226.38 ms 81.928 ms 134.610 ms
BenchmarkDotNet=v0.12.1, OS=Windows 10.0.19041.388 (2004/?/20H1)
AMD Ryzen 7 3700X, 1 CPU, 16 logical and 8 physical cores
.NET Core SDK=5.0.100-preview.5.20279.10
  [Host]     : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT
  DefaultJob : .NET Core 3.1.6 (CoreCLR 4.700.20.26901, CoreFX 4.700.20.31603), X64 RyuJIT

Method Problem Mean Error StdDev
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=10000 µ=0.01 62.26 ms 0.718 ms 0.671 ms
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=10000 µ=0.1 13.57 ms 0.124 ms 0.116 ms
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=100000 µ=0.01 2,807.85 ms 15.035 ms 12.555 ms
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=100000 µ=0.1 422.61 ms 6.181 ms 6.071 ms
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=1000000 µ=0.01 55,810.90 ms 238.086 ms 222.706 ms
BionicCodeLinqWithoutDictionaryWithValueTuple1 n=1000000 µ=0.1 12,569.01 ms 133.934 ms 118.729 ms
using Microsoft.VisualStudio.TestTools.UnitTesting;
using MultiDictionaryInversion;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Tests
{
[TestClass]
public class MethodTests
{
    [TestMethod]
    public void TestOpLinq1() => Test(Methods.OpLinq<int>);
    [TestMethod]
    public void TestBionicCodeLinq1() => Test(Methods.BionicCodeLinq<int>);
    [TestMethod]
    public void TestOpenLoop1() => Test(Methods.SerialLoop<int>);
    [TestMethod]
    public void TestParallelLoop1() => Test(d => Methods.ParallelLoop(d, 1));
    [TestMethod]
    public void TestParallelLoop2() => Test(d => Methods.ParallelLoop(d, 2));
    [TestMethod]
    public void TestParallelLoop4() => Test(d => Methods.ParallelLoop(d, 4));
    [TestMethod]
    public void TestParallelLoop8() => Test(d => Methods.ParallelLoop(d, 8));
    [TestMethod]
    public void TestConcurrentDictionary1() => Test(d => Methods.ConcurrentDictionary(d, 1));
    [TestMethod]
    public void TestConcurrentDictionary2() => Test(d => Methods.ConcurrentDictionary(d, 2));
    [TestMethod]
    public void TestConcurrentDictionary4() => Test(d => Methods.ConcurrentDictionary(d, 4));
    [TestMethod]
    public void TestConcurrentDictionary8() => Test(d => Methods.ConcurrentDictionary(d, 8));
    [TestMethod]
    public void TestConcurrentDictionaryDeferredValues1() => Test(d => Methods.ConcurrentDictionaryDeferredValues(d, 1));
    [TestMethod]
    public void TestConcurrentDictionaryDeferredValues2() => Test(d => Methods.ConcurrentDictionaryDeferredValues(d, 2));
    [TestMethod]
    public void TestConcurrentDictionaryDeferredValues4() => Test(d => Methods.ConcurrentDictionaryDeferredValues(d, 4));
    [TestMethod]
    public void TestConcurrentDictionaryDeferredValues8() => Test(d => Methods.ConcurrentDictionaryDeferredValues(d, 8));
    [TestMethod]
    public void TestMergingDictionaries1() => Test(d => Methods.MergingDictionaries(d, 1));
    [TestMethod]
    public void TestMergingDictionaries2() => Test(d => Methods.MergingDictionaries(d, 2));
    [TestMethod]
    public void TestMergingDictionaries4() => Test(d => Methods.MergingDictionaries(d, 4));
    [TestMethod]
    public void TestMergingDictionaries8() => Test(d => Methods.MergingDictionaries(d, 8));
    [TestMethod]
    public void TestParallelMergingDictionaries1() => Test(d => Methods.ParallelMergingDictionaries(d, 1));
    [TestMethod]
    public void TestParallelMergingDictionaries2() => Test(d => Methods.ParallelMergingDictionaries(d, 2));
    [TestMethod]
    public void TestParallelMergingDictionaries4() => Test(d => Methods.ParallelMergingDictionaries(d, 4));
    [TestMethod]
    public void TestParallelMergingDictionaries8() => Test(d => Methods.ParallelMergingDictionaries(d, 8));
    // NOTE: ParallelSorts takes log2 of the thread count, hence 0..3 here.
    [TestMethod]
    public void TestParallelSorts1() => Test(d => Methods.ParallelSorts(d, 0));
    [TestMethod]
    public void TestParallelSorts2() => Test(d => Methods.ParallelSorts(d, 1));
    [TestMethod]
    public void TestParallelSorts4() => Test(d => Methods.ParallelSorts(d, 2));
    [TestMethod]
    public void TestParallelSorts8() => Test(d => Methods.ParallelSorts(d, 3));

    /// <summary>Flattens a multi-dictionary into its (key, value) pair list.</summary>
    private static List<(int, int)> Pairs(Dictionary<int, List<int>> dict)
        => dict.SelectMany(kv => kv.Value.Select(v => (kv.Key, v))).ToList();

    /// <summary>
    /// Property test for an inversion method: inverting must reverse every
    /// (key, value) pair as a multiset, and inverting twice must restore the
    /// original pair multiset.
    /// </summary>
    private void Test(Func<Dictionary<int, List<int>>, Dictionary<int, List<int>>> method)
    {
        var task = Problem.Random(new Random(2), 1000, 0.1);

        var reverse = method(task);
        CollectionAssert.AreEquivalent(
            Pairs(task),
            reverse.SelectMany(kv => kv.Value.Select(v => (v, kv.Key))).ToList());

        var unreverse = method(reverse);
        CollectionAssert.AreEquivalent(Pairs(task), Pairs(unreverse));
    }
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment