Skip to content

Instantly share code, notes, and snippets.

@ig-sinicyn
Created October 28, 2016 11:10
Show Gist options
  • Save ig-sinicyn/e39767731110f843577c2c476e782ef0 to your computer and use it in GitHub Desktop.
namespace system.collections.generic {
using system;
using system.collections;
using system.diagnostics;
using system.diagnostics.contracts;
using system.runtime.serialization;
using system.security.permissions;
[debugger_type_proxy(typeof(mscorlib_dictionary_debug_view<,>))]
[debugger_display("count = {count}")]
[serializable]
[system.runtime.interop_services.com_visible(false)]
public class dictionary<tkey,tvalue>: idictionary<tkey,tvalue>, idictionary, iread_only_dictionary<tkey, tvalue>, iserializable, ideserialization_callback {
// One slot of the entries[] backing array. A slot is "live" when hash_code >= 0
// and recycled (sitting on the free list) when hash_code == -1.
private struct entry {
public int hash_code; // lower 31 bits of hash code, -1 if unused
public int next; // index of next entry, -1 if last
public tkey key; // key of entry
public tvalue value; // value of entry
}
// Classic hashtable with separate chaining: buckets[hash % buckets.length]
// holds the index of the first entry of a chain, entry.next links the rest,
// and freed slots are recycled through an intrusive free list.
private int[] buckets;
private entry[] entries;
private int count; // high-water mark of used slots; live element count is count - free_count
private int version; // bumped on every mutation; enumerators use it to detect concurrent modification
private int free_list; // head index of the free-slot chain, -1 when empty
private int free_count; // number of recycled slots currently on the free list
private iequality_comparer<tkey> comparer; // hashing/equality strategy for keys
private key_collection keys; // lazily created key view (see keys property)
private value_collection values; // lazily created value view (see values property)
private object _sync_root; // lazily created lock object for icollection.sync_root
// constants for serialization
private const string version_name = "version";
private const string hash_size_name = "hash_size"; // must save buckets.length
private const string key_value_pairs_name = "key_value_pairs";
private const string comparer_name = "comparer";
// Constructors: capacity 0 defers bucket allocation until the first insert;
// a null comparer falls back to equality_comparer<tkey>.default.
public dictionary(): this(0, null) {}
public dictionary(int capacity): this(capacity, null) {}
public dictionary(iequality_comparer<tkey> comparer): this(0, comparer) {}
public dictionary(int capacity, iequality_comparer<tkey> comparer) {
if (capacity < 0) throw_helper.throw_argument_out_of_range_exception(exception_argument.capacity);
if (capacity > 0) initialize(capacity);
this.comparer = comparer ?? equality_comparer<tkey>.default;
#if feature_coreclr
// start with the cheap non-randomized string comparer; insert() switches to the
// randomized one once the collision threshold is hit (hash-flooding defense).
if (hash_helpers.s_use_randomized_string_hashing && comparer == equality_comparer<string>.default)
{
this.comparer = (iequality_comparer<tkey>) non_randomized_string_equality_comparer.default;
}
#endif // feature_coreclr
}
// Copy constructors: pre-size to the source count to avoid resizes, then
// re-insert every pair (add() throws on duplicate keys in the source).
public dictionary(idictionary<tkey,tvalue> dictionary): this(dictionary, null) {}
public dictionary(idictionary<tkey,tvalue> dictionary, iequality_comparer<tkey> comparer):
this(dictionary != null? dictionary.count: 0, comparer) {
if( dictionary == null) {
throw_helper.throw_argument_null_exception(exception_argument.dictionary);
}
foreach (key_value_pair<tkey,tvalue> pair in dictionary) {
add(pair.key, pair.value);
}
}
// Deserialization constructor: only stashes the serialization_info; the real
// rebuild happens later in on_deserialization once the object graph is complete.
protected dictionary(serialization_info info, streaming_context context) {
//we can't do anything with the keys and values until the entire graph has been deserialized
//and we have a resonable estimate that get_hash_code is not going to fail. for the time being,
//we'll just cache this. the graph is not valid until on_deserialization has been called.
hash_helpers.serialization_info_table.add(this, info);
}
// NOTE(review): in the original .NET source these properties are PascalCase
// (Comparer, Count) over camelCase fields (comparer, count). This lowercase
// transliteration collapses property and field into the same identifier, which
// is a duplicate-member error in C# (and would read as infinite recursion).
// Restore distinct names before attempting to compile.
public iequality_comparer<tkey> comparer {
get {
return comparer;
}
}
// Live element count: used slots minus slots parked on the free list.
public int count {
get { return count - free_count; }
}
// Lazily-created key/value views. All six accessors share the same cached
// collection instance; the views stay live as the dictionary mutates.
// NOTE(review): property name collides with the backing field of the same name
// (keys/values) — an artifact of the lowercase transliteration; the original
// source uses Keys/Values over keys/values. Will not compile as-is.
public key_collection keys {
get {
contract.ensures(contract.result<key_collection>() != null);
if (keys == null) keys = new key_collection(this);
return keys;
}
}
icollection<tkey> idictionary<tkey, tvalue>.keys {
get {
if (keys == null) keys = new key_collection(this);
return keys;
}
}
ienumerable<tkey> iread_only_dictionary<tkey, tvalue>.keys {
get {
if (keys == null) keys = new key_collection(this);
return keys;
}
}
public value_collection values {
get {
contract.ensures(contract.result<value_collection>() != null);
if (values == null) values = new value_collection(this);
return values;
}
}
icollection<tvalue> idictionary<tkey, tvalue>.values {
get {
if (values == null) values = new value_collection(this);
return values;
}
}
ienumerable<tvalue> iread_only_dictionary<tkey, tvalue>.values {
get {
if (values == null) values = new value_collection(this);
return values;
}
}
// Indexer: get throws key_not_found for a missing key; set inserts or overwrites.
public tvalue this[tkey key] {
get {
int i = find_entry(key);
if (i >= 0) return entries[i].value;
throw_helper.throw_key_not_found_exception();
return default(tvalue); // unreachable; satisfies definite-return analysis since the helper throws
}
set {
insert(key, value, false); // add == false: overwrite allowed
}
}
// Adds key/value; throws argument_exception if the key already exists.
public void add(tkey key, tvalue value) {
insert(key, value, true); // add == true: duplicates throw
}
void icollection<key_value_pair<tkey, tvalue>>.add(key_value_pair<tkey, tvalue> key_value_pair) {
add(key_value_pair.key, key_value_pair.value);
}
// Pair-based contains: key must be present AND its stored value must equal the
// pair's value under the default value comparer.
bool icollection<key_value_pair<tkey, tvalue>>.contains(key_value_pair<tkey, tvalue> key_value_pair) {
int i = find_entry(key_value_pair.key);
if( i >= 0 && equality_comparer<tvalue>.default.equals(entries[i].value, key_value_pair.value)) {
return true;
}
return false;
}
// Pair-based remove: only removes when both key and value match.
bool icollection<key_value_pair<tkey, tvalue>>.remove(key_value_pair<tkey, tvalue> key_value_pair) {
int i = find_entry(key_value_pair.key);
if( i >= 0 && equality_comparer<tvalue>.default.equals(entries[i].value, key_value_pair.value)) {
remove(key_value_pair.key);
return true;
}
return false;
}
// Removes all entries but retains capacity: the buckets/entries arrays are
// reset in place rather than reallocated.
public void clear() {
if (count > 0) {
for (int i = 0; i < buckets.length; i++) buckets[i] = -1;
array.clear(entries, 0, count); // also drops key/value references for the GC
free_list = -1;
count = 0;
free_count = 0;
version++; // invalidate live enumerators
}
}
// Returns true when the given key is present.
// find_entry yields -1 for a missing key, so any non-negative slot means a hit.
public bool contains_key(tkey key) {
    int slot = find_entry(key);
    return slot > -1;
}
// Linear O(count) scan over all live slots. Null is handled on a separate
// branch because the default comparer cannot be asked about a null value.
public bool contains_value(tvalue value) {
if (value == null) {
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0 && entries[i].value == null) return true;
}
}
else {
equality_comparer<tvalue> c = equality_comparer<tvalue>.default; // hoisted out of the loop
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0 && c.equals(entries[i].value, value)) return true;
}
}
return false;
}
// Copies all live pairs into array starting at index, in entry-slot order
// (free-list slots are skipped). Validates target bounds first.
private void copy_to(key_value_pair<tkey,tvalue>[] array, int index) {
if (array == null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (index < 0 || index > array.length ) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < count) {
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
}
int count = this.count; // local snapshots avoid repeated field loads in the loop
entry[] entries = this.entries;
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) {
array[index++] = new key_value_pair<tkey,tvalue>(entries[i].key, entries[i].value);
}
}
}
// Public enumerator is a struct (no allocation for foreach); the interface
// overload boxes the same struct for ienumerable callers.
public enumerator get_enumerator() {
return new enumerator(this, enumerator.key_value_pair);
}
ienumerator<key_value_pair<tkey, tvalue>> ienumerable<key_value_pair<tkey, tvalue>>.get_enumerator() {
return new enumerator(this, enumerator.key_value_pair);
}
// iserializable: persists version, comparer, bucket length, and the pairs as
// a flat array. hash_size 0 signals an empty, never-initialized dictionary.
[system.security.security_critical] // auto-generated_required
public virtual void get_object_data(serialization_info info, streaming_context context) {
if (info==null) {
throw_helper.throw_argument_null_exception(exception_argument.info);
}
info.add_value(version_name, version);
#if feature_randomized_string_hashing
// never serialize the randomized comparer itself; map it back to a stable one
info.add_value(comparer_name, hash_helpers.get_equality_comparer_for_serialization(comparer), typeof(iequality_comparer<tkey>));
#else
info.add_value(comparer_name, comparer, typeof(iequality_comparer<tkey>));
#endif
info.add_value(hash_size_name, buckets == null ? 0 : buckets.length); //this is the length of the bucket array.
if( buckets != null) {
key_value_pair<tkey, tvalue>[] array = new key_value_pair<tkey, tvalue>[count];
copy_to(array, 0);
info.add_value(key_value_pairs_name, array, typeof(key_value_pair<tkey, tvalue>[]));
}
}
// Core lookup: walks the chain for the key's bucket and returns the slot
// index, or -1 when absent (or when the table was never initialized).
private int find_entry(tkey key) {
if( key == null) {
throw_helper.throw_argument_null_exception(exception_argument.key);
}
if (buckets != null) {
int hash_code = comparer.get_hash_code(key) & 0x7fffffff; // strip sign bit; -1 is the "unused" sentinel
for (int i = buckets[hash_code % buckets.length]; i >= 0; i = entries[i].next) {
// compare cached hash first — cheap reject before the full equals call
if (entries[i].hash_code == hash_code && comparer.equals(entries[i].key, key)) return i;
}
}
return -1;
}
// Allocates buckets/entries at the next prime >= capacity; all buckets start
// empty (-1) and the free list starts empty.
private void initialize(int capacity) {
int size = hash_helpers.get_prime(capacity); // prime length reduces clustering for modulo hashing
buckets = new int[size];
for (int i = 0; i < buckets.length; i++) buckets[i] = -1;
entries = new entry[size];
free_list = -1;
}
// Core insert/overwrite. add == true makes duplicate keys throw; false makes
// them overwrite in place. New slots come from the free list first, then from
// the unused tail of entries[], growing via resize() when the tail is full.
private void insert(tkey key, tvalue value, bool add) {
if( key == null ) {
throw_helper.throw_argument_null_exception(exception_argument.key);
}
if (buckets == null) initialize(0); // lazy first-use allocation
int hash_code = comparer.get_hash_code(key) & 0x7fffffff;
int target_bucket = hash_code % buckets.length;
#if feature_randomized_string_hashing
int collision_count = 0; // chain length; long chains suggest hash flooding
#endif
// first pass: look for an existing entry with this key
for (int i = buckets[target_bucket]; i >= 0; i = entries[i].next) {
if (entries[i].hash_code == hash_code && comparer.equals(entries[i].key, key)) {
if (add) {
throw_helper.throw_argument_exception(exception_resource.argument_adding_duplicate);
}
entries[i].value = value;
version++;
return;
}
#if feature_randomized_string_hashing
collision_count++;
#endif
}
// not found: pick a slot for the new entry
int index;
if (free_count > 0) {
// reuse a previously removed slot; its .next links the free list
index = free_list;
free_list = entries[index].next;
free_count--;
}
else {
if (count == entries.length)
{
resize();
target_bucket = hash_code % buckets.length; // bucket count changed; recompute
}
index = count;
count++;
}
// link the new entry at the head of its bucket's chain
entries[index].hash_code = hash_code;
entries[index].next = buckets[target_bucket];
entries[index].key = key;
entries[index].value = value;
buckets[target_bucket] = index;
version++;
#if feature_randomized_string_hashing
#if feature_coreclr
// in case we hit the collision threshold we'll need to switch to the comparer which is using randomized string hashing
// in this case will be equality_comparer<string>.default.
// note, randomized string hashing is turned on by default on coreclr so equality_comparer<string>.default will
// be using randomized string hashing
if (collision_count > hash_helpers.hash_collision_threshold && comparer == non_randomized_string_equality_comparer.default)
{
comparer = (iequality_comparer<tkey>) equality_comparer<string>.default;
resize(entries.length, true); // same size, but rehash everything with the new comparer
}
#else
if(collision_count > hash_helpers.hash_collision_threshold && hash_helpers.is_well_known_equality_comparer(comparer))
{
comparer = (iequality_comparer<tkey>) hash_helpers.get_randomized_equality_comparer(comparer);
resize(entries.length, true);
}
#endif // feature_coreclr
#endif
}
// ideserialization_callback: rebuilds the hashtable from the info cached by
// the serialization constructor, once the full object graph is available.
public virtual void on_deserialization(object sender) {
serialization_info si_info;
hash_helpers.serialization_info_table.try_get_value(this, out si_info);
if (si_info==null) {
// it might be necessary to call on_deserialization from a container if the container object also implements
// on_deserialization. however, remoting will call on_deserialization again.
// we can return immediately if this function is called twice.
// note we set remove the serialization info from the table at the end of this method.
return;
}
int real_version = si_info.get_int32(version_name);
int hashsize = si_info.get_int32(hash_size_name);
comparer = (iequality_comparer<tkey>)si_info.get_value(comparer_name, typeof(iequality_comparer<tkey>));
if( hashsize != 0) {
buckets = new int[hashsize];
for (int i = 0; i < buckets.length; i++) buckets[i] = -1;
entries = new entry[hashsize];
free_list = -1;
key_value_pair<tkey, tvalue>[] array = (key_value_pair<tkey, tvalue>[])
si_info.get_value(key_value_pairs_name, typeof(key_value_pair<tkey, tvalue>[]));
if (array==null) {
throw_helper.throw_serialization_exception(exception_resource.serialization_missing_keys);
}
for (int i=0; i<array.length; i++) {
if ( array[i].key == null) {
throw_helper.throw_serialization_exception(exception_resource.serialization_null_key);
}
// re-insert re-hashes under the deserialized comparer; add==true so
// corrupt payloads with duplicate keys throw instead of silently merging
insert(array[i].key, array[i].value, true);
}
}
else {
buckets = null; // hash_size 0 means the table was never initialized
}
version = real_version; // restore after insert() bumped it
hash_helpers.serialization_info_table.remove(this);
}
// Grows the table to the next prime past double the count.
private void resize() {
resize(hash_helpers.expand_prime(count), false);
}
// Rebuilds buckets/entries at new_size. When force_new_hash_codes is true the
// cached hashes are recomputed first (used after swapping in a randomized
// comparer); otherwise entries keep their hashes and are only re-bucketed.
private void resize(int new_size, bool force_new_hash_codes) {
contract.assert(new_size >= entries.length);
int[] new_buckets = new int[new_size];
for (int i = 0; i < new_buckets.length; i++) new_buckets[i] = -1;
entry[] new_entries = new entry[new_size];
array.copy(entries, 0, new_entries, 0, count);
if(force_new_hash_codes) {
for (int i = 0; i < count; i++) {
if(new_entries[i].hash_code != -1) { // skip free-list slots
new_entries[i].hash_code = (comparer.get_hash_code(new_entries[i].key) & 0x7fffffff);
}
}
}
// re-thread every live entry into its (new) bucket chain
for (int i = 0; i < count; i++) {
if (new_entries[i].hash_code >= 0) {
int bucket = new_entries[i].hash_code % new_size;
new_entries[i].next = new_buckets[bucket];
new_buckets[bucket] = i;
}
}
buckets = new_buckets;
entries = new_entries;
}
// Removes the entry for key, returning whether anything was removed. The
// freed slot is pushed onto the free list for reuse by the next insert.
public bool remove(tkey key) {
if(key == null) {
throw_helper.throw_argument_null_exception(exception_argument.key);
}
if (buckets != null) {
int hash_code = comparer.get_hash_code(key) & 0x7fffffff;
int bucket = hash_code % buckets.length;
int last = -1; // previous entry in the chain; -1 while at the head
for (int i = buckets[bucket]; i >= 0; last = i, i = entries[i].next) {
if (entries[i].hash_code == hash_code && comparer.equals(entries[i].key, key)) {
if (last < 0) {
buckets[bucket] = entries[i].next; // unlink chain head
}
else {
entries[last].next = entries[i].next; // unlink mid-chain
}
entries[i].hash_code = -1; // mark slot free
entries[i].next = free_list; // push onto free list
entries[i].key = default(tkey); // drop references for the GC
entries[i].value = default(tvalue);
free_list = i;
free_count++;
version++;
return true;
}
}
}
return false;
}
// Looks the key up once; on a hit the stored value is handed back through the
// out parameter, otherwise the out parameter gets default(tvalue).
// Returns whether the key was present.
public bool try_get_value(tkey key, out tvalue value) {
    int slot = find_entry(key);
    bool found = slot >= 0;
    value = found ? entries[slot].value : default(tvalue);
    return found;
}
// this is a convenience method for the internal callers that were converted from using hashtable.
// many were combining key doesn't exist and key exists but null value (for non-value types) checks.
// this allows them to continue getting that behavior with minimal code delta. this is basically
// try_get_value without the out param
internal tvalue get_value_or_default(tkey key) {
int i = find_entry(key);
if (i >= 0) {
return entries[i].value;
}
return default(tvalue); // missing key: default, never a throw
}
// The dictionary itself is always writable; only the key/value views are read-only.
bool icollection<key_value_pair<tkey,tvalue>>.is_read_only {
get { return false; }
}
void icollection<key_value_pair<tkey,tvalue>>.copy_to(key_value_pair<tkey,tvalue>[] array, int index) {
copy_to(array, index); // delegate to the validated private helper
}
// Non-generic copy_to: accepts key_value_pair[], dictionary_entry[], or
// object[] targets; anything else is an invalid-array-type error. Only
// single-dimension, zero-based arrays are supported.
void icollection.copy_to(array array, int index) {
if (array == null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (array.rank != 1) {
throw_helper.throw_argument_exception(exception_resource.arg_rank_multi_dim_not_supported);
}
if( array.get_lower_bound(0) != 0 ) {
throw_helper.throw_argument_exception(exception_resource.arg_non_zero_lower_bound);
}
if (index < 0 || index > array.length) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < count) {
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
}
key_value_pair<tkey,tvalue>[] pairs = array as key_value_pair<tkey,tvalue>[];
if (pairs != null) {
copy_to(pairs, index); // exact element type: fast path
}
else if( array is dictionary_entry[]) {
dictionary_entry[] dict_entry_array = array as dictionary_entry[];
entry[] entries = this.entries;
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) {
dict_entry_array[index++] = new dictionary_entry(entries[i].key, entries[i].value);
}
}
}
else {
object[] objects = array as object[];
if (objects == null) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
try {
int count = this.count;
entry[] entries = this.entries;
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) {
objects[index++] = new key_value_pair<tkey,tvalue>(entries[i].key, entries[i].value);
}
}
}
// covariant object[] over an incompatible element type surfaces here
catch(array_type_mismatch_exception) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
}
}
ienumerator ienumerable.get_enumerator() {
return new enumerator(this, enumerator.key_value_pair);
}
bool icollection.is_synchronized {
get { return false; } // no built-in locking; callers synchronize via sync_root
}
// Lazily created lock object; compare_exchange keeps creation race-free.
object icollection.sync_root {
get {
if( _sync_root == null) {
system.threading.interlocked.compare_exchange<object>(ref _sync_root, new object(), null);
}
return _sync_root;
}
}
bool idictionary.is_fixed_size {
get { return false; }
}
bool idictionary.is_read_only {
get { return false; }
}
icollection idictionary.keys {
get { return (icollection)keys; }
}
icollection idictionary.values {
get { return (icollection)values; }
}
// Non-generic indexer. get returns null (not a throw) for missing or
// type-incompatible keys; set translates cast failures into the standard
// wrong-key-type / wrong-value-type argument exceptions.
object idictionary.this[object key] {
get {
if( is_compatible_key(key)) {
int i = find_entry((tkey)key);
if (i >= 0) {
return entries[i].value;
}
}
return null;
}
set {
if (key == null)
{
throw_helper.throw_argument_null_exception(exception_argument.key);
}
throw_helper.if_null_and_nulls_are_illegal_then_throw<tvalue>(value, exception_argument.value);
// nested try blocks distinguish a bad key cast from a bad value cast
try {
tkey temp_key = (tkey)key;
try {
this[temp_key] = (tvalue)value;
}
catch (invalid_cast_exception) {
throw_helper.throw_wrong_value_type_argument_exception(value, typeof(tvalue));
}
}
catch (invalid_cast_exception) {
throw_helper.throw_wrong_key_type_argument_exception(key, typeof(tkey));
}
}
}
// Helper for the non-generic idictionary surface: a null key is always an
// error, any other object is usable only if it actually is a tkey.
private static bool is_compatible_key(object key) {
    if (key == null) {
        throw_helper.throw_argument_null_exception(exception_argument.key);
    }
    return key is tkey;
}
// Non-generic add: same nested-cast pattern as the indexer setter so callers
// get a precise wrong-key-type vs wrong-value-type exception.
void idictionary.add(object key, object value) {
if (key == null)
{
throw_helper.throw_argument_null_exception(exception_argument.key);
}
throw_helper.if_null_and_nulls_are_illegal_then_throw<tvalue>(value, exception_argument.value);
try {
tkey temp_key = (tkey)key;
try {
add(temp_key, (tvalue)value);
}
catch (invalid_cast_exception) {
throw_helper.throw_wrong_value_type_argument_exception(value, typeof(tvalue));
}
}
catch (invalid_cast_exception) {
throw_helper.throw_wrong_key_type_argument_exception(key, typeof(tkey));
}
}
// Incompatible key types simply report "not present" rather than throwing.
bool idictionary.contains(object key) {
if(is_compatible_key(key)) {
return contains_key((tkey)key);
}
return false;
}
idictionary_enumerator idictionary.get_enumerator() {
return new enumerator(this, enumerator.dict_entry); // dict_entry mode for the legacy interface
}
void idictionary.remove(object key) {
if(is_compatible_key(key)) {
remove((tkey)key);
}
}
// Value-type enumerator shared by the generic and legacy interfaces.
// get_enumerator_ret_type picks what the non-generic current returns.
// Fail-fast: any mutation of the dictionary bumps version and makes the next
// move_next/reset throw.
// NOTE(review): the `current` property and the `current` field collide — an
// artifact of the lowercase transliteration (the original uses Current over
// current). Will not compile as-is.
[serializable]
public struct enumerator: ienumerator<key_value_pair<tkey,tvalue>>,
idictionary_enumerator
{
private dictionary<tkey,tvalue> dictionary;
private int version; // snapshot of dictionary.version at construction
private int index; // 0 = before first; count + 1 = finished
private key_value_pair<tkey,tvalue> current;
private int get_enumerator_ret_type; // what should enumerator.current return?
internal const int dict_entry = 1;
internal const int key_value_pair = 2;
internal enumerator(dictionary<tkey,tvalue> dictionary, int get_enumerator_ret_type) {
this.dictionary = dictionary;
version = dictionary.version;
index = 0;
this.get_enumerator_ret_type = get_enumerator_ret_type;
current = new key_value_pair<tkey, tvalue>();
}
public bool move_next() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
// use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends.
// dictionary.count+1 could be negative if dictionary.count is int32.max_value
while ((uint)index < (uint)dictionary.count) {
if (dictionary.entries[index].hash_code >= 0) { // skip free-list slots
current = new key_value_pair<tkey, tvalue>(dictionary.entries[index].key, dictionary.entries[index].value);
index++;
return true;
}
index++;
}
index = dictionary.count + 1; // terminal sentinel
current = new key_value_pair<tkey, tvalue>();
return false;
}
public key_value_pair<tkey,tvalue> current {
get { return current; }
}
public void dispose() {
}
// Legacy current: throws if positioned before the first or past the last
// element; returns a dictionary_entry or boxed pair depending on mode.
object ienumerator.current {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
if (get_enumerator_ret_type == dict_entry) {
return new system.collections.dictionary_entry(current.key, current.value);
} else {
return new key_value_pair<tkey, tvalue>(current.key, current.value);
}
}
}
void ienumerator.reset() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
index = 0;
current = new key_value_pair<tkey, tvalue>();
}
dictionary_entry idictionary_enumerator.entry {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
return new dictionary_entry(current.key, current.value);
}
}
object idictionary_enumerator.key {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
return current.key;
}
}
object idictionary_enumerator.value {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
return current.value;
}
}
}
// Live, read-only view over the dictionary's keys. Shares the dictionary's
// storage directly; mutating members of icollection<tkey> always throw.
[debugger_type_proxy(typeof(mscorlib_dictionary_key_collection_debug_view<,>))]
[debugger_display("count = {count}")]
[serializable]
public sealed class key_collection: icollection<tkey>, icollection, iread_only_collection<tkey>
{
private dictionary<tkey,tvalue> dictionary;
public key_collection(dictionary<tkey,tvalue> dictionary) {
if (dictionary == null) {
throw_helper.throw_argument_null_exception(exception_argument.dictionary);
}
this.dictionary = dictionary;
}
public enumerator get_enumerator() {
return new enumerator(dictionary);
}
// Copies keys of live slots into array starting at index, in slot order.
public void copy_to(tkey[] array, int index) {
if (array == null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (index < 0 || index > array.length) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < dictionary.count) {
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
}
int count = dictionary.count;
entry[] entries = dictionary.entries;
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) array[index++] = entries[i].key;
}
}
public int count {
get { return dictionary.count; } // always mirrors the dictionary's live count
}
bool icollection<tkey>.is_read_only {
get { return true; }
}
// key views cannot be mutated directly; mutate the dictionary instead
void icollection<tkey>.add(tkey item){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_key_collection_set);
}
void icollection<tkey>.clear(){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_key_collection_set);
}
bool icollection<tkey>.contains(tkey item){
return dictionary.contains_key(item);
}
bool icollection<tkey>.remove(tkey item){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_key_collection_set);
return false; // unreachable; helper above throws
}
ienumerator<tkey> ienumerable<tkey>.get_enumerator() {
return new enumerator(dictionary);
}
ienumerator ienumerable.get_enumerator() {
return new enumerator(dictionary);
}
// Non-generic copy_to: tkey[] fast path, object[] fallback.
void icollection.copy_to(array array, int index) {
if (array==null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (array.rank != 1) {
throw_helper.throw_argument_exception(exception_resource.arg_rank_multi_dim_not_supported);
}
if( array.get_lower_bound(0) != 0 ) {
throw_helper.throw_argument_exception(exception_resource.arg_non_zero_lower_bound);
}
if (index < 0 || index > array.length) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < dictionary.count) {
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
}
tkey[] keys = array as tkey[];
if (keys != null) {
copy_to(keys, index);
}
else {
object[] objects = array as object[];
if (objects == null) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
int count = dictionary.count;
entry[] entries = dictionary.entries;
try {
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) objects[index++] = entries[i].key;
}
}
catch(array_type_mismatch_exception) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
}
}
bool icollection.is_synchronized {
get { return false; }
}
object icollection.sync_root {
get { return ((icollection)dictionary).sync_root; } // share the dictionary's lock object
}
// Struct enumerator over keys; fail-fast on dictionary mutation via version.
[serializable]
public struct enumerator : ienumerator<tkey>, system.collections.ienumerator
{
private dictionary<tkey, tvalue> dictionary;
private int index; // 0 = before first; count + 1 = finished
private int version;
private tkey current_key;
internal enumerator(dictionary<tkey, tvalue> dictionary) {
this.dictionary = dictionary;
version = dictionary.version;
index = 0;
current_key = default(tkey);
}
public void dispose() {
}
public bool move_next() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
while ((uint)index < (uint)dictionary.count) {
if (dictionary.entries[index].hash_code >= 0) { // skip free-list slots
current_key = dictionary.entries[index].key;
index++;
return true;
}
index++;
}
index = dictionary.count + 1; // terminal sentinel
current_key = default(tkey);
return false;
}
public tkey current {
get {
return current_key;
}
}
object system.collections.ienumerator.current {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
return current_key;
}
}
void system.collections.ienumerator.reset() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
index = 0;
current_key = default(tkey);
}
}
}
// Live, read-only view over the dictionary's values; mirror image of
// key_collection. Mutating members of icollection<tvalue> always throw.
[debugger_type_proxy(typeof(mscorlib_dictionary_value_collection_debug_view<,>))]
[debugger_display("count = {count}")]
[serializable]
public sealed class value_collection: icollection<tvalue>, icollection, iread_only_collection<tvalue>
{
private dictionary<tkey,tvalue> dictionary;
public value_collection(dictionary<tkey,tvalue> dictionary) {
if (dictionary == null) {
throw_helper.throw_argument_null_exception(exception_argument.dictionary);
}
this.dictionary = dictionary;
}
public enumerator get_enumerator() {
return new enumerator(dictionary);
}
// Copies values of live slots into array starting at index, in slot order.
public void copy_to(tvalue[] array, int index) {
if (array == null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (index < 0 || index > array.length) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < dictionary.count) {
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
}
int count = dictionary.count;
entry[] entries = dictionary.entries;
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) array[index++] = entries[i].value;
}
}
public int count {
get { return dictionary.count; } // always mirrors the dictionary's live count
}
bool icollection<tvalue>.is_read_only {
get { return true; }
}
// value views cannot be mutated directly; mutate the dictionary instead
void icollection<tvalue>.add(tvalue item){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_value_collection_set);
}
bool icollection<tvalue>.remove(tvalue item){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_value_collection_set);
return false; // unreachable; helper above throws
}
void icollection<tvalue>.clear(){
throw_helper.throw_not_supported_exception(exception_resource.not_supported_value_collection_set);
}
bool icollection<tvalue>.contains(tvalue item){
return dictionary.contains_value(item); // O(n) scan, unlike key lookup
}
ienumerator<tvalue> ienumerable<tvalue>.get_enumerator() {
return new enumerator(dictionary);
}
ienumerator ienumerable.get_enumerator() {
return new enumerator(dictionary);
}
// Non-generic copy_to: tvalue[] fast path, object[] fallback.
void icollection.copy_to(array array, int index) {
if (array == null) {
throw_helper.throw_argument_null_exception(exception_argument.array);
}
if (array.rank != 1) {
throw_helper.throw_argument_exception(exception_resource.arg_rank_multi_dim_not_supported);
}
if( array.get_lower_bound(0) != 0 ) {
throw_helper.throw_argument_exception(exception_resource.arg_non_zero_lower_bound);
}
if (index < 0 || index > array.length) {
throw_helper.throw_argument_out_of_range_exception(exception_argument.index, exception_resource.argument_out_of_range_need_non_neg_num);
}
if (array.length - index < dictionary.count)
throw_helper.throw_argument_exception(exception_resource.arg_array_plus_off_too_small);
tvalue[] values = array as tvalue[];
if (values != null) {
copy_to(values, index);
}
else {
object[] objects = array as object[];
if (objects == null) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
int count = dictionary.count;
entry[] entries = dictionary.entries;
try {
for (int i = 0; i < count; i++) {
if (entries[i].hash_code >= 0) objects[index++] = entries[i].value;
}
}
catch(array_type_mismatch_exception) {
throw_helper.throw_argument_exception(exception_resource.argument_invalid_array_type);
}
}
}
bool icollection.is_synchronized {
get { return false; }
}
object icollection.sync_root {
get { return ((icollection)dictionary).sync_root; } // share the dictionary's lock object
}
// Struct enumerator over values; fail-fast on dictionary mutation via version.
[serializable]
public struct enumerator : ienumerator<tvalue>, system.collections.ienumerator
{
private dictionary<tkey, tvalue> dictionary;
private int index; // 0 = before first; count + 1 = finished
private int version;
private tvalue current_value;
internal enumerator(dictionary<tkey, tvalue> dictionary) {
this.dictionary = dictionary;
version = dictionary.version;
index = 0;
current_value = default(tvalue);
}
public void dispose() {
}
public bool move_next() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
while ((uint)index < (uint)dictionary.count) {
if (dictionary.entries[index].hash_code >= 0) { // skip free-list slots
current_value = dictionary.entries[index].value;
index++;
return true;
}
index++;
}
index = dictionary.count + 1; // terminal sentinel
current_value = default(tvalue);
return false;
}
public tvalue current {
get {
return current_value;
}
}
object system.collections.ienumerator.current {
get {
if( index == 0 || (index == dictionary.count + 1)) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_op_cant_happen);
}
return current_value;
}
}
void system.collections.ienumerator.reset() {
if (version != dictionary.version) {
throw_helper.throw_invalid_operation_exception(exception_resource.invalid_operation_enum_failed_version);
}
index = 0;
current_value = default(tvalue);
}
}
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment