Resolving Hash Collisions: Separate Chaining

Starting with Java 8, once the chain at a bucket grows past a threshold (8 entries, and only when the table has at least 64 buckets), java.util.HashMap converts that bucket from a linked list into a red-black tree.
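As a minimal sketch of separate chaining itself (independent of the full implementation further below), each slot of the array holds a linked list of entries, and colliding keys simply share a slot. The class and method names here are made up for illustration.

import java.util.LinkedList;

// Minimal separate-chaining sketch: each bucket is a linked list of entries.
public class ChainedHashTable<K, V> {
    private static class Entry<K, V> {
        K key; V value;
        Entry(K key, V value) { this.key = key; this.value = value; }
    }

    private final LinkedList<Entry<K, V>>[] buckets;

    @SuppressWarnings("unchecked")
    public ChainedHashTable(int m) {
        buckets = new LinkedList[m];
        for (int i = 0; i < m; i++)
            buckets[i] = new LinkedList<>();
    }

    private int hash(K key) {
        return (key.hashCode() & 0x7fffffff) % buckets.length;
    }

    public void put(K key, V value) {
        LinkedList<Entry<K, V>> bucket = buckets[hash(key)];
        for (Entry<K, V> e : bucket)
            if (e.key.equals(key)) { e.value = value; return; }   // key already present: update it
        bucket.add(new Entry<>(key, value));                      // otherwise chain a new entry onto this bucket
    }

    public V get(K key) {
        for (Entry<K, V> e : buckets[hash(key)])
            if (e.key.equals(key)) return e.value;
        return null;
    }
}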

Time Complexity Analysis

Dynamic Resizing of the Hash Table

  1. When the average number of elements per bucket grows past a threshold, expand the table (N/M >= upperTol).
  2. When the average number of elements per bucket drops below a threshold, shrink the table (N/M <= lowerTol); a small sketch of both triggers follows this list.
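A minimal sketch of the two triggers above, assuming the same upperTol and lowerTol values as the full code below; the decide() method and its return convention are made up for illustration.

// Illustrative resize policy: decide whether the table with M buckets and size elements should change size.
public class ResizePolicy {
    static final int upperTol = 10;   // grow when the average bucket holds >= 10 elements
    static final int lowerTol = 2;    // shrink when the average bucket holds < 2 elements

    // Returns +1 to grow, -1 to shrink, 0 to leave the table size M unchanged.
    static int decide(int size, int M) {
        if (size >= upperTol * M) return +1;   // N/M >= upperTol, written without integer division
        if (size <  lowerTol * M) return -1;   // N/M <  lowerTol
        return 0;
    }

    public static void main(String[] args) {
        System.out.println(decide(530, 53));   // +1: 530/53 = 10, time to grow
        System.out.println(decide(500, 97));   //  0: average load is within [2, 10)
        System.out.println(decide(150, 97));   // -1: fewer than 2 elements per bucket on average
    }
}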

Hash Table Complexity Analysis

At first we resized simply to 2 * M, but 2 * M is composite and can leave the resized table unevenly distributed. Instead, M can be chosen from the list of primes below (the capacity array in the code), each roughly double the previous one. A small demonstration of the difference follows.
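A small, hypothetical demonstration: keys that share a common factor with a composite M (96 stands in here for a doubled capacity) pile into a handful of buckets, while a nearby prime from the capacity table (97) spreads the same keys out.

import java.util.HashSet;
import java.util.Set;

// Illustrative only: counts how many distinct buckets a pathological key set occupies
// for a composite capacity versus a nearby prime capacity.
public class ModuloDistributionDemo {
    static int distinctBuckets(int M) {
        Set<Integer> buckets = new HashSet<>();
        for (int key = 0; key < 1000; key++)
            buckets.add((key * 96) % M);   // every key is a multiple of 96
        return buckets.size();
    }

    public static void main(String[] args) {
        System.out.println(distinctBuckets(96));   // composite M sharing a factor with the keys: 1 bucket used
        System.out.println(distinctBuckets(97));   // prime M: the same keys spread over all 97 buckets
    }
}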

Code Implementation

import java.util.TreeMap;

public class HashTable<K, V> {

    // Prime capacities: each is roughly double the previous one, so M stays prime after every resize.
    private final int[] capacity = {
            53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593,
            49157, 98317, 196613, 393241, 786433, 1572869, 3145739, 6291469,
            12582917, 25165843, 50331653, 100663319, 201326611, 402653189, 805306457, 1610612741
    };

    private static final int upperTol = 10;   // grow when size >= upperTol * M
    private static final int lowerTol = 2;    // shrink when size < lowerTol * M
    private int capacityIndex = 0;            // current index into the capacity array

    private TreeMap<K, V>[] hashtable;
    private int size;
    private int M;   // length of the hash table, i.e. the number of buckets (a suitable prime)

    @SuppressWarnings("unchecked")
    public HashTable() {
        this.M = capacity[capacityIndex];
        size = 0;
        hashtable = new TreeMap[M];
        for (int i = 0; i < M; i++)
            hashtable[i] = new TreeMap<>();
    }

    private int hash(K key) {
        // key.hashCode() & 0x7fffffff clears the sign bit, giving a non-negative value of the hash code
        return (key.hashCode() & 0x7fffffff) % M;
    }

    public int getSize() {
        return size;
    }

    public void add(K key, V value) {
        TreeMap<K, V> map = hashtable[hash(key)];
        if (map.containsKey(key))   // key already present: update
            map.put(key, value);
        else {                      // new key: insert
            map.put(key, value);
            size++;

            // size >= upperTol * M is size / M >= upperTol without integer division
            if (size >= upperTol * M && capacityIndex + 1 < capacity.length) {
                capacityIndex++;
                resize(capacity[capacityIndex]);   // previously: resize(2 * M)
            }
        }
    }

    public V remove(K key) {
        V ret = null;
        TreeMap<K, V> map = hashtable[hash(key)];
        if (map.containsKey(key)) {
            ret = map.remove(key);
            size--;

            if (size < lowerTol * M && capacityIndex - 1 >= 0) {
                capacityIndex--;
                resize(capacity[capacityIndex]);   // previously: resize(M / 2)
            }
        }
        return ret;
    }

    public void set(K key, V value) {
        TreeMap<K, V> map = hashtable[hash(key)];
        if (!map.containsKey(key))
            throw new IllegalArgumentException(key + " doesn't exist!");
        map.put(key, value);
    }

    public boolean contains(K key) {
        return hashtable[hash(key)].containsKey(key);
    }

    public V get(K key) {
        return hashtable[hash(key)].get(key);
    }

    @SuppressWarnings("unchecked")
    private void resize(int newM) {
        TreeMap<K, V>[] newHashTable = new TreeMap[newM];
        for (int i = 0; i < newM; i++)
            newHashTable[i] = new TreeMap<>();

        // hash() depends on M, so M must be updated to newM before rehashing into the new table
        int oldM = M;
        this.M = newM;

        for (int i = 0; i < oldM; i++) {
            TreeMap<K, V> map = hashtable[i];
            for (K key : map.keySet())
                newHashTable[hash(key)].put(key, map.get(key));
        }
        this.hashtable = newHashTable;
    }
}
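A short usage sketch of the HashTable class above; the word list is made up.

public class HashTableDemo {
    public static void main(String[] args) {
        HashTable<String, Integer> counts = new HashTable<>();
        String[] words = {"hash", "table", "hash", "prime", "table", "hash"};

        // Count word frequencies with the chained hash table defined above.
        for (String word : words) {
            if (counts.contains(word))
                counts.set(word, counts.get(word) + 1);
            else
                counts.add(word, 1);
        }

        System.out.println("distinct words: " + counts.getSize());              // 3
        System.out.println("hash appears " + counts.get("hash") + " times");    // 3
    }
}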

Hash table operations run in amortized O(1) time; one reason it achieves such good performance is that it gives up ordering: unlike a balanced tree, it does not keep its keys in sorted order.
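As a small illustration of that trade-off with the standard library: java.util.HashMap makes no promise about key order, while java.util.TreeMap (a balanced search tree) keeps keys sorted at the cost of O(log n) per operation.

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class OrderingDemo {
    public static void main(String[] args) {
        Map<String, Integer> hashed = new HashMap<>();
        Map<String, Integer> sorted = new TreeMap<>();
        for (String k : new String[]{"banana", "apple", "cherry"}) {
            hashed.put(k, k.length());
            sorted.put(k, k.length());
        }
        System.out.println(hashed.keySet());  // iteration order is unspecified
        System.out.println(sorted.keySet());  // always [apple, banana, cherry]
    }
}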