brief introduction
ConcurrentHashMap is a frequently used data structure which provides better write concurrency on top of thread safety. Internally it is very different from HashMap: volatile reads and CAS operations are used extensively to reduce lock contention, which makes the code much harder to follow than HashMap's. This chapter gives a basic introduction to ConcurrentHashMap based on JDK 1.8.
ConcurrentHashMap class
public class ConcurrentHashMap<K,V> extends AbstractMap<K,V> implements ConcurrentMap<K,V>, Serializable
Inherit the AbstractMap abstract class and implement the ConcurrentMap interface
ConcurrentMap interface
public interface ConcurrentMap<K, V> extends Map<K, V>
Inherit Map interface
ConcurrentMap method
// Null returns the default value @Override default V getOrDefault(Object key, V defaultValue) { V v; return ((v = get(key)) != null) ? v : defaultValue; } // If there is a key, it will not be overwritten (if there is a new value for the put key, the original value will be overwritten) V putIfAbsent(K key, V value); // delete boolean remove(Object key, Object value); // Replace. Replace only when oldValue is the same boolean replace(K key, V oldValue, V newValue); // replace V replace(K key, V value);
The method of using Function is ignored here
Important internal class Node
static class Node<K,V> implements Map.Entry<K,V>
Implements the Map.Entry interface
Node properties
// hash value of key final int hash; // key final K key; // value volatile V val; // Next node volatile Node<K,V> next;
Node constructor
// Builds a chain node with a precomputed hash and an optional successor.
Node(int hash, K key, V val, Node<K,V> next) {
    this.hash = hash;
    this.key = key;
    this.val = val;
    this.next = next;
}
Node method
// Get key public final K getKey() { return key; } // Get value public final V getValue() { return val; } // Get node hashCode (key XOR) public final int hashCode() { return key.hashCode() ^ val.hashCode(); } // toString method public final String toString(){ return key + "=" + val; } // Throw exceptions directly to node settings public final V setValue(V value) { throw new UnsupportedOperationException(); } // Node equals public final boolean equals(Object o) { Object k, v, u; Map.Entry<?,?> e; // Must be map For the entry instance, the key cannot be null, and the value cannot be null. The key value is the same return ((o instanceof Map.Entry) && (k = (e = (Map.Entry<?,?>)o).getKey()) != null && (v = e.getValue()) != null && (k == key || k.equals(key)) && (v == (u = val) || v.equals(u))); } // Search key Node<K,V> find(int h, Object k) { Node<K,V> e = this; if (k != null) { do { K ek; // The current node is the same as h, and the key must be the same if (e.hash == h && ((ek = e.key) == k || (ek != null && k.equals(ek)))) return e; // Move the node down to continue the search } while ((e = e.next) != null); } return null; }
Important internal class TreeNode
static final class TreeNode<K,V> extends Node<K,V>
TreeNode inherits Node
TreeNode property
// Parent node TreeNode<K,V> parent; // red-black tree links // Left node TreeNode<K,V> left; // Right node TreeNode<K,V> right; // The previous Node (forms a two-way linked list with the next in the Node) TreeNode<K,V> prev; // colour boolean red;
TreeNode constructor
TreeNode(int hash, K key, V val, Node<K,V> next, TreeNode<K,V> parent) {
    // Initialise the inherited Node fields.
    super(hash, key, val, next);
    // Link to the parent tree node.
    this.parent = parent;
}
TreeNode method
// Generic bin-traversal entry point: delegates to the tree search.
Node<K,V> find(int h, Object k) {
    return findTreeNode(h, k, null);
}

// Searches the subtree rooted at this node for key k with hash h;
// kc caches k's Comparable class once it has been discovered.
final TreeNode<K,V> findTreeNode(int h, Object k, Class<?> kc) {
    if (k != null) {
        // Start at the current node.
        TreeNode<K,V> p = this;
        do {
            int ph, dir; K pk;
            TreeNode<K,V> q;
            TreeNode<K,V> pl = p.left, pr = p.right;
            // h is smaller than this node's hash: descend left.
            if ((ph = p.hash) > h)
                p = pl;
            // h is larger than this node's hash: descend right.
            else if (ph < h)
                p = pr;
            // Same hash and equal key: found.
            else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
                return p;
            // Same hash, different key, left subtree empty: go right.
            else if (pl == null)
                p = pr;
            // Same hash, different key, right subtree empty: go left.
            else if (pr == null)
                p = pl;
            // Both subtrees present: use Comparable ordering when available.
            else if ((kc != null ||
                      (kc = comparableClassFor(k)) != null) &&
                     (dir = compareComparables(kc, k, pk)) != 0)
                // Negative: descend left; positive: descend right.
                p = (dir < 0) ? pl : pr;
            // Not comparable (or tied): search the right subtree recursively...
            else if ((q = pr.findTreeNode(h, k, kc)) != null)
                return q;
            // ...and if not found there, continue down the left subtree.
            else
                p = pl;
        } while (p != null);
    }
    return null;
}
Important internal class TreeBin
static final class TreeBin<K,V> extends Node<K,V>
TreeBin also inherits Node
TreeBin property
// Root node TreeNode<K,V> root; // Head node volatile TreeNode<K,V> first; // wait for volatile Thread waiter; // Locked state volatile int lockState; static final int WRITER = 1; static final int WAITER = 2; static final int READER = 4; // Memory operation unsafe class private static final sun.misc.Unsafe U; // lockState offset private static final long LOCKSTATE;
TreeBin load initialization
static {
    try {
        U = sun.misc.Unsafe.getUnsafe();
        Class<?> k = TreeBin.class;
        // Cache the memory offset of lockState once, for later CAS calls.
        LOCKSTATE = U.objectFieldOffset
            (k.getDeclaredField("lockState"));
    } catch (Exception e) {
        // Reflection failure is unrecoverable at class-load time.
        throw new Error(e);
    }
}
TreeBin constructor
// Builds a red-black tree from the linked list of TreeNodes headed by b.
TreeBin(TreeNode<K,V> b) {
    // hash = TREEBIN (-2) marks this as a tree bin; it carries no key/value.
    super(TREEBIN, null, null, null);
    // b becomes the head of the doubly linked node list.
    this.first = b;
    TreeNode<K,V> r = null;
    // Insert every node of the list into the tree one by one.
    for (TreeNode<K,V> x = b, next; x != null; x = next) {
        next = (TreeNode<K,V>)x.next;
        // Clear any stale tree links from a previous structure.
        x.left = x.right = null;
        // The first node becomes the (black) root.
        if (r == null) {
            x.parent = null;
            x.red = false;
            r = x;
        }
        else {
            K k = x.key;
            int h = x.hash;
            // Lazily resolved Comparable class of k.
            Class<?> kc = null;
            // Walk down from the root to find the insertion point.
            for (TreeNode<K,V> p = r;;) {
                int dir, ph;
                K pk = p.key;
                // Decide left (dir < 0) or right (dir > 0): by hash first,
                // then Comparable order, then tieBreakOrder as last resort.
                if ((ph = p.hash) > h)
                    dir = -1;
                else if (ph < h)
                    dir = 1;
                else if ((kc == null &&
                          (kc = comparableClassFor(k)) == null) ||
                         (dir = compareComparables(kc, k, pk)) == 0)
                    dir = tieBreakOrder(k, pk);
                TreeNode<K,V> xp = p;
                // Reached an empty child on the chosen side: attach x
                // there and rebalance the tree.
                if ((p = (dir <= 0) ? p.left : p.right) == null) {
                    x.parent = xp;
                    if (dir <= 0)
                        xp.left = x;
                    else
                        xp.right = x;
                    r = balanceInsertion(r, x);
                    break;
                }
            }
        }
    }
    // Publish the root.
    this.root = r;
    // With assertions enabled (-ea) this verifies the red-black invariants
    // and throws AssertionError on violation; it is a no-op otherwise.
    assert checkInvariants(root);
}
Compare size method
// Last-resort total ordering for keys with equal hashes that are not
// mutually Comparable: order by class name first, then by identity hash
// code (ties resolve to -1, so the result is never 0).
static int tieBreakOrder(Object a, Object b) {
    int d = 0;
    // With both operands non-null, try ordering by class name.
    if (a != null && b != null)
        d = a.getClass().getName().compareTo(b.getClass().getName());
    // Null operand or identical class names: fall back to identity hash.
    if (d == 0)
        d = (System.identityHashCode(a) <= System.identityHashCode(b) ? -1 : 1);
    return d;
}
TreeBin lock node
// Acquires the tree's write lock: CAS lockState from 0 (free) to WRITER (1).
private final void lockRoot() {
    if (!U.compareAndSwapInt(this, LOCKSTATE, 0, WRITER))
        // Contended: take the slow path.
        contendedLock();
}

// Releases the write lock unconditionally.
private final void unlockRoot() {
    lockState = 0;
}

// Slow path for acquiring the write lock under contention.
private final void contendedLock() {
    // Whether this thread has registered itself as the waiter.
    boolean waiting = false;
    for (int s;;) {
        // Ignoring the WAITER bit, 0 means no writer and no active readers.
        if (((s = lockState) & ~WAITER) == 0) {
            // Try to take the write lock.
            if (U.compareAndSwapInt(this, LOCKSTATE, s, WRITER)) {
                // We had registered as the waiter: clear the registration.
                if (waiting)
                    waiter = null;
                // Lock acquired.
                return;
            }
        }
        // WAITER bit not yet set: try to register ourselves as the waiter.
        else if ((s & WAITER) == 0) {
            if (U.compareAndSwapInt(this, LOCKSTATE, s, s | WAITER)) {
                waiting = true;
                waiter = Thread.currentThread();
            }
        }
        // Already registered: park until the last reader unparks us.
        else if (waiting)
            LockSupport.park(this);
    }
}
TreeBin lookup
// Reader lookup; never blocks, even while a writer restructures the tree.
final Node<K,V> find(int h, Object k) {
    // A null key never matches.
    if (k != null) {
        // Traverse the linked node list from the head.
        for (Node<K,V> e = first; e != null; ) {
            int s; K ek;
            // A writer holds or awaits the lock: fall back to the linked
            // list, which stays consistent during tree rotations.
            if (((s = lockState) & (WAITER|WRITER)) != 0) {
                // Match the current node on hash and key.
                if (e.hash == h &&
                    ((ek = e.key) == k || (ek != null && k.equals(ek))))
                    return e;
                // Advance along the list.
                e = e.next;
            }
            // Uncontended: take a shared read lock (add READER) and search
            // the tree instead.
            else if (U.compareAndSwapInt(this, LOCKSTATE, s,
                                         s + READER)) {
                TreeNode<K,V> r, p;
                try {
                    // Search from the root when it exists.
                    p = ((r = root) == null ? null :
                         r.findTreeNode(h, k, null));
                } finally {
                    Thread w;
                    // Release the read lock; if we were the last reader and
                    // a writer is parked waiting, wake it up.
                    if (U.getAndAddInt(this, LOCKSTATE, -READER) ==
                        (READER|WAITER) && (w = waiter) != null)
                        LockSupport.unpark(w);
                }
                return p;
            }
        }
    }
    return null;
}
TreeBin add element
// Inserts (h, k, v) into the tree, or returns the existing node for an
// equal key (the caller decides whether to overwrite its value).
final TreeNode<K,V> putTreeVal(int h, K k, V v) {
    Class<?> kc = null;
    // Whether a full subtree search has already been done for this key.
    boolean searched = false;
    // Walk down from the root.
    for (TreeNode<K,V> p = root;;) {
        int dir, ph; K pk;
        // Empty tree: the new node becomes both list head and root.
        if (p == null) {
            first = root = new TreeNode<K,V>(h, k, v, null, null);
            break;
        }
        // Choose a side by hash.
        else if ((ph = p.hash) > h)
            dir = -1;
        else if (ph < h)
            dir = 1;
        // Equal hash and equal key: the node already exists.
        else if ((pk = p.key) == k || (pk != null && k.equals(pk)))
            return p;
        // Equal hash, different key, and Comparable cannot order them.
        else if ((kc == null &&
                  (kc = comparableClassFor(k)) == null) ||
                 (dir = compareComparables(kc, k, pk)) == 0) {
            if (!searched) {
                TreeNode<K,V> q, ch;
                // Search both subtrees once for an equal key before
                // committing to an insertion position.
                searched = true;
                if (((ch = p.left) != null &&
                     (q = ch.findTreeNode(h, k, kc)) != null) ||
                    ((ch = p.right) != null &&
                     (q = ch.findTreeNode(h, k, kc)) != null))
                    return q;
            }
            // Still tied: break the tie deterministically.
            dir = tieBreakOrder(k, pk);
        }
        TreeNode<K,V> xp = p;
        // Reached an empty child on the chosen side: append the new node.
        if ((p = (dir <= 0) ? p.left : p.right) == null) {
            TreeNode<K,V> x, f = first;
            // The new node also becomes the head of the linked list.
            first = x = new TreeNode<K,V>(h, k, v, f, xp);
            // Back-link the old head to the new one.
            if (f != null)
                f.prev = x;
            // Link into the tree on the chosen side.
            if (dir <= 0)
                xp.left = x;
            else
                xp.right = x;
            // Parent is black: just colour the new node red, no rebalance
            // needed. Parent is red: red-red violation — take the write
            // lock and rebalance.
            if (!xp.red)
                x.red = true;
            else {
                lockRoot();
                try {
                    root = balanceInsertion(root, x);
                } finally {
                    unlockRoot();
                }
            }
            break;
        }
    }
    // Invariant check (active only with assertions enabled).
    assert checkInvariants(root);
    return null;
}
TreeBin delete
// Unlinks p from both the list and the tree. Returns true when the bin
// has become too small and should be converted back to a linked list.
final boolean removeTreeNode(TreeNode<K,V> p) {
    // Unlink p from the doubly linked list first.
    TreeNode<K,V> next = (TreeNode<K,V>)p.next;
    TreeNode<K,V> pred = p.prev;  // unlink traversal pointers
    TreeNode<K,V> r, rl;
    // No predecessor: next becomes the new list head.
    if (pred == null)
        first = next;
    else
        // Bridge the predecessor over p.
        pred.next = next;
    // Back-link the successor to the predecessor.
    if (next != null)
        next.prev = pred;
    // List now empty: drop the tree entirely.
    if (first == null) {
        root = null;
        return true;
    }
    // Tree too shallow to be worth keeping: tell the caller to untreeify.
    if ((r = root) == null || r.right == null || // too small
        (rl = r.left) == null || rl.left == null)
        return true;
    // Take the write lock for the structural change.
    lockRoot();
    try {
        TreeNode<K,V> replacement;
        TreeNode<K,V> pl = p.left;
        TreeNode<K,V> pr = p.right;
        // Standard red-black deletion, mirroring HashMap.TreeNode:
        // with two children, swap p with its in-order successor s.
        if (pl != null && pr != null) {
            TreeNode<K,V> s = pr, sl;
            while ((sl = s.left) != null) // find successor
                s = sl;
            boolean c = s.red; s.red = p.red; p.red = c; // swap colors
            TreeNode<K,V> sr = s.right;
            TreeNode<K,V> pp = p.parent;
            if (s == pr) { // p was s's direct parent
                p.parent = s;
                s.right = p;
            }
            else {
                TreeNode<K,V> sp = s.parent;
                if ((p.parent = sp) != null) {
                    if (s == sp.left)
                        sp.left = p;
                    else
                        sp.right = p;
                }
                if ((s.right = pr) != null)
                    pr.parent = s;
            }
            p.left = null;
            if ((p.right = sr) != null)
                sr.parent = p;
            if ((s.left = pl) != null)
                pl.parent = s;
            if ((s.parent = pp) == null)
                r = s;
            else if (p == pp.left)
                pp.left = s;
            else
                pp.right = s;
            // p now sits at s's old position; its right child (if any)
            // replaces it, otherwise p itself is spliced out below.
            if (sr != null)
                replacement = sr;
            else
                replacement = p;
        }
        else if (pl != null)
            // Only a left child.
            replacement = pl;
        else if (pr != null)
            // Only a right child.
            replacement = pr;
        else
            // Leaf node: nothing replaces it.
            replacement = p;
        // Splice the replacement into p's position in the tree.
        if (replacement != p) {
            TreeNode<K,V> pp = replacement.parent = p.parent;
            if (pp == null)
                r = replacement;
            else if (p == pp.left)
                pp.left = replacement;
            else
                pp.right = replacement;
            p.left = p.right = p.parent = null;
        }
        // A red node can be removed without rebalancing; otherwise fix up.
        root = (p.red) ? r : balanceDeletion(r, replacement);
        // p was a leaf: detach it from its parent now.
        if (p == replacement) {  // detach pointers
            TreeNode<K,V> pp;
            if ((pp = p.parent) != null) {
                if (p == pp.left)
                    pp.left = null;
                else if (p == pp.right)
                    pp.right = null;
                p.parent = null;
            }
        }
    } finally {
        // Release the write lock.
        unlockRoot();
    }
    // Invariant check (active only with assertions enabled).
    assert checkInvariants(root);
    return false;
}
Left-handed right-handed
// Standard red-black tree left rotation (body elided in the original text;
// identical to HashMap.TreeNode.rotateLeft).
static <K,V> TreeNode<K,V> rotateLeft(TreeNode<K,V> root, TreeNode<K,V> p) { . . . }
// Standard red-black tree right rotation (body elided; identical to
// HashMap.TreeNode.rotateRight).
static <K,V> TreeNode<K,V> rotateRight(TreeNode<K,V> root, TreeNode<K,V> p) { . . . }
Insert delete balance
// Restores red-black invariants after inserting x (body elided in the
// original text; identical to HashMap.TreeNode.balanceInsertion).
static <K,V> TreeNode<K,V> balanceInsertion(TreeNode<K,V> root, TreeNode<K,V> x) { . . . }
// Restores red-black invariants after deleting a node (body elided;
// identical to HashMap.TreeNode.balanceDeletion).
static <K,V> TreeNode<K,V> balanceDeletion(TreeNode<K,V> root, TreeNode<K,V> x) { . . . }
Important internal class ForwardingNode
static final class ForwardingNode<K,V> extends Node<K,V>
ForwardingNode will be used during capacity expansion and movement
ForwardingNode property
// The table being migrated to during a resize final Node<K,V>[] nextTable;

ForwardingNode constructor

ForwardingNode(Node<K,V>[] tab) { super(MOVED, null, null, null); this.nextTable = tab; }
ForwardingNode method
// Re-routes a lookup into the new table while this slot is being moved.
Node<K,V> find(int h, Object k) {
    // Loop (instead of recursing) in case of chained ForwardingNodes.
    outer: for (Node<K,V>[] tab = nextTable;;) {
        Node<K,V> e; int n;
        // Null key, missing table, or empty target slot: not found.
        if (k == null || tab == null || (n = tab.length) == 0 ||
            (e = tabAt(tab, (n - 1) & h)) == null)
            return null;
        // Scan the chain in the target slot.
        for (;;) {
            int eh; K ek;
            // Current node matches on hash and key.
            if ((eh = e.hash) == h &&
                ((ek = e.key) == k || (ek != null && k.equals(ek))))
                return e;
            // A negative hash marks a special node.
            if (eh < 0) {
                // Another ForwardingNode: hop to its table and restart.
                if (e instanceof ForwardingNode) {
                    tab = ((ForwardingNode<K,V>)e).nextTable;
                    continue outer;
                }
                else
                    // e.g. a TreeBin: delegate to its own find.
                    return e.find(h, k);
            }
            // Reached the tail without a match.
            if ((e = e.next) == null)
                return null;
        }
    }
}
ConcurrentHashMap property
// Maximum table (array) length: 2^30.
private static final int MAXIMUM_CAPACITY = 1 << 30;
// Default initial table length.
private static final int DEFAULT_CAPACITY = 16;
// Largest possible array size (used in toArray); leaves headroom for VM headers.
static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;
// Default concurrency level; kept only for serialization compatibility.
private static final int DEFAULT_CONCURRENCY_LEVEL = 16;
// Default load factor.
private static final float LOAD_FACTOR = 0.75f;
// Chain length at which a bin is converted from a list to a tree.
static final int TREEIFY_THRESHOLD = 8;
// Size at which a tree bin is converted back to a list.
static final int UNTREEIFY_THRESHOLD = 6;
// Minimum table length before bins are treeified; below this the
// table is resized instead.
static final int MIN_TREEIFY_CAPACITY = 64;
// Minimum number of bins each thread migrates per transfer step.
private static final int MIN_TRANSFER_STRIDE = 16;
// Number of bits used to generate a unique stamp for each resize.
private static int RESIZE_STAMP_BITS = 16;
// Maximum number of threads that can help a single resize.
private static final int MAX_RESIZERS = (1 << (32 - RESIZE_STAMP_BITS)) - 1;
// Shift amount: the resize stamp is shifted up and stored in sizeCtl as the
// base of the resizing-thread count; shifting back down recovers the stamp.
private static final int RESIZE_STAMP_SHIFT = 32 - RESIZE_STAMP_BITS;
// Node hash marking a ForwardingNode (transfer in progress).
static final int MOVED = -1;
// Node hash marking a TreeBin (bin converted to a tree).
static final int TREEBIN = -2;
// Node hash marking a ReservationNode (placeholder for compute-style ops).
static final int RESERVED = -3;
// Mask of usable bits for a normal node hash (0x7fffffff = Integer.MAX_VALUE);
// keeps normal hashes non-negative, since negative values mark special nodes.
static final int HASH_BITS = 0x7fffffff;
// Number of available processors.
static final int NCPU = Runtime.getRuntime().availableProcessors();
// Serialization-compatibility fields from the old segmented implementation.
private static final ObjectStreamField[] serialPersistentFields = { new ObjectStreamField("segments", Segment[].class), new ObjectStreamField("segmentMask", Integer.TYPE), new ObjectStreamField("segmentShift", Integer.TYPE) };
// The bin array; created lazily on first insert.
transient volatile Node<K,V>[] table;
// The next table to transfer into; non-null only while resizing.
private transient volatile Node<K,V>[] nextTable;
// Base element count, updated via CAS when uncontended.
private transient volatile long baseCount;
// Table initialisation/resize control: negative while initialising or
// resizing, otherwise holds the next resize threshold.
private transient volatile int sizeCtl;
// Starting index of the next bin range to hand out during a transfer.
private transient volatile int transferIndex;
// Spinlock flag used when creating or resizing counterCells.
private transient volatile int cellsBusy;
// Per-thread counter cells, used when CAS on baseCount contends.
private transient volatile CounterCell[] counterCells;
// Cached key-set view.
private transient KeySetView<K,V> keySet;
// Cached values view.
private transient ValuesView<K,V> values;
// Cached entry-set view.
private transient EntrySetView<K,V> entrySet;
// Unsafe instance for direct-memory CAS operations.
private static final sun.misc.Unsafe U;
// Field offset of sizeCtl.
private static final long SIZECTL;
// Field offset of transferIndex.
private static final long TRANSFERINDEX;
// Field offset of baseCount.
private static final long BASECOUNT;
// Field offset of cellsBusy.
private static final long CELLSBUSY;
// Field offset of CounterCell.value.
private static final long CELLVALUE;
// Base offset of Node[] array data.
private static final long ABASE;
// log2 of the Node[] element size, for index-to-offset conversion.
private static final int ASHIFT;
Concurrent HashMap load initialization
static {
    try {
        U = sun.misc.Unsafe.getUnsafe();
        // Cache field offsets once for later CAS/volatile access.
        Class<?> k = ConcurrentHashMap.class;
        SIZECTL = U.objectFieldOffset
            (k.getDeclaredField("sizeCtl"));
        TRANSFERINDEX = U.objectFieldOffset
            (k.getDeclaredField("transferIndex"));
        BASECOUNT = U.objectFieldOffset
            (k.getDeclaredField("baseCount"));
        CELLSBUSY = U.objectFieldOffset
            (k.getDeclaredField("cellsBusy"));
        Class<?> ck = CounterCell.class;
        CELLVALUE = U.objectFieldOffset
            (ck.getDeclaredField("value"));
        // Array geometry for addressing Node[] slots directly.
        Class<?> ak = Node[].class;
        ABASE = U.arrayBaseOffset(ak);
        int scale = U.arrayIndexScale(ak);
        // Element size must be a power of two so a shift can replace a multiply.
        if ((scale & (scale - 1)) != 0)
            throw new Error("data type scale not a power of two");
        ASHIFT = 31 - Integer.numberOfLeadingZeros(scale);
    } catch (Exception e) {
        // Reflection failure is unrecoverable at class-load time.
        throw new Error(e);
    }
}
ConcurrentHashMap constructor
// Default constructor: the table is created lazily with DEFAULT_CAPACITY (16).
public ConcurrentHashMap() {
}

public ConcurrentHashMap(int initialCapacity) {
    if (initialCapacity < 0)
        throw new IllegalArgumentException();
    // Size the table so initialCapacity elements fit without an immediate
    // resize (~1.5 * initialCapacity rounded up to a power of two);
    // sizeCtl temporarily holds the pending initial capacity.
    int cap = ((initialCapacity >= (MAXIMUM_CAPACITY >>> 1)) ?
               MAXIMUM_CAPACITY :
               tableSizeFor(initialCapacity + (initialCapacity >>> 1) + 1));
    this.sizeCtl = cap;
}

// Copy constructor: start from the default capacity, then bulk-insert.
public ConcurrentHashMap(Map<? extends K, ? extends V> m) {
    this.sizeCtl = DEFAULT_CAPACITY;
    putAll(m);
}

public ConcurrentHashMap(int initialCapacity, float loadFactor) {
    this(initialCapacity, loadFactor, 1);
}

public ConcurrentHashMap(int initialCapacity, float loadFactor,
                         int concurrencyLevel) {
    if (!(loadFactor > 0.0f) || initialCapacity < 0 || concurrencyLevel <= 0)
        throw new IllegalArgumentException();
    // concurrencyLevel estimates the number of concurrently updating
    // threads; ensure the capacity is at least that many slots.
    if (initialCapacity < concurrencyLevel)
        initialCapacity = concurrencyLevel;
    // Derive the initial table size from the requested load factor.
    long size = (long)(1.0 + (long)initialCapacity / loadFactor);
    int cap = (size >= (long)MAXIMUM_CAPACITY) ?
        MAXIMUM_CAPACITY : tableSizeFor((int)size);
    this.sizeCtl = cap;
}
#### Hash algorithm of ConcurrentHashMap
static final int spread(int h) { // Mix the high and low positions and control the range return (h ^ (h >>> 16)) & HASH_BITS; }
In HashMap the hash algorithm is `return (key == null) ? 0 : (h = key.hashCode()) ^ (h >>> 16);`. The spread method is ConcurrentHashMap's equivalent; the extra AND with HASH_BITS (the maximum int value, 0x7fffffff) clears the sign bit so the result is always a non-negative int — negative hash values are reserved for special nodes.
Length calculation of ConcurrentHashMap array
// Returns the smallest power of two >= c (clamped to [1, MAXIMUM_CAPACITY]).
// Smears the highest set bit of (c - 1) into every lower bit position,
// producing a 2^k - 1 mask; adding 1 then yields the next power of two.
private static final int tableSizeFor(int c) {
    int n = c - 1;
    // Shifts of 1, 2, 4, 8, 16 cover all 32 bits.
    for (int shift = 1; shift < 32; shift <<= 1)
        n |= n >>> shift;
    if (n < 0)
        return 1;
    return (n >= MAXIMUM_CAPACITY) ? MAXIMUM_CAPACITY : n + 1;
}
Please refer to HashMap for this method, which has detailed derivation
ConcurrentHashMap operation array header node
// Volatile read of the head node in slot i; the byte offset of element i
// is ((long)i << ASHIFT) + ABASE.
static final <K,V> Node<K,V> tabAt(Node<K,V>[] tab, int i) {
    return (Node<K,V>)U.getObjectVolatile(tab, ((long)i << ASHIFT) + ABASE);
}
// CAS the head of slot i from c to v; returns whether the swap succeeded.
static final <K,V> boolean casTabAt(Node<K,V>[] tab, int i,
                                    Node<K,V> c, Node<K,V> v) {
    return U.compareAndSwapObject(tab, ((long)i << ASHIFT) + ABASE, c, v);
}
// Volatile write of v as the head of slot i.
static final <K,V> void setTabAt(Node<K,V>[] tab, int i, Node<K,V> v) {
    U.putObjectVolatile(tab, ((long)i << ASHIFT) + ABASE, v);
}
ConcurrentHashMap add
// Public insert; always overwrites an existing mapping.
public V put(K key, V value) {
    return putVal(key, value, false);
}

// Core insert. onlyIfAbsent = true keeps an existing value (putIfAbsent
// semantics); returns the previous value, or null if the key was absent.
final V putVal(K key, V value, boolean onlyIfAbsent) {
    // Neither null keys nor null values are allowed.
    if (key == null || value == null) throw new NullPointerException();
    int hash = spread(key.hashCode());
    // Bin size after the insert; used to decide on treeification.
    int binCount = 0;
    // Retry loop; restarted after CAS failures or a helped transfer.
    for (Node<K,V>[] tab = table;;) {
        Node<K,V> f; int n, i, fh;
        // Table not yet created (first insert): initialise it.
        if (tab == null || (n = tab.length) == 0)
            tab = initTable();
        // Target slot is empty: try to CAS the new node in directly,
        // with no locking; on failure another thread won, so retry.
        else if ((f = tabAt(tab, i = (n - 1) & hash)) == null) {
            if (casTabAt(tab, i, null,
                         new Node<K,V>(hash, key, value, null)))
                break;
        }
        // Slot is being transferred: help the resize, then retry.
        else if ((fh = f.hash) == MOVED)
            tab = helpTransfer(tab, f);
        else {
            V oldVal = null;
            // Lock the head node of the bin.
            synchronized (f) {
                // Re-check that the head has not changed under us.
                if (tabAt(tab, i) == f) {
                    // Non-negative hash: a plain linked list.
                    if (fh >= 0) {
                        binCount = 1;
                        // Walk the chain, counting nodes as we go.
                        for (Node<K,V> e = f;; ++binCount) {
                            K ek;
                            // Key already present.
                            if (e.hash == hash &&
                                ((ek = e.key) == key ||
                                 (ek != null && key.equals(ek)))) {
                                oldVal = e.val;
                                // Overwrite unless onlyIfAbsent is set.
                                if (!onlyIfAbsent)
                                    e.val = value;
                                break;
                            }
                            // No match yet: advance.
                            Node<K,V> pred = e;
                            // Reached the tail: append the new node.
                            if ((e = e.next) == null) {
                                pred.next = new Node<K,V>(hash, key,
                                                          value, null);
                                break;
                            }
                        }
                    }
                    // Negative hash: a TreeBin (hash is -2).
                    else if (f instanceof TreeBin) {
                        Node<K,V> p;
                        // Fixed marker value for tree bins (never treeifies).
                        binCount = 2;
                        // Delegate to the tree insert; non-null means the
                        // key already existed.
                        if ((p = ((TreeBin<K,V>)f).putTreeVal(hash, key,
                                                              value)) != null) {
                            oldVal = p.val;
                            if (!onlyIfAbsent)
                                p.val = value;
                        }
                    }
                }
            }
            if (binCount != 0) {
                // Chain reached TREEIFY_THRESHOLD (8): convert to a tree.
                if (binCount >= TREEIFY_THRESHOLD)
                    treeifyBin(tab, i);
                // Existing key: return the previous value (replaced or kept).
                if (oldVal != null)
                    return oldVal;
                break;
            }
        }
    }
    // A new mapping was added: bump the count (may trigger a resize).
    addCount(1L, binCount);
    return null;
}
Looking only at the putVal method, ConcurrentHashMap seems similar to HashMap apart from locking the bin's head element — but if you think that is the whole story, it only means you haven't read the third article (on resizing and element transfer) yet.
ConcurrentHashMap initialization array
// Lazily creates the table; exactly one thread wins the initialisation.
private final Node<K,V>[] initTable() {
    Node<K,V>[] tab; int sc;
    // Loop until the table exists.
    while ((tab = table) == null || tab.length == 0) {
        // sizeCtl < 0: another thread is already initialising/resizing.
        if ((sc = sizeCtl) < 0)
            // Give up the CPU and re-check.
            Thread.yield();
        // CAS sizeCtl to -1 to claim the initialisation.
        else if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) {
            try {
                // Re-check: another thread may have finished meanwhile.
                if ((tab = table) == null || tab.length == 0) {
                    // sc > 0: capacity requested by a constructor;
                    // otherwise use the default length (16).
                    int n = (sc > 0) ? sc : DEFAULT_CAPACITY;
                    // Allocate and publish the table.
                    Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n];
                    table = tab = nt;
                    // Next resize threshold: n - n/4 == 0.75 * n.
                    sc = n - (n >>> 2);
                }
            } finally {
                // Publish the new threshold.
                sizeCtl = sc;
            }
            break;
        }
    }
    return tab;
}
Concurrent HashMap linked list to tree
// This method is called when binCount (linked list length) is greater than 8 private final void treeifyBin(Node<K,V>[] tab, int index) { Node<K,V> b; int n, sc; // Array cannot be empty if (tab != null) { // Judge whether it is less than 64 if ((n = tab.length) < MIN_TREEIFY_CAPACITY) // Capacity expansion part III tryPresize(n << 1); // If it is greater than 64, get the header node and judge whether it is TreeBin else if ((b = tabAt(tab, index)) != null && b.hash >= 0) { // Lock the head node synchronized (b) { // Take the header node again to judge whether it is the same as b if (tabAt(tab, index) == b) { TreeNode<K,V> hd = null, tl = null; for (Node<K,V> e = b; e != null; e = e.next) { // Build tree node TreeNode<K,V> p = new TreeNode<K,V>(e.hash, e.key, e.val, null, null); // Build a linked list structure of tree nodes, // The tree structure is not converted until TreeBin is initialized if ((p.prev = tl) == null) hd = p; else tl.next = p; tl = p; } // Build TreeBin and put it into the current slot setTabAt(tab, index, new TreeBin<K,V>(hd)); } } } } }
Concurrent HashMap tree to linked list
static <K,V> Node<K,V> untreeify(Node<K,V> b) { Node<K,V> hd = null, tl = null; // Traversal node for (Node<K,V> q = b; q != null; q = q.next) { // Build common nodes Node<K,V> p = new Node<K,V>(q.hash, q.key, q.val, null); if (tl == null) hd = p; else tl.next = p; tl = p; } return hd; }
Value of ConcurrentHashMap key
// Lock-free lookup: volatile reads only; never blocks.
public V get(Object key) {
    Node<K,V>[] tab; Node<K,V> e, p; int n, eh; K ek;
    int h = spread(key.hashCode());
    // Table must exist and the slot (n - 1) & h — i.e. h mod n, see the
    // HashMap derivation — must be non-empty.
    if ((tab = table) != null && (n = tab.length) > 0 &&
        (e = tabAt(tab, (n - 1) & h)) != null) {
        // Head node hash matches: compare keys.
        if ((eh = e.hash) == h) {
            if ((ek = e.key) == key || (ek != null && key.equals(ek)))
                return e.val;
        }
        // Negative hash marks a special node (TreeBin is -2,
        // ForwardingNode is -1): delegate to its own find.
        else if (eh < 0)
            return (p = e.find(h, key)) != null ? p.val : null;
        // Otherwise it is a plain chain: linear scan.
        while ((e = e.next) != null) {
            if (e.hash == h &&
                ((ek = e.key) == key || (ek != null && key.equals(ek))))
                return e.val;
        }
    }
    return null;
}
ConcurrentHashMap delete
// Public removal; unconditional (no expected value).
public V remove(Object key) {
    return replaceNode(key, null, null);
}

// Shared implementation of remove/replace: value == null removes the node,
// otherwise its value is replaced; cv (when non-null) is the expected
// current value that must match for the operation to proceed.
final V replaceNode(Object key, V value, Object cv) {
    int hash = spread(key.hashCode());
    // Retry loop; restarted after a helped transfer.
    for (Node<K,V>[] tab = table;;) {
        Node<K,V> f; int n, i, fh;
        // Missing table or empty slot: nothing to do.
        if (tab == null || (n = tab.length) == 0 ||
            (f = tabAt(tab, i = (n - 1) & hash)) == null)
            break;
        // Slot is being transferred: help the resize, then retry.
        else if ((fh = f.hash) == MOVED)
            tab = helpTransfer(tab, f);
        else {
            V oldVal = null;
            // Whether the search ran against a stable head node.
            boolean validated = false;
            // Lock the head node.
            synchronized (f) {
                // Re-check that the head has not changed.
                if (tabAt(tab, i) == f) {
                    // Non-negative hash: a plain linked list.
                    if (fh >= 0) {
                        validated = true;
                        for (Node<K,V> e = f, pred = null;;) {
                            K ek;
                            // Key found.
                            if (e.hash == hash &&
                                ((ek = e.key) == key ||
                                 (ek != null && key.equals(ek)))) {
                                V ev = e.val;
                                // cv == null means unconditional; otherwise
                                // the current value must match cv.
                                if (cv == null || cv == ev ||
                                    (ev != null && cv.equals(ev))) {
                                    oldVal = ev;
                                    // Replace the value, or unlink the
                                    // node; removing the head installs its
                                    // successor as the new head.
                                    if (value != null)
                                        e.val = value;
                                    else if (pred != null)
                                        pred.next = e.next;
                                    else
                                        setTabAt(tab, i, e.next);
                                }
                                break;
                            }
                            // Advance down the chain.
                            pred = e;
                            if ((e = e.next) == null)
                                break;
                        }
                    }
                    // Tree bin: search and remove in the tree.
                    else if (f instanceof TreeBin) {
                        validated = true;
                        TreeBin<K,V> t = (TreeBin<K,V>)f;
                        TreeNode<K,V> r, p;
                        if ((r = t.root) != null &&
                            (p = r.findTreeNode(hash, key, null)) != null) {
                            V pv = p.val;
                            // Same conditional-match rule as above.
                            if (cv == null || cv == pv ||
                                (pv != null && cv.equals(pv))) {
                                oldVal = pv;
                                // Replace in place, or delete from the
                                // tree; removeTreeNode returning true
                                // means the bin shrank enough to be
                                // converted back to a list.
                                if (value != null)
                                    p.val = value;
                                else if (t.removeTreeNode(p))
                                    setTabAt(tab, i,
                                             untreeify(t.first));
                            }
                        }
                    }
                }
            }
            // The search actually ran (head was stable).
            if (validated) {
                if (oldVal != null) {
                    // Only an actual removal shrinks the count;
                    // a replace leaves the size unchanged.
                    if (value == null)
                        addCount(-1L, -1);
                    return oldVal;
                }
                break;
            }
        }
    }
    return null;
}
ConcurrentHashMap empty
// Removes all entries, one bin at a time (not atomic as a whole).
public void clear() {
    // Net change to the element count (accumulated as a negative number).
    long delta = 0L;
    int i = 0;
    Node<K,V>[] tab = table;
    // Walk every slot of the table.
    while (tab != null && i < tab.length) {
        int fh;
        Node<K,V> f = tabAt(tab, i);
        // Empty slot: advance.
        if (f == null)
            ++i;
        // Slot being transferred: help the resize, then restart the scan.
        else if ((fh = f.hash) == MOVED) {
            tab = helpTransfer(tab, f);
            i = 0;
        }
        else {
            // Lock the head node before emptying the slot.
            synchronized (f) {
                // Re-check that the head has not changed.
                if (tabAt(tab, i) == f) {
                    // First element of the chain, or of the tree's
                    // doubly linked node list.
                    Node<K,V> p = (fh >= 0 ? f :
                                   (f instanceof TreeBin) ?
                                   ((TreeBin<K,V>)f).first : null);
                    // Count how many elements this slot held.
                    while (p != null) {
                        --delta;
                        p = p.next;
                    }
                    // Empty the slot and advance.
                    setTabAt(tab, i++, null);
                }
            }
        }
    }
    // Adjust the total size once at the end.
    if (delta != 0L)
        addCount(delta, -1);
}
Concurrent HashMap will lock the header element when modifying the linked list or tree. Other modification operations will not get the lock, and they will wait.
addCount of ConcurrentHashMap
/**
 * Adds {@code x} to the element count and, when appropriate, checks
 * whether the table needs to be resized (or helps a resize already
 * in progress).
 *
 * @param x     the count delta (negative for removals)
 * @param check per the call sites in this file: when check <= 1 the
 *              resize check is skipped early, and when check < 0 it is
 *              skipped entirely — presumably the caller passes the bin
 *              length or a negative sentinel; confirm against callers.
 */
private final void addCount(long x, int check) {
    CounterCell[] as; long b, s;
    // Fast path: if no counter cells exist, try a single CAS on baseCount.
    // Enter the slow path when cells already exist OR the CAS fails
    // (i.e. there is contention on baseCount).
    if ((as = counterCells) != null ||
        !U.compareAndSwapLong(this, BASECOUNT, b = baseCount, s = b + x)) {
        CounterCell a; long v; int m;
        boolean uncontended = true;
        // Pick a pseudo-random cell via the thread probe masked by the
        // array length (counterCells length is a power of two).
        // Fall through to fullAddCount when: the cell array is absent,
        // the chosen cell is null, or the CAS on the cell's value fails.
        if (as == null || (m = as.length - 1) < 0 ||
            (a = as[ThreadLocalRandom.getProbe() & m]) == null ||
            !(uncontended =
              U.compareAndSwapLong(a, CELLVALUE, v = a.value, v + x))) {
            // Slow path: create/expand the cell table and record x there
            fullAddCount(x, uncontended);
            return;
        }
        // Cheap update succeeded; skip the resize check for tiny bins
        if (check <= 1)
            return;
        // Compute the current total for the resize check below
        s = sumCount();
    }
    // check >= 0: see whether the table should grow
    if (check >= 0) {
        Node<K,V>[] tab, nt; int n, sc;
        // Loop while the count has reached the sizeCtl threshold, the
        // table exists, and it can still be doubled (< 1 << 30)
        while (s >= (long)(sc = sizeCtl) && (tab = table) != null &&
               (n = tab.length) < MAXIMUM_CAPACITY) {
            // Resize stamp: a marker unique to this table length
            int rs = resizeStamp(n);
            // sc < 0: a resize is already running
            if (sc < 0) {
                // Five bail-out conditions meaning we cannot (or need not)
                // help: stamp mismatch, resize finished/finishing, no next
                // table yet, or no transfer work left to claim
                if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 ||
                    sc == rs + MAX_RESIZERS || (nt = nextTable) == null ||
                    transferIndex <= 0)
                    break;
                // Register as one more helper (sc + 1) and migrate
                if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1))
                    transfer(tab, nt);
            }
            // No resize running: claim initiation by setting sizeCtl to
            // (rs << RESIZE_STAMP_SHIFT) + 2 — always negative
            else if (U.compareAndSwapInt(this, SIZECTL, sc,
                                         (rs << RESIZE_STAMP_SHIFT) + 2))
                // First thread starts the migration (nextTab == null)
                transfer(tab, null);
            // Re-read the count and re-test the loop condition
            s = sumCount();
        }
    }
}
The logic here is convoluted, so let's take it apart. First, the counterCells element counter: why is a separate counter array needed at all?
What happens under high concurrency when many threads update the element count at once? Nodes themselves are added and removed efficiently, but every thread would then contend on the single baseCount field, and that contention would drag overall performance back down. The counterCells array exists precisely to relieve this pressure on baseCount. When there is no contention, a thread simply CASes baseCount directly. Under contention the CAS on baseCount fails — and the update cannot simply be dropped — so the thread instead picks a pseudo-random slot in the counterCells array and adds its delta to that cell's value (each cell is a small counter object). To obtain the total number of elements, baseCount and the values of all counterCells entries are summed together. This is the striped (segmented) counting principle of ConcurrentHashMap.
Counting elements in ConcurrentHashMap: size(), sumCount() and isEmpty()
public int size() { // Returns the number of current elements long n = sumCount(); return ((n < 0L) ? 0 : (n > (long)Integer.MAX_VALUE) ? Integer.MAX_VALUE : (int)n); } final long sumCount() { CounterCell[] as = counterCells; CounterCell a; long sum = baseCount; if (as != null) { // Traverse all counters for (int i = 0; i < as.length; ++i) { // Participate in statistics when the element is not empty if ((a = as[i]) != null) sum += a.value; } } return sum; } // Map is empty when statistics is 0 public boolean isEmpty() { return sumCount() <= 0L; }
Capacity expansion of ConcurrentHashMap
/**
 * Moves every bin from {@code tab} into a table of twice the size.
 * The first resizing thread calls this with nextTab == null and
 * allocates the new array; later threads join in and each claims a
 * "stride" of slots to migrate.
 */
private final void transfer(Node<K,V>[] tab, Node<K,V>[] nextTab) {
    int n = tab.length, stride;
    // stride = slots claimed per thread per round:
    // (length / 8) / NCPU on multi-core, the whole table on single core;
    // never less than MIN_TRANSFER_STRIDE (16)
    if ((stride = (NCPU > 1) ? (n >>> 3) / NCPU : n) < MIN_TRANSFER_STRIDE)
        stride = MIN_TRANSFER_STRIDE;
    // nextTab == null: we are the thread that initiates the resize
    if (nextTab == null) {
        try {
            // Allocate the new array at twice the old length
            Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n << 1];
            nextTab = nt;
        } catch (Throwable ex) {
            // Allocation failed (e.g. length overflow / OOME):
            // saturate sizeCtl so no further resize is attempted
            sizeCtl = Integer.MAX_VALUE;
            return;
        }
        // Publish the new table for helper threads
        nextTable = nextTab;
        // transferIndex starts at the old length; threads claim
        // [bound, i] ranges by CASing it downward
        transferIndex = n;
    }
    int nextn = nextTab.length;
    // Placeholder installed in migrated slots; its MOVED hash tells
    // readers/writers to go to (or help build) the new table
    ForwardingNode<K,V> fwd = new ForwardingNode<K,V>(nextTab);
    boolean advance = true;   // ready to claim/step to the next slot
    boolean finishing = false; // true once the whole table is migrated
    for (int i = 0, bound = 0;;) {
        Node<K,V> f; int fh;
        // Claim work: step i down within our range, or CAS a new range
        while (advance) {
            int nextIndex, nextBound;
            if (--i >= bound || finishing)
                advance = false;
            else if ((nextIndex = transferIndex) <= 0) {
                // No slots left to claim
                i = -1;
                advance = false;
            }
            // Claim the next stride by lowering transferIndex, leaving
            // the remaining range for subsequent threads
            else if (U.compareAndSwapInt
                     (this, TRANSFERINDEX, nextIndex,
                      nextBound = (nextIndex > stride ?
                                   nextIndex - stride : 0))) {
                bound = nextBound;
                i = nextIndex - 1;
                advance = false;
            }
        }
        // Out of range: this thread has no more slots to process
        if (i < 0 || i >= n || i + n >= nextn) {
            int sc;
            if (finishing) {
                // Last thread: publish the new table, clear nextTable,
                // and set the next threshold to 0.75 * new length
                nextTable = null;
                table = nextTab;
                sizeCtl = (n << 1) - (n >>> 1);
                return;
            }
            // Deregister this helper (sizeCtl - 1)
            if (U.compareAndSwapInt(this, SIZECTL, sc = sizeCtl, sc - 1)) {
                // If helpers remain, just leave; the initial value was
                // (rs << RESIZE_STAMP_SHIFT) + 2, so equality here means
                // we are the last thread out
                if ((sc - 2) != resizeStamp(n) << RESIZE_STAMP_SHIFT)
                    return;
                // We are last: recheck the whole table before committing
                finishing = advance = true;
                i = n;
            }
        }
        // Empty slot: install the forwarding placeholder directly
        else if ((f = tabAt(tab, i)) == null)
            advance = casTabAt(tab, i, null, fwd);
        // Already a ForwardingNode: someone migrated this slot, skip it
        else if ((fh = f.hash) == MOVED)
            advance = true;
        // Migrate a populated slot
        else {
            // Lock the bin's head node
            synchronized (f) {
                // Confirm the head did not change while locking
                if (tabAt(tab, i) == f) {
                    Node<K,V> ln, hn; // low (stays at i) / high (goes to i+n)
                    // fh >= 0: linked-list bin
                    if (fh >= 0) {
                        // (hash & n) splits nodes into "low" (bit clear,
                        // stays at index i) and "high" (bit set, moves to
                        // index i + n)
                        int runBit = fh & n;
                        Node<K,V> lastRun = f;
                        // Find the tail run whose nodes all share the same
                        // bit — that run can be reused without copying
                        for (Node<K,V> p = f.next; p != null; p = p.next) {
                            int b = p.hash & n;
                            if (b != runBit) {
                                runBit = b;
                                lastRun = p;
                            }
                        }
                        // Seed the appropriate list with the reusable run
                        if (runBit == 0) {
                            ln = lastRun;
                            hn = null;
                        }
                        else {
                            hn = lastRun;
                            ln = null;
                        }
                        // Copy the remaining nodes onto the front of the
                        // low/high lists (order before lastRun reverses)
                        for (Node<K,V> p = f; p != lastRun; p = p.next) {
                            int ph = p.hash; K pk = p.key; V pv = p.val;
                            if ((ph & n) == 0)
                                ln = new Node<K,V>(ph, pk, pv, ln);
                            else
                                hn = new Node<K,V>(ph, pk, pv, hn);
                        }
                        // Install both lists in the new table
                        setTabAt(nextTab, i, ln);
                        setTabAt(nextTab, i + n, hn);
                        // Mark the old slot as migrated
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                    // Tree bin: split into two trees (lo / hi)
                    else if (f instanceof TreeBin) {
                        TreeBin<K,V> t = (TreeBin<K,V>)f;
                        TreeNode<K,V> lo = null, loTail = null;
                        TreeNode<K,V> hi = null, hiTail = null;
                        int lc = 0, hc = 0; // node counts of each half
                        // Walk the tree's node chain, appending each copy
                        // to the lo or hi doubly-linked list by (hash & n)
                        for (Node<K,V> e = t.first; e != null; e = e.next) {
                            int h = e.hash;
                            TreeNode<K,V> p = new TreeNode<K,V>
                                (h, e.key, e.val, null, null);
                            if ((h & n) == 0) {
                                if ((p.prev = loTail) == null)
                                    lo = p;
                                else
                                    loTail.next = p;
                                loTail = p;
                                ++lc;
                            }
                            else {
                                if ((p.prev = hiTail) == null)
                                    hi = p;
                                else
                                    hiTail.next = p;
                                hiTail = p;
                                ++hc;
                            }
                        }
                        // A half at or below UNTREEIFY_THRESHOLD (6) becomes
                        // a plain list; if the other half is empty the
                        // original TreeBin is reused unchanged
                        ln = (lc <= UNTREEIFY_THRESHOLD) ? untreeify(lo) :
                             (hc != 0) ? new TreeBin<K,V>(lo) : t;
                        hn = (hc <= UNTREEIFY_THRESHOLD) ? untreeify(hi) :
                             (lc != 0) ? new TreeBin<K,V>(hi) : t;
                        // Install both halves in the new table
                        setTabAt(nextTab, i, ln);
                        setTabAt(nextTab, i + n, hn);
                        // Mark the old slot as migrated
                        setTabAt(tab, i, fwd);
                        advance = true;
                    }
                }
            }
        }
    }
}
Capacity expansion here is quite different from HashMap's. On a single-core machine one thread migrates all the slots by itself; with multiple threads, each participating thread claims at least 16 slots (MIN_TRANSFER_STRIDE) at a time.
Capacity expansion triggered when a ConcurrentHashMap chain is about to be converted to a tree: the tryPresize() method
private final void tryPresize(int size) { // Size has been * 2 before it is imported. Judge the legitimacy of size int c = (size >= (MAXIMUM_CAPACITY >>> 1)) ? MAXIMUM_CAPACITY : tableSizeFor(size + (size >>> 1) + 1); int sc; while ((sc = sizeCtl) >= 0) { Node<K,V>[] tab = table; int n; // Array is empty if (tab == null || (n = tab.length) == 0) { n = (sc > c) ? sc : c; // Modify capacity expansion threshold if (U.compareAndSwapInt(this, SIZECTL, sc, -1)) { try { // Initialize table if (table == tab) { Node<K,V>[] nt = (Node<K,V>[])new Node<?,?>[n]; table = nt; sc = n - (n >>> 2); } } finally { // Set new capacity expansion threshold sizeCtl = sc; } } } // No expansion required else if (c <= sc || n >= MAXIMUM_CAPACITY) break; // Does the table change else if (tab == table) { // The following is the same as the second half of addcount int rs = resizeStamp(n); // The threshold is less than zero (modified to negative by the capacity expansion thread) if (sc < 0) { Node<K,V>[] nt; if ((sc >>> RESIZE_STAMP_SHIFT) != rs || sc == rs + 1 || sc == rs + MAX_RESIZERS || (nt = nextTable) == null || transferIndex <= 0) break; // Help migrate elements if (U.compareAndSwapInt(this, SIZECTL, sc, sc + 1)) transfer(tab, nt); } // Mark the migration else if (U.compareAndSwapInt(this, SIZECTL, sc, (rs << RESIZE_STAMP_SHIFT) + 2)) transfer(tab, null); } } }