ConcurrentHashMap vs HashMap:并发场景下的正确选择
| 特性 | HashMap | ConcurrentHashMap |
|---|---|---|
| 线程安全 | ❌ 不安全 | ✅ 安全 |
| 性能 | 🟢 高(单线程) | 🟡 好(多线程) |
| 锁机制 | 无 | 分段锁/CAS |
| 并发读 | ❌ 不安全 | ✅ 支持 |
| 并发写 | ❌ 可能丢失数据 | ✅ 支持 |
| 迭代器 | fail-fast | 弱一致性 |
| null值 | ✅ 支持 | ❌ 不支持 |
Java并发编程初级系列第9篇 HashMap是Java中最常用的集合类之一,但在并发环境下使用HashMap会出现严重问题。本文将详细对比HashMap和ConcurrentHashMap,帮你在并发场景下做出正确选择。
🚨 HashMap在并发环境下的问题
数据丢失问题
/**
 * Demonstrates three classic failure modes of sharing a plain HashMap across
 * threads: lost updates, the pre-JDK-8 resize infinite loop, and inconsistent
 * reads during concurrent iteration.
 *
 * NOTE(review): the demos are probabilistic — a single run may happen to look
 * "safe"; that does not make HashMap thread-safe.
 */
public class HashMapConcurrencyProblems {
    /**
     * Demonstrates data loss when multiple threads put into a HashMap concurrently.
     *
     * @throws InterruptedException if the main thread is interrupted while joining workers
     */
    public static void demonstrateDataLoss() throws InterruptedException {
        System.out.println("=== HashMap并发数据丢失演示 ===");
        Map<String, Integer> hashMap = new HashMap<>();
        int threadCount = 10;
        int operationsPerThread = 1000;
        // Create several threads that all write to the same (unsynchronized) HashMap.
        Thread[] threads = new Thread[threadCount];
        for (int i = 0; i < threadCount; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                for (int j = 0; j < operationsPerThread; j++) {
                    // Each thread writes a disjoint key range, so every put should add a new entry.
                    String key = "key" + (threadId * operationsPerThread + j);
                    hashMap.put(key, threadId * operationsPerThread + j);
                }
            }, "Thread-" + i);
        }
        // Start all worker threads.
        long startTime = System.currentTimeMillis();
        for (Thread thread : threads) {
            thread.start();
        }
        // Wait for every worker to finish.
        for (Thread thread : threads) {
            thread.join();
        }
        long endTime = System.currentTimeMillis();
        int expectedSize = threadCount * operationsPerThread;
        int actualSize = hashMap.size();
        System.out.println("期望大小: " + expectedSize);
        System.out.println("实际大小: " + actualSize);
        System.out.println("数据丢失: " + (expectedSize - actualSize));
        System.out.println("执行时间: " + (endTime - startTime) + "ms");
        if (actualSize < expectedSize) {
            System.out.println("❌ 发生了数据丢失!HashMap不是线程安全的");
        } else {
            System.out.println("✅ 这次运行没有数据丢失(但不代表HashMap是安全的)");
        }
    }

    /**
     * Explains (without reproducing) the HashMap infinite-loop hazard that
     * existed before JDK 8's tail-insertion rework of resize().
     */
    public static void demonstrateInfiniteLoop() {
        System.out.println("\n=== HashMap死循环问题说明 ===");
        System.out.println("在JDK 8之前,HashMap在并发环境下可能出现死循环:");
        System.out.println("1. 多线程同时进行put操作");
        System.out.println("2. 触发HashMap的resize操作");
        System.out.println("3. 在rehash过程中,链表可能形成环形结构");
        System.out.println("4. 后续的get操作可能陷入无限循环");
        System.out.println();
        System.out.println("JDK 8改进了HashMap的实现,使用尾插法避免了死循环");
        System.out.println("但仍然存在数据丢失和不一致的问题");
        // Note: the loop is deliberately not reproduced here — it could hang the program.
        System.out.println("注意:即使在JDK 8+中,HashMap在并发环境下仍然不安全!");
    }

    /**
     * Demonstrates inconsistent reads while a HashMap is being written by another
     * thread: iteration may throw ConcurrentModificationException or observe a
     * size that disagrees with the number of reachable entries.
     *
     * @throws InterruptedException if the main thread is interrupted while joining workers
     */
    public static void demonstrateDataInconsistency() throws InterruptedException {
        System.out.println("\n=== HashMap数据不一致演示 ===");
        Map<String, Integer> hashMap = new HashMap<>();
        // Thread 1: keeps inserting entries.
        Thread putThread = new Thread(() -> {
            for (int i = 0; i < 10000; i++) {
                hashMap.put("key" + i, i);
                if (i % 1000 == 0) {
                    System.out.println("Put thread: 已插入 " + i + " 个元素");
                }
            }
        }, "PutThread");
        // Thread 2: keeps reading and iterating concurrently.
        Thread getThread = new Thread(() -> {
            for (int i = 0; i < 1000; i++) {
                try {
                    int size = hashMap.size();
                    // Iterating the HashMap here may throw or observe inconsistent data.
                    int count = 0;
                    for (Map.Entry<String, Integer> entry : hashMap.entrySet()) {
                        count++;
                        if (count > size + 100) { // guard against a possible infinite loop
                            System.out.println("❌ 检测到异常:遍历次数超过预期");
                            break;
                        }
                    }
                    if (i % 100 == 0) {
                        System.out.println("Get thread: size=" + size + ", 实际遍历=" + count);
                    }
                } catch (Exception e) {
                    // ConcurrentModificationException (or similar) is the expected outcome.
                    System.out.println("❌ 遍历时发生异常: " + e.getClass().getSimpleName());
                }
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }, "GetThread");
        putThread.start();
        Thread.sleep(100); // give the put thread a head start
        getThread.start();
        putThread.join();
        getThread.join();
        System.out.println("最终HashMap大小: " + hashMap.size());
    }

    public static void main(String[] args) throws InterruptedException {
        demonstrateDataLoss();
        demonstrateInfiniteLoop();
        demonstrateDataInconsistency();
    }
}
🛡️ ConcurrentHashMap的解决方案
基本用法和线程安全性
/**
 * Companion demo to the HashMap failure cases: the identical multi-threaded
 * workload runs safely on ConcurrentHashMap, and its per-key atomic methods
 * (compute, putIfAbsent, replace, merge, ...) remove the need for external
 * synchronization around read-modify-write sequences.
 */
public class ConcurrentHashMapSafety {

    /**
     * Runs the same 10-thread / 1000-puts-per-thread workload used in the
     * HashMap data-loss demo and verifies that no entries are lost.
     *
     * @throws InterruptedException if interrupted while joining workers
     */
    public static void demonstrateThreadSafety() throws InterruptedException {
        System.out.println("=== ConcurrentHashMap线程安全演示 ===");
        Map<String, Integer> concurrentMap = new ConcurrentHashMap<>();
        int threadCount = 10;
        int operationsPerThread = 1000;
        Thread[] workers = new Thread[threadCount];
        for (int t = 0; t < threadCount; t++) {
            final int id = t;
            workers[t] = new Thread(() -> {
                int base = id * operationsPerThread;
                // Disjoint key ranges per worker: every put adds a distinct entry.
                for (int j = 0; j < operationsPerThread; j++) {
                    concurrentMap.put("key" + (base + j), base + j);
                }
            }, "Thread-" + t);
        }
        long begin = System.currentTimeMillis();
        for (Thread w : workers) {
            w.start();
        }
        for (Thread w : workers) {
            w.join();
        }
        long elapsed = System.currentTimeMillis() - begin;
        int expectedSize = threadCount * operationsPerThread;
        int actualSize = concurrentMap.size();
        System.out.println("期望大小: " + expectedSize);
        System.out.println("实际大小: " + actualSize);
        System.out.println("数据完整性: " + (actualSize == expectedSize ? "✅ 完整" : "❌ 丢失"));
        System.out.println("执行时间: " + elapsed + "ms");
    }

    /**
     * Shows that compute() performs an atomic per-key read-modify-write, so
     * concurrent increments from several threads are never lost.
     *
     * @throws InterruptedException if interrupted while joining workers
     */
    public static void demonstrateAtomicOperations() throws InterruptedException {
        System.out.println("\n=== ConcurrentHashMap原子操作演示 ===");
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
        // Seed ten named counters at zero.
        for (int c = 0; c < 10; c++) {
            map.put("counter" + c, 0);
        }
        int threadCount = 5;
        Thread[] workers = new Thread[threadCount];
        // Every worker increments each of the ten counters 1000 times.
        for (int t = 0; t < threadCount; t++) {
            final int id = t;
            workers[t] = new Thread(() -> {
                for (int c = 0; c < 10; c++) {
                    String key = "counter" + c;
                    // compute() is atomic per key — no increments can be lost.
                    for (int n = 0; n < 1000; n++) {
                        map.compute(key, (k, v) -> v == null ? 1 : v + 1);
                    }
                }
            }, "Thread-" + id);
        }
        for (Thread w : workers) {
            w.start();
        }
        for (Thread w : workers) {
            w.join();
        }
        System.out.println("原子操作结果验证:");
        for (int c = 0; c < 10; c++) {
            String key = "counter" + c;
            Integer value = map.get(key);
            int expected = threadCount * 1000;
            System.out.println(key + ": " + value + " (期望: " + expected + ") "
                    + (value.equals(expected) ? "✅" : "❌"));
        }
    }

    /**
     * Walks through the conditional/atomic convenience methods:
     * putIfAbsent, replace(key, old, new), compute, computeIfAbsent, merge.
     */
    public static void demonstrateAdvancedOperations() {
        System.out.println("\n=== ConcurrentHashMap高级操作演示 ===");
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
        for (int n = 0; n < 10; n++) {
            map.put("item" + n, n * 10);
        }
        System.out.println("初始数据: " + map);

        // putIfAbsent: only inserts when the key is missing.
        Integer previous = map.putIfAbsent("item5", 999);
        System.out.println("putIfAbsent(item5, 999) 返回: " + previous + " (已存在,不会覆盖)");
        previous = map.putIfAbsent("item10", 100);
        System.out.println("putIfAbsent(item10, 100) 返回: " + previous + " (不存在,已添加)");

        // replace(key, expected, new): succeeds only if the current value matches.
        boolean swapped = map.replace("item1", 10, 11);
        System.out.println("replace(item1, 10, 11): " + swapped);
        swapped = map.replace("item1", 10, 12);
        System.out.println("replace(item1, 10, 12): " + swapped + " (值已经不是10)");

        // compute: derive a new value from the current one.
        map.compute("item2", (key, value) -> value == null ? 1 : value * 2);
        System.out.println("compute(item2, v -> v*2): " + map.get("item2"));

        // computeIfAbsent: lazily materialize a value for a missing key.
        Integer created = map.computeIfAbsent("item11", key -> key.length() * 10);
        System.out.println("computeIfAbsent(item11, len*10): " + created);

        // merge: combine an existing value with a supplied one.
        Integer combined = map.merge("item3", 5, (oldVal, newVal) -> oldVal + newVal);
        System.out.println("merge(item3, 5, sum): " + combined);
        System.out.println("最终数据: " + map);
    }

    public static void main(String[] args) throws InterruptedException {
        demonstrateThreadSafety();
        demonstrateAtomicOperations();
        demonstrateAdvancedOperations();
    }
}
⚡ 性能对比
不同场景下的性能测试
/**
 * Micro-benchmark comparing Collections.synchronizedMap(HashMap),
 * ConcurrentHashMap and Hashtable under three workloads: mixed read/write,
 * high-contention hot-key writes, and read-only access.
 *
 * NOTE(review): wall-clock timing with no JVM warm-up or repetition is only
 * indicative, not a rigorous benchmark (consider JMH for real measurements).
 */
public class PerformanceComparison {
    // Workers per scenario and operations each worker performs.
    private static final int THREAD_COUNT = 8;
    private static final int OPERATIONS_PER_THREAD = 100000;

    /**
     * Mixed-workload benchmark (70% get / 30% put) across the three map types.
     *
     * @throws InterruptedException if interrupted while waiting for workers
     */
    public static void readWriteMixedTest() throws InterruptedException {
        System.out.println("=== 读写混合场景性能测试 ===");
        // HashMap wrapped with a single mutual-exclusion lock.
        Map<String, Integer> synchronizedMap = Collections.synchronizedMap(new HashMap<>());
        long syncTime = testReadWriteMixed("Synchronized HashMap", synchronizedMap);
        // ConcurrentHashMap: fine-grained locking / CAS.
        Map<String, Integer> concurrentMap = new ConcurrentHashMap<>();
        long concurrentTime = testReadWriteMixed("ConcurrentHashMap", concurrentMap);
        // Hashtable: every method synchronized on the table itself.
        Map<String, Integer> hashtable = new Hashtable<>();
        long hashtableTime = testReadWriteMixed("Hashtable", hashtable);
        System.out.println("\n性能对比 (读写混合):");
        System.out.println("Synchronized HashMap: " + syncTime + "ms");
        System.out.println("ConcurrentHashMap: " + concurrentTime + "ms");
        System.out.println("Hashtable: " + hashtableTime + "ms");
        System.out.println("ConcurrentHashMap比Synchronized HashMap快: " +
                String.format("%.1f", (double) syncTime / concurrentTime) + "x");
    }

    /**
     * Runs the 70/30 read/write workload against {@code map} and returns the
     * elapsed wall-clock time in milliseconds. A start latch ensures all
     * workers begin simultaneously so thread start-up cost is not measured.
     */
    private static long testReadWriteMixed(String name, Map<String, Integer> map)
            throws InterruptedException {
        // Pre-populate so reads have something to hit.
        for (int i = 0; i < 1000; i++) {
            map.put("key" + i, i);
        }
        Thread[] threads = new Thread[THREAD_COUNT];
        CountDownLatch startLatch = new CountDownLatch(1);
        CountDownLatch endLatch = new CountDownLatch(THREAD_COUNT);
        for (int i = 0; i < THREAD_COUNT; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                try {
                    startLatch.await(); // wait for the simultaneous start signal
                    // Seeded per-thread Random keeps the workload reproducible.
                    Random random = new Random(threadId);
                    for (int j = 0; j < OPERATIONS_PER_THREAD; j++) {
                        if (random.nextInt(10) < 7) { // 70% reads
                            String key = "key" + random.nextInt(1000);
                            map.get(key);
                        } else { // 30% writes
                            String key = "key" + random.nextInt(1000);
                            map.put(key, random.nextInt());
                        }
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    endLatch.countDown();
                }
            });
        }
        // Start all workers (they block on startLatch).
        for (Thread thread : threads) {
            thread.start();
        }
        long startTime = System.currentTimeMillis();
        startLatch.countDown(); // release all workers at once
        endLatch.await(); // wait until every worker is done
        long endTime = System.currentTimeMillis();
        System.out.println(name + " 完成,最终大小: " + map.size());
        return endTime - startTime;
    }

    /**
     * High-contention scenario: all threads hammer the same five hot keys,
     * maximizing lock/CAS contention on each map implementation.
     *
     * @throws InterruptedException if interrupted while waiting for workers
     */
    public static void highContentionWriteTest() throws InterruptedException {
        System.out.println("\n=== 高竞争写入场景测试 ===");
        // Scenario: many threads writing the same keys.
        String[] testMaps = {"Synchronized HashMap", "ConcurrentHashMap", "Hashtable"};
        long[] times = new long[3];
        // Synchronized HashMap
        Map<String, Integer> syncMap = Collections.synchronizedMap(new HashMap<>());
        times[0] = testHighContentionWrite(testMaps[0], syncMap);
        // ConcurrentHashMap
        Map<String, Integer> concurrentMap = new ConcurrentHashMap<>();
        times[1] = testHighContentionWrite(testMaps[1], concurrentMap);
        // Hashtable
        Map<String, Integer> hashtable = new Hashtable<>();
        times[2] = testHighContentionWrite(testMaps[2], hashtable);
        System.out.println("\n高竞争写入性能对比:");
        for (int i = 0; i < testMaps.length; i++) {
            System.out.println(testMaps[i] + ": " + times[i] + "ms");
        }
    }

    /**
     * Writes repeatedly to five shared hot keys from all workers and returns
     * the elapsed wall-clock time in milliseconds.
     */
    private static long testHighContentionWrite(String name, Map<String, Integer> map)
            throws InterruptedException {
        Thread[] threads = new Thread[THREAD_COUNT];
        CountDownLatch startLatch = new CountDownLatch(1);
        CountDownLatch endLatch = new CountDownLatch(THREAD_COUNT);
        // All threads compete on these few keys.
        String[] hotKeys = {"hot1", "hot2", "hot3", "hot4", "hot5"};
        for (int i = 0; i < THREAD_COUNT; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                try {
                    startLatch.await();
                    Random random = new Random(threadId);
                    // 1/10th of the normal operation count — contention makes each op slow.
                    for (int j = 0; j < OPERATIONS_PER_THREAD / 10; j++) {
                        String key = hotKeys[random.nextInt(hotKeys.length)];
                        map.put(key, threadId * 1000 + j);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    endLatch.countDown();
                }
            });
        }
        for (Thread thread : threads) {
            thread.start();
        }
        long startTime = System.currentTimeMillis();
        startLatch.countDown();
        endLatch.await();
        long endTime = System.currentTimeMillis();
        System.out.println(name + " 高竞争测试完成");
        return endTime - startTime;
    }

    /**
     * Read-only scenario: concurrent get() against a pre-built 10k-entry map.
     *
     * @throws InterruptedException if interrupted while waiting for workers
     */
    public static void readOnlyTest() throws InterruptedException {
        System.out.println("\n=== 纯读取场景测试 ===");
        // Build the shared fixture once, then copy it into each map type.
        Map<String, Integer> testData = new HashMap<>();
        for (int i = 0; i < 10000; i++) {
            testData.put("key" + i, i);
        }
        // Synchronized HashMap
        Map<String, Integer> syncMap = Collections.synchronizedMap(new HashMap<>(testData));
        long syncTime = testReadOnly("Synchronized HashMap", syncMap);
        // ConcurrentHashMap
        Map<String, Integer> concurrentMap = new ConcurrentHashMap<>(testData);
        long concurrentTime = testReadOnly("ConcurrentHashMap", concurrentMap);
        // Hashtable
        Map<String, Integer> hashtable = new Hashtable<>(testData);
        long hashtableTime = testReadOnly("Hashtable", hashtable);
        System.out.println("\n纯读取性能对比:");
        System.out.println("Synchronized HashMap: " + syncTime + "ms");
        System.out.println("ConcurrentHashMap: " + concurrentTime + "ms");
        System.out.println("Hashtable: " + hashtableTime + "ms");
    }

    /**
     * Performs OPERATIONS_PER_THREAD random get() calls per worker and returns
     * the elapsed wall-clock time in milliseconds.
     */
    private static long testReadOnly(String name, Map<String, Integer> map)
            throws InterruptedException {
        Thread[] threads = new Thread[THREAD_COUNT];
        CountDownLatch startLatch = new CountDownLatch(1);
        CountDownLatch endLatch = new CountDownLatch(THREAD_COUNT);
        for (int i = 0; i < THREAD_COUNT; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                try {
                    startLatch.await();
                    Random random = new Random(threadId);
                    for (int j = 0; j < OPERATIONS_PER_THREAD; j++) {
                        String key = "key" + random.nextInt(10000);
                        map.get(key);
                    }
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } finally {
                    endLatch.countDown();
                }
            });
        }
        for (Thread thread : threads) {
            thread.start();
        }
        long startTime = System.currentTimeMillis();
        startLatch.countDown();
        endLatch.await();
        long endTime = System.currentTimeMillis();
        return endTime - startTime;
    }

    public static void main(String[] args) throws InterruptedException {
        System.out.println("开始性能测试...");
        System.out.println("线程数: " + THREAD_COUNT);
        System.out.println("每线程操作数: " + OPERATIONS_PER_THREAD);
        System.out.println();
        readWriteMixedTest();
        highContentionWriteTest();
        readOnlyTest();
    }
}
🔧 ConcurrentHashMap内部原理
分段锁和CAS机制
/**
 * Illustrates (at a conceptual level) how ConcurrentHashMap achieves
 * concurrency: fine-grained locking/CAS instead of a single table lock,
 * and non-blocking reads during resize.
 */
public class ConcurrentHashMapInternals {
    /**
     * Prints a summary of the internal structure (JDK 7 segments vs
     * JDK 8+ Node array + CAS + synchronized) and runs a small concurrent
     * update demo on disjoint key ranges.
     */
    public static void demonstrateSegmentation() {
        System.out.println("=== ConcurrentHashMap分段特性演示 ===");
        ConcurrentHashMap<String, String> map = new ConcurrentHashMap<>();
        System.out.println("ConcurrentHashMap的内部结构特点:");
        System.out.println("1. JDK 7: 使用分段锁(Segment),默认16个段");
        System.out.println("2. JDK 8+: 使用Node数组 + CAS + synchronized");
        System.out.println("3. 只锁定需要修改的部分,而不是整个Map");
        System.out.println();
        // Populate the map before the concurrent-update demo.
        for (int i = 0; i < 100; i++) {
            map.put("key" + i, "value" + i);
        }
        System.out.println("当前Map大小: " + map.size());
        System.out.println("并发级别允许多个线程同时操作不同的段");
        // Run the concurrent-operation demo on the populated map.
        demonstrateConcurrentOperations(map);
    }

    /**
     * Four workers update disjoint 25-key ranges in parallel; disjoint ranges
     * mean minimal contention on the map's internal bins.
     */
    private static void demonstrateConcurrentOperations(ConcurrentHashMap<String, String> map) {
        System.out.println("\n--- 并发操作演示 ---");
        // Several threads operate on different keys simultaneously.
        Thread[] threads = new Thread[4];
        for (int i = 0; i < 4; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                // Each thread works on its own key range to reduce lock contention.
                int start = threadId * 25;
                int end = (threadId + 1) * 25;
                for (int j = start; j < end; j++) {
                    // Plain update.
                    map.put("key" + j, "updated_by_thread_" + threadId);
                    // Read back.
                    String value = map.get("key" + j);
                    // Conditional per-key atomic update.
                    map.computeIfPresent("key" + j, (k, v) -> v + "_computed");
                }
                System.out.println("线程" + threadId + "完成操作,范围: key" + start + " - key" + (end - 1));
            }, "Worker-" + i);
        }
        // Start all workers.
        for (Thread thread : threads) {
            thread.start();
        }
        // Wait for completion.
        for (Thread thread : threads) {
            try {
                thread.join();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
        System.out.println("所有线程完成,最终大小: " + map.size());
    }

    /**
     * Demonstrates lock-free-style counting: compute() gives atomic per-key
     * access, and AtomicInteger values absorb the increments.
     *
     * @throws InterruptedException if interrupted while joining workers
     */
    public static void demonstrateCASOperations() throws InterruptedException {
        System.out.println("\n=== CAS操作特点演示 ===");
        ConcurrentHashMap<String, AtomicInteger> map = new ConcurrentHashMap<>();
        // Seed ten counters at zero.
        for (int i = 0; i < 10; i++) {
            map.put("counter" + i, new AtomicInteger(0));
        }
        int threadCount = 8;
        Thread[] threads = new Thread[threadCount];
        System.out.println("使用CAS操作进行无锁递增...");
        for (int i = 0; i < threadCount; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                for (int j = 0; j < 1000; j++) {
                    // compute() gives an atomic per-key read-modify-write.
                    map.compute("counter" + (j % 10), (key, value) -> {
                        if (value == null) {
                            return new AtomicInteger(1);
                        } else {
                            value.incrementAndGet();
                            return value;
                        }
                    });
                }
                System.out.println("线程" + threadId + "完成1000次CAS操作");
            }, "CASThread-" + i);
        }
        long startTime = System.currentTimeMillis();
        for (Thread thread : threads) {
            thread.start();
        }
        for (Thread thread : threads) {
            thread.join();
        }
        long endTime = System.currentTimeMillis();
        System.out.println("CAS操作完成,耗时: " + (endTime - startTime) + "ms");
        // Verify that no increments were lost.
        int totalExpected = threadCount * 1000;
        int totalActual = 0;
        for (int i = 0; i < 10; i++) {
            int value = map.get("counter" + i).get();
            totalActual += value;
            System.out.println("counter" + i + ": " + value);
        }
        System.out.println("总计 - 期望: " + totalExpected + ", 实际: " + totalActual +
                ", 正确: " + (totalExpected == totalActual));
    }

    /**
     * Shows resize behavior for a map created with a small initial capacity,
     * then summarizes how ConcurrentHashMap resizes cooperatively.
     */
    public static void demonstrateResizing() {
        System.out.println("\n=== 扩容行为演示 ===");
        // Small initial capacity so resizing kicks in quickly.
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>(4, 0.75f);
        System.out.println("初始容量: 4, 负载因子: 0.75");
        System.out.println("理论扩容阈值: " + (4 * 0.75) + " (3个元素)");
        // Add elements one by one to cross the resize threshold.
        for (int i = 0; i < 20; i++) {
            map.put("key" + i, i);
            if (i < 10) { // only report details for the first ten
                System.out.println("添加key" + i + "后,大小: " + map.size());
            }
        }
        System.out.println("最终大小: " + map.size());
        System.out.println();
        System.out.println("ConcurrentHashMap扩容特点:");
        System.out.println("1. 扩容时不会阻塞读操作");
        System.out.println("2. 写操作可能需要等待扩容完成");
        System.out.println("3. 使用多线程并发扩容提高效率");
    }

    public static void main(String[] args) throws InterruptedException {
        demonstrateSegmentation();
        demonstrateCASOperations();
        demonstrateResizing();
    }
}
📊 使用场景对比
选择指南
/**
 * Selection guide plus three realistic ConcurrentHashMap scenarios:
 * a read-heavy cache, a high-throughput counter system (with LongAdder),
 * and hot-reloadable configuration.
 */
public class UsageScenarios {
    /**
     * Prints a comparison table recommending a Map implementation per scenario.
     */
    public static void showUsageGuidelines() {
        System.out.println("=== Map选择指南 ===");
        System.out.println();
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "场景", "HashMap", "Hashtable", "SynchronizedMap", "ConcurrentHashMap");
        System.out.println("─".repeat(85));
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "单线程", "✅ 推荐", "❌ 性能差", "❌ 不必要", "❌ 开销大");
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "多线程读多写少", "❌ 不安全", "⚠️ 性能差", "⚠️ 性能差", "✅ 推荐");
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "多线程高并发写", "❌ 不安全", "❌ 阻塞严重", "❌ 阻塞严重", "✅ 推荐");
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "需要强一致性", "❌ 不安全", "✅ 可以", "✅ 可以", "⚠️ 弱一致性");
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "需要原子复合操作", "❌ 不安全", "❌ 需要额外同步", "❌ 需要额外同步", "✅ 支持");
        System.out.printf("%-20s %-15s %-15s %-15s %-20s%n",
                "JDK版本要求", "1.2+", "1.0+", "1.2+", "1.5+");
    }

    /**
     * Runs the three real-world scenario demos in sequence.
     *
     * @throws InterruptedException if interrupted while a demo is joining its threads
     */
    public static void demonstrateRealWorldScenarios() throws InterruptedException {
        System.out.println("\n=== 实际应用场景演示 ===");
        // Scenario 1: cache.
        demonstrateCacheScenario();
        // Scenario 2: counters.
        demonstrateCounterScenario();
        // Scenario 3: configuration management.
        demonstrateConfigScenario();
    }

    /**
     * Scenario 1 — cache: read-heavy workload (8 readers, 2 writers) over a
     * shared ConcurrentHashMap; misses are filled with putIfAbsent so only
     * one loader wins per key.
     */
    private static void demonstrateCacheScenario() throws InterruptedException {
        System.out.println("\n--- 场景1:缓存系统 ---");
        System.out.println("特点:读多写少,需要高并发支持");
        // Simple cache backed by ConcurrentHashMap.
        ConcurrentHashMap<String, String> cache = new ConcurrentHashMap<>();
        // Warm up the cache.
        for (int i = 0; i < 1000; i++) {
            cache.put("data" + i, "cached_value_" + i);
        }
        int readerThreads = 8;
        int writerThreads = 2;
        Thread[] threads = new Thread[readerThreads + writerThreads];
        // Reader threads simulate high-frequency lookups.
        for (int i = 0; i < readerThreads; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                Random random = new Random(threadId);
                for (int j = 0; j < 10000; j++) {
                    String key = "data" + random.nextInt(1000);
                    String value = cache.get(key);
                    if (value == null) {
                        // Cache miss: simulate loading from a database;
                        // putIfAbsent ensures only one loader populates the key.
                        cache.putIfAbsent(key, "loaded_value_" + key);
                    }
                }
            }, "Reader-" + i);
        }
        // Writer threads simulate occasional cache refreshes.
        for (int i = 0; i < writerThreads; i++) {
            final int threadId = i;
            threads[readerThreads + i] = new Thread(() -> {
                Random random = new Random(threadId + 1000);
                for (int j = 0; j < 1000; j++) {
                    String key = "data" + random.nextInt(1000);
                    cache.put(key, "updated_value_" + System.currentTimeMillis());
                    try {
                        Thread.sleep(1); // pacing between writes
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }, "Writer-" + i);
        }
        long startTime = System.currentTimeMillis();
        for (Thread thread : threads) {
            thread.start();
        }
        for (Thread thread : threads) {
            thread.join();
        }
        long endTime = System.currentTimeMillis();
        System.out.println("缓存系统测试完成:");
        System.out.println("- 最终缓存大小: " + cache.size());
        System.out.println("- 总耗时: " + (endTime - startTime) + "ms");
        System.out.println("- ConcurrentHashMap支持高并发读写");
    }

    /**
     * Scenario 2 — counters: LongAdder values inside a ConcurrentHashMap give
     * contention-friendly increments under heavy parallel updates.
     */
    private static void demonstrateCounterScenario() throws InterruptedException {
        System.out.println("\n--- 场景2:计数器系统 ---");
        System.out.println("特点:需要原子操作,高并发更新");
        ConcurrentHashMap<String, LongAdder> counters = new ConcurrentHashMap<>();
        // Register the named counters up front.
        String[] counterNames = {"page_view", "user_login", "api_call", "error_count", "success_count"};
        for (String name : counterNames) {
            counters.put(name, new LongAdder());
        }
        int threadCount = 10;
        Thread[] threads = new Thread[threadCount];
        for (int i = 0; i < threadCount; i++) {
            final int threadId = i;
            threads[i] = new Thread(() -> {
                Random random = new Random(threadId);
                for (int j = 0; j < 10000; j++) {
                    String counterName = counterNames[random.nextInt(counterNames.length)];
                    // LongAdder spreads contention across internal cells.
                    counters.get(counterName).increment();
                    // Occasionally simulate a bulk update.
                    if (random.nextInt(100) < 5) { // ~5% probability
                        counters.get(counterName).add(10);
                    }
                }
            }, "Counter-" + i);
        }
        long startTime = System.currentTimeMillis();
        for (Thread thread : threads) {
            thread.start();
        }
        for (Thread thread : threads) {
            thread.join();
        }
        long endTime = System.currentTimeMillis();
        System.out.println("计数器系统测试完成:");
        for (String name : counterNames) {
            System.out.println("- " + name + ": " + counters.get(name).sum());
        }
        System.out.println("- 总耗时: " + (endTime - startTime) + "ms");
        System.out.println("- ConcurrentHashMap + LongAdder提供高效计数");
    }

    /**
     * Scenario 3 — configuration: frequent readers, rare hot updates.
     *
     * FIX: the original declared {@code volatile boolean running} as a LOCAL
     * variable, which is illegal Java (locals cannot be volatile), and then
     * mutated it from the enclosing method while reading it inside lambdas —
     * lambdas may only capture (effectively) final locals, so this did not
     * compile. An AtomicBoolean provides both a legal capture target and the
     * cross-thread visibility the volatile flag was meant to give.
     */
    private static void demonstrateConfigScenario() throws InterruptedException {
        System.out.println("\n--- 场景3:配置管理 ---");
        System.out.println("特点:读多写极少,需要实时更新");
        ConcurrentHashMap<String, String> config = new ConcurrentHashMap<>();
        // Initial configuration values.
        config.put("database.url", "jdbc:mysql://localhost:3306/app");
        config.put("database.username", "app_user");
        config.put("cache.size", "1000");
        config.put("thread.pool.size", "10");
        config.put("timeout.seconds", "30");
        // High-frequency configuration readers.
        Thread[] readerThreads = new Thread[5];
        // Shared stop flag: must be an object (not a local volatile boolean) so the
        // lambdas can capture it and still observe the main thread's update.
        final java.util.concurrent.atomic.AtomicBoolean running =
                new java.util.concurrent.atomic.AtomicBoolean(true);
        for (int i = 0; i < 5; i++) {
            final int threadId = i;
            readerThreads[i] = new Thread(() -> {
                int readCount = 0;
                while (running.get()) {
                    // Simulate the application reading its configuration.
                    String dbUrl = config.get("database.url");
                    String cacheSize = config.get("cache.size");
                    String threadPoolSize = config.get("thread.pool.size");
                    readCount++;
                    try {
                        Thread.sleep(1); // simulated processing time
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
                System.out.println("配置读取线程" + threadId + "完成,读取次数: " + readCount);
            }, "ConfigReader-" + i);
        }
        // Low-frequency configuration updater (hot reload).
        Thread writerThread = new Thread(() -> {
            try {
                Thread.sleep(1000);
                // Simulated hot updates, visible to readers immediately.
                config.put("cache.size", "2000");
                System.out.println("配置更新: cache.size = 2000");
                Thread.sleep(2000);
                config.put("thread.pool.size", "20");
                System.out.println("配置更新: thread.pool.size = 20");
                Thread.sleep(2000);
                config.put("timeout.seconds", "60");
                System.out.println("配置更新: timeout.seconds = 60");
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }, "ConfigWriter");
        // Start every thread.
        for (Thread thread : readerThreads) {
            thread.start();
        }
        writerThread.start();
        // Let the demo run for 6 seconds, then signal the readers to stop.
        Thread.sleep(6000);
        running.set(false);
        writerThread.join();
        for (Thread thread : readerThreads) {
            thread.join();
        }
        System.out.println("配置管理测试完成:");
        System.out.println("- 最终配置: " + config);
        System.out.println("- ConcurrentHashMap支持配置的实时读取和更新");
    }

    public static void main(String[] args) throws InterruptedException {
        showUsageGuidelines();
        demonstrateRealWorldScenarios();
    }
}
📝 最佳实践
使用建议和注意事项
/**
 * Walks through recommended ConcurrentHashMap usage: capacity planning,
 * atomic per-key methods, common pitfalls (non-atomic compound operations,
 * weakly consistent iteration, approximate size), and tuning guidance.
 */
public class BestPractices {

    /** Runs the four best-practice walkthroughs in order. */
    public static void demonstrateBestPractices() {
        System.out.println("=== ConcurrentHashMap最佳实践 ===");
        demonstrateProperInitialization();    // 1. capacity planning
        demonstrateAtomicOperations();        // 2. atomic methods
        demonstrateCommonPitfalls();          // 3. common pitfalls
        demonstratePerformanceOptimization(); // 4. tuning advice
    }

    /** Shows why choosing the initial capacity up front avoids repeated resizing. */
    private static void demonstrateProperInitialization() {
        System.out.println("\n--- 1. 合理设置初始容量 ---");
        // Recommended: size the map for the expected element count.
        int expectedSize = 10000;
        ConcurrentHashMap<String, String> presized = new ConcurrentHashMap<>(expectedSize);
        System.out.println("✅ 推荐:设置合适的初始容量避免频繁扩容");
        // Not recommended for large data sets: the default capacity (16).
        ConcurrentHashMap<String, String> implicitCapacity = new ConcurrentHashMap<>();
        System.out.println("⚠️ 默认容量16可能导致频繁扩容");
        // Recommended: an explicit, sensible load factor.
        ConcurrentHashMap<String, String> tuned = new ConcurrentHashMap<>(1000, 0.75f);
        System.out.println("✅ 推荐:负载因子0.75在空间和时间上平衡较好");
        System.out.println("\n初始化建议:");
        System.out.println("- 预估数据量,设置合适的初始容量");
        System.out.println("- 负载因子通常使用默认的0.75");
        System.out.println("- 避免频繁扩容影响性能");
    }

    /** Contrasts a racy check-then-act sequence with the atomic per-key methods. */
    private static void demonstrateAtomicOperations() {
        System.out.println("\n--- 2. 使用原子操作方法 ---");
        ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
        // Anti-pattern: check-then-act is NOT atomic across calls.
        System.out.println("❌ 错误做法:");
        System.out.println("if (map.containsKey(key)) {");
        System.out.println(" map.put(key, map.get(key) + 1);");
        System.out.println("} else {");
        System.out.println(" map.put(key, 1);");
        System.out.println("}");
        // Correct: a single atomic compute() call.
        System.out.println("\n✅ 正确做法:");
        System.out.println("map.compute(key, (k, v) -> v == null ? 1 : v + 1);");

        String key = "counter";
        // putIfAbsent: insert only when absent; returns the previous value (null here).
        Integer previous = map.putIfAbsent(key, 1);
        System.out.println("putIfAbsent结果: " + previous);
        // compute: atomic read-modify-write on the key.
        map.compute(key, (k, v) -> v == null ? 1 : v + 1);
        System.out.println("compute后的值: " + map.get(key));
        // computeIfPresent: only runs when the key exists.
        map.computeIfPresent(key, (k, v) -> v * 2);
        System.out.println("computeIfPresent后的值: " + map.get(key));
        // merge: combine the existing value with a supplied one.
        map.merge(key, 10, Integer::sum);
        System.out.println("merge后的值: " + map.get(key));

        System.out.println("\n原子操作的优势:");
        System.out.println("- 避免竞态条件");
        System.out.println("- 提高性能(减少锁竞争)");
        System.out.println("- 代码更简洁");
    }

    /** Demonstrates three pitfalls: compound ops, weak iteration, approximate size(). */
    private static void demonstrateCommonPitfalls() {
        System.out.println("\n--- 3. 避免常见陷阱 ---");
        ConcurrentHashMap<String, List<String>> listsByKey = new ConcurrentHashMap<>();

        // Pitfall 1: a containsKey/put pair is not atomic even on a concurrent map.
        System.out.println("陷阱1:复合操作的原子性");
        System.out.println("❌ 错误:");
        System.out.println("if (!map.containsKey(key)) {");
        System.out.println(" map.put(key, new ArrayList<>());");
        System.out.println("}");
        System.out.println("map.get(key).add(value); // 可能NPE");
        System.out.println("\n✅ 正确:");
        System.out.println("map.computeIfAbsent(key, k -> new ArrayList<>()).add(value);");
        // The safe idiom in action: computeIfAbsent materializes the list atomically.
        String bucket = "list1";
        String element = "item1";
        listsByKey.computeIfAbsent(bucket, k -> new ArrayList<>()).add(element);
        System.out.println("安全添加元素到列表: " + listsByKey.get(bucket));

        // Pitfall 2: iterators are weakly consistent, not snapshot views.
        System.out.println("\n陷阱2:迭代时的弱一致性");
        ConcurrentHashMap<String, Integer> iterMap = new ConcurrentHashMap<>();
        for (int n = 0; n < 10; n++) {
            iterMap.put("key" + n, n);
        }
        System.out.println("ConcurrentHashMap的迭代器是弱一致性的:");
        System.out.println("- 可能看到迭代开始后的修改");
        System.out.println("- 不会抛出ConcurrentModificationException");
        System.out.println("- 但不保证看到所有并发修改");

        // Pitfall 3: size()/isEmpty() are estimates under concurrent mutation.
        System.out.println("\n陷阱3:size()的近似性");
        System.out.println("在高并发环境下,size()返回的是近似值");
        System.out.println("- 适用于监控和统计");
        System.out.println("- 不适用于精确的业务逻辑判断");
    }

    /** Prints tuning advice and a LongAdder-based counting example. */
    private static void demonstratePerformanceOptimization() {
        System.out.println("\n--- 4. 性能优化建议 ---");
        System.out.println("优化建议:");
        System.out.println();
        System.out.println("1. 减少锁竞争:");
        System.out.println(" - 使用原子操作方法(compute, merge等)");
        System.out.println(" - 避免热点key的频繁更新");
        System.out.println(" - 考虑数据分片");
        System.out.println("\n2. 合理设置容量:");
        System.out.println(" - 根据数据量设置初始容量");
        System.out.println(" - 避免频繁扩容");
        System.out.println(" - 负载因子通常保持默认0.75");
        System.out.println("\n3. 选择合适的操作:");
        System.out.println(" - 读多写少:直接使用get/put");
        System.out.println(" - 高并发写:使用compute系列方法");
        System.out.println(" - 计数场景:考虑LongAdder");
        System.out.println("\n4. 监控和调优:");
        System.out.println(" - 监控扩容频率");
        System.out.println(" - 观察锁竞争情况");
        System.out.println(" - 根据实际场景调整参数");

        // Example: efficient counting with ConcurrentHashMap + LongAdder.
        System.out.println("\n高效计数示例:");
        ConcurrentHashMap<String, LongAdder> tallies = new ConcurrentHashMap<>();
        String metric = "api_calls";
        // computeIfAbsent + LongAdder: contention-friendly increment.
        tallies.computeIfAbsent(metric, k -> new LongAdder()).increment();
        System.out.println("✅ 使用LongAdder进行高并发计数");
        System.out.println("计数值: " + tallies.get(metric).sum());
    }

    public static void main(String[] args) {
        demonstrateBestPractices();
    }
}
📝 总结
🎯 核心要点
| 特性 | HashMap | ConcurrentHashMap |
|---|---|---|
| 线程安全 | ❌ 不安全 | ✅ 安全 |
| 性能 | 🟢 高(单线程) | 🟡 好(多线程) |
| 锁机制 | 无 | 分段锁/CAS |
| 并发读 | ❌ 不安全 | ✅ 支持 |
| 并发写 | ❌ 可能丢失数据 | ✅ 支持 |
| 迭代器 | fail-fast | 弱一致性 |
| null值 | ✅ 支持 | ❌ 不支持 |
💡 选择建议
- 单线程环境:使用HashMap,性能最好
- 多线程读多写少:使用ConcurrentHashMap
- 多线程高并发写:使用ConcurrentHashMap
- 需要强一致性:使用synchronized包装或其他方案
- 简单同步需求:可考虑Collections.synchronizedMap
⚠️ 注意事项
- HashMap并发问题:数据丢失、死循环(JDK8前)、数据不一致
- ConcurrentHashMap特点:弱一致性、不支持null、size()近似值
- 性能考虑:根据读写比例选择合适的Map实现
- 原子操作:使用compute系列方法保证操作原子性
🚀 最佳实践
- 合理初始化:设置合适的初始容量和负载因子
- 使用原子方法:compute、merge、putIfAbsent等
- 避免复合操作:多个操作组合不是原子的
- 性能监控:关注扩容频率和锁竞争情况
🔧 实际应用
- 缓存系统:ConcurrentHashMap + 读写分离
- 计数器:ConcurrentHashMap + LongAdder
- 配置管理:ConcurrentHashMap支持热更新
- 会话管理:ConcurrentHashMap存储用户会话
ConcurrentHashMap是Java并发编程中的重要工具,正确使用它能够构建高性能、线程安全的应用程序!
更多推荐



所有评论(0)