Master Heaps & Priority Queues
Learn heap operations with interactive tree visualizations. Master insert, extract, and heapify. Understand Dijkstra's algorithm, heap sort, and priority queues.
Heap Visualization
Heap is empty
Insert elements to build the heap tree
Array is empty
Heap Operations
Heap Algorithms
Code Implementation
class MinHeap:
    """Array-backed binary min-heap: every parent <= its children, so the
    root (index 0) is always the minimum.

    0-indexed layout: parent of i is (i - 1) // 2, children are 2i + 1
    and 2i + 2. insert/extract_min are O(log n); peek is O(1).
    """

    def __init__(self):
        # Complete binary tree stored level-by-level in a flat list.
        self.heap = []

    def parent(self, i):
        # Index of i's parent (meaningful only for i > 0).
        return (i - 1) // 2

    def left_child(self, i):
        return 2 * i + 1

    def right_child(self, i):
        return 2 * i + 2

    def insert(self, value):
        """Append value at the end, then bubble it up to restore the heap."""
        self.heap.append(value)
        self._bubble_up(len(self.heap) - 1)

    def extract_min(self):
        """Remove and return the smallest value, or None if the heap is empty."""
        if not self.heap:
            return None
        if len(self.heap) == 1:
            return self.heap.pop()
        min_val = self.heap[0]
        # Move the last leaf to the root, then sift it down.
        self.heap[0] = self.heap.pop()
        self._bubble_down(0)
        return min_val

    def peek(self):
        """Return the smallest value without removing it (None if empty)."""
        return self.heap[0] if self.heap else None

    def _bubble_up(self, index):
        # Swap upward while the element is smaller than its parent.
        while index > 0:
            parent_idx = self.parent(index)
            if self.heap[index] < self.heap[parent_idx]:
                self.heap[index], self.heap[parent_idx] = \
                    self.heap[parent_idx], self.heap[index]
                index = parent_idx
            else:
                break

    def _bubble_down(self, index):
        # Swap downward with the smaller child until the heap property holds.
        while self.left_child(index) < len(self.heap):
            smaller_child = self._get_smaller_child(index)
            if self.heap[index] > self.heap[smaller_child]:
                self.heap[index], self.heap[smaller_child] = \
                    self.heap[smaller_child], self.heap[index]
                index = smaller_child
            else:
                break

    def _get_smaller_child(self, index):
        # Index of the smaller existing child; caller guarantees the left
        # child exists (checked in _bubble_down's loop condition).
        left = self.left_child(index)
        right = self.right_child(index)
        if right >= len(self.heap):
            return left
        return left if self.heap[left] < self.heap[right] else right
# Usage
# After inserting 10, 5, 20 the root holds the minimum (5), so the
# first extract_min() returns 5.
heap = MinHeap()
heap.insert(10)
heap.insert(5)
heap.insert(20)
print(heap.extract_min()) # Output: 5

class MinHeap {
// Backing store: complete binary tree flattened into an array.
constructor() {
this.heap = [];
}
// 0-indexed navigation: parent (i-1)/2 (floored), children 2i+1 / 2i+2.
parent(i) { return Math.floor((i - 1) / 2); }
leftChild(i) { return 2 * i + 1; }
rightChild(i) { return 2 * i + 2; }
// Append at the end, then bubble up to restore the heap — O(log n).
insert(value) {
this.heap.push(value);
this.bubbleUp(this.heap.length - 1);
}
// Remove and return the minimum (root), or null if empty — O(log n).
extractMin() {
if (this.heap.length === 0) return null;
if (this.heap.length === 1) return this.heap.pop();
const min = this.heap[0];
// Move the last leaf to the root, then sift it down.
this.heap[0] = this.heap.pop();
this.bubbleDown(0);
return min;
}
// Return the minimum without removing it (null if empty) — O(1).
peek() {
return this.heap.length > 0 ? this.heap[0] : null;
}
// Swap upward while the element is smaller than its parent.
bubbleUp(index) {
while (index > 0) {
const parentIdx = this.parent(index);
if (this.heap[index] < this.heap[parentIdx]) {
[this.heap[index], this.heap[parentIdx]] =
[this.heap[parentIdx], this.heap[index]];
index = parentIdx;
} else break;
}
}
// Swap downward with the smaller child until the heap property holds.
bubbleDown(index) {
while (this.leftChild(index) < this.heap.length) {
const smallerChild = this.getSmallerChild(index);
if (this.heap[index] > this.heap[smallerChild]) {
[this.heap[index], this.heap[smallerChild]] =
[this.heap[smallerChild], this.heap[index]];
index = smallerChild;
} else break;
}
}
// Index of the smaller existing child; caller guarantees left exists.
getSmallerChild(index) {
const left = this.leftChild(index);
const right = this.rightChild(index);
if (right >= this.heap.length) return left;
return this.heap[left] < this.heap[right] ? left : right;
}
}
// Usage
// Inserting 10, 5, 20 leaves 5 at the root, so the extractMin()
// call that follows returns 5.
const heap = new MinHeap();
heap.insert(10);
heap.insert(5);
heap.insert(20);
console.log(heap.extractMin()); // Output: 5

package heaps
// MinHeap is an array-backed binary min-heap of ints: every parent is
// <= its children, so elements[0] is always the minimum.
type MinHeap struct {
elements []int
}

// NewMinHeap returns an empty heap.
func NewMinHeap() *MinHeap {
return &MinHeap{elements: make([]int, 0)}
}

// 0-indexed navigation: parent (i-1)/2, children 2i+1 and 2i+2.
func (h *MinHeap) Parent(i int) int { return (i - 1) / 2 }
func (h *MinHeap) LeftChild(i int) int { return 2*i + 1 }
func (h *MinHeap) RightChild(i int) int { return 2*i + 2 }

// Insert appends value and bubbles it up to restore the heap — O(log n).
func (h *MinHeap) Insert(value int) {
h.elements = append(h.elements, value)
h.bubbleUp(len(h.elements) - 1)
}

// ExtractMin removes and returns the minimum; the second result is
// false when the heap is empty — O(log n).
func (h *MinHeap) ExtractMin() (int, bool) {
if len(h.elements) == 0 {
return 0, false
}
if len(h.elements) == 1 {
min := h.elements[0]
h.elements = h.elements[:0]
return min, true
}
min := h.elements[0]
// Move the last leaf to the root, shrink, then sift down.
h.elements[0] = h.elements[len(h.elements)-1]
h.elements = h.elements[:len(h.elements)-1]
h.bubbleDown(0)
return min, true
}

// Peek returns the minimum without removing it; false when empty — O(1).
func (h *MinHeap) Peek() (int, bool) {
if len(h.elements) == 0 {
return 0, false
}
return h.elements[0], true
}

// bubbleUp swaps upward while the element is smaller than its parent.
func (h *MinHeap) bubbleUp(index int) {
for index > 0 {
parent := h.Parent(index)
if h.elements[index] < h.elements[parent] {
h.elements[index], h.elements[parent] =
h.elements[parent], h.elements[index]
index = parent
} else {
break
}
}
}

// bubbleDown swaps downward with the smaller child until the heap
// property holds.
func (h *MinHeap) bubbleDown(index int) {
for h.LeftChild(index) < len(h.elements) {
smallerChild := h.getSmallerChild(index)
if h.elements[index] > h.elements[smallerChild] {
h.elements[index], h.elements[smallerChild] =
h.elements[smallerChild], h.elements[index]
index = smallerChild
} else {
break
}
}
}

// getSmallerChild returns the index of the smaller existing child;
// the caller guarantees the left child exists.
func (h *MinHeap) getSmallerChild(index int) int {
left := h.LeftChild(index)
right := h.RightChild(index)
if right >= len(h.elements) {
return left
}
if h.elements[left] < h.elements[right] {
return left
}
return right
}

import java.util.ArrayList;
/**
 * Array-backed binary min-heap of Integers: every parent is less than
 * or equal to its children, so index 0 always holds the minimum.
 * 0-indexed layout: parent (i-1)/2, children 2i+1 and 2i+2.
 */
public class MinHeap {
// Complete binary tree stored level-by-level.
private ArrayList<Integer> heap;
public MinHeap() {
heap = new ArrayList<>();
}
private int parent(int i) { return (i - 1) / 2; }
private int leftChild(int i) { return 2 * i + 1; }
private int rightChild(int i) { return 2 * i + 2; }
/** Appends value, then bubbles it up to restore the heap. O(log n). */
public void insert(int value) {
heap.add(value);
bubbleUp(heap.size() - 1);
}
/** Removes and returns the minimum, or null if empty. O(log n). */
public Integer extractMin() {
if (heap.isEmpty()) return null;
if (heap.size() == 1) return heap.remove(0);
int min = heap.get(0);
// Move the last leaf to the root, then sift it down.
heap.set(0, heap.remove(heap.size() - 1));
bubbleDown(0);
return min;
}
/** Returns the minimum without removing it, or null if empty. O(1). */
public Integer peek() {
return heap.isEmpty() ? null : heap.get(0);
}
// Swap upward while the element is smaller than its parent.
private void bubbleUp(int index) {
while (index > 0) {
int parentIdx = parent(index);
if (heap.get(index) < heap.get(parentIdx)) {
swap(index, parentIdx);
index = parentIdx;
} else break;
}
}
// Swap downward with the smaller child until the heap property holds.
private void bubbleDown(int index) {
while (leftChild(index) < heap.size()) {
int smallerChild = getSmallerChild(index);
if (heap.get(index) > heap.get(smallerChild)) {
swap(index, smallerChild);
index = smallerChild;
} else break;
}
}
// Index of the smaller existing child; caller guarantees left exists.
private int getSmallerChild(int index) {
int left = leftChild(index);
int right = rightChild(index);
if (right >= heap.size()) return left;
return heap.get(left) < heap.get(right) ? left : right;
}
private void swap(int i, int j) {
int temp = heap.get(i);
heap.set(i, heap.get(j));
heap.set(j, temp);
}
}

#include <vector>
#include <stdexcept>
// Array-backed binary min-heap of ints: every parent <= its children,
// so heap[0] is always the minimum. 0-indexed layout: parent (i-1)/2,
// children 2i+1 and 2i+2.
class MinHeap {
private:
// Complete binary tree stored level-by-level.
std::vector<int> heap;
int parent(int i) { return (i - 1) / 2; }
int leftChild(int i) { return 2 * i + 1; }
int rightChild(int i) { return 2 * i + 2; }
// Swap upward while the element is smaller than its parent.
void bubbleUp(int index) {
while (index > 0) {
int parentIdx = parent(index);
if (heap[index] < heap[parentIdx]) {
std::swap(heap[index], heap[parentIdx]);
index = parentIdx;
} else break;
}
}
// Swap downward with the smaller child until the heap property holds.
void bubbleDown(int index) {
while (leftChild(index) < heap.size()) {
int smallerChild = getSmallerChild(index);
if (heap[index] > heap[smallerChild]) {
std::swap(heap[index], heap[smallerChild]);
index = smallerChild;
} else break;
}
}
// Index of the smaller existing child; caller guarantees left exists.
int getSmallerChild(int index) {
int left = leftChild(index);
int right = rightChild(index);
if (right >= heap.size()) return left;
return heap[left] < heap[right] ? left : right;
}
public:
// Appends value, then bubbles it up to restore the heap. O(log n).
void insert(int value) {
heap.push_back(value);
bubbleUp(heap.size() - 1);
}
// Removes and returns the minimum; throws std::runtime_error if empty.
int extractMin() {
if (heap.empty())
throw std::runtime_error("Heap is empty");
if (heap.size() == 1) {
int min = heap[0];
heap.pop_back();
return min;
}
int min = heap[0];
// Move the last leaf to the root, then sift it down.
heap[0] = heap.back();
heap.pop_back();
bubbleDown(0);
return min;
}
// Returns the minimum without removing it; throws if empty. O(1).
int peek() {
if (heap.empty())
throw std::runtime_error("Heap is empty");
return heap[0];
}
bool isEmpty() { return heap.empty(); }
size_t size() { return heap.size(); }
};

pub struct MinHeap {
// Complete binary tree stored level-by-level in a flat vector.
heap: Vec<i32>,
}
// Binary min-heap: every parent <= its children, so heap[0] is the
// minimum. 0-indexed layout: parent (i-1)/2, children 2i+1 and 2i+2.
impl MinHeap {
// Creates an empty heap.
pub fn new() -> Self {
MinHeap { heap: Vec::new() }
}
// saturating_sub avoids usize underflow when i == 0.
fn parent(&self, i: usize) -> usize {
(i.saturating_sub(1)) / 2
}
fn left_child(&self, i: usize) -> usize {
2 * i + 1
}
fn right_child(&self, i: usize) -> usize {
2 * i + 2
}
// Pushes value, then bubbles it up to restore the heap. O(log n).
pub fn insert(&mut self, value: i32) {
self.heap.push(value);
let last_idx = self.heap.len() - 1;
self.bubble_up(last_idx);
}
// Removes and returns the minimum, or None if empty. O(log n).
pub fn extract_min(&mut self) -> Option<i32> {
if self.heap.is_empty() {
return None;
}
if self.heap.len() == 1 {
return self.heap.pop();
}
let min = self.heap[0];
// Move the last leaf to the root (pop is safe: len >= 2), sift down.
self.heap[0] = self.heap.pop().unwrap();
self.bubble_down(0);
Some(min)
}
// Returns a reference to the minimum without removing it. O(1).
pub fn peek(&self) -> Option<&i32> {
self.heap.first()
}
// Swaps upward while the element is smaller than its parent.
fn bubble_up(&mut self, mut index: usize) {
while index > 0 {
let parent_idx = self.parent(index);
if self.heap[index] < self.heap[parent_idx] {
self.heap.swap(index, parent_idx);
index = parent_idx;
} else {
break;
}
}
}
// Swaps downward with the smaller child until the heap property holds.
fn bubble_down(&mut self, mut index: usize) {
while self.left_child(index) < self.heap.len() {
let smaller_child = self.get_smaller_child(index);
if self.heap[index] > self.heap[smaller_child] {
self.heap.swap(index, smaller_child);
index = smaller_child;
} else {
break;
}
}
}
// Index of the smaller existing child; caller guarantees left exists.
fn get_smaller_child(&self, index: usize) -> usize {
let left = self.left_child(index);
let right = self.right_child(index);
if right >= self.heap.len() {
left
} else if self.heap[left] < self.heap[right] {
left
} else {
right
}
}
}

Big O Complexity
Logarithmic time operation
Constant space usage
All Operations
Heap Property
Min-heap: parent ≤ children. Max-heap: parent ≥ children. Complete binary tree stored in array.
Did You Know?
Dijkstra's algorithm uses min-heaps for efficient shortest path finding
Floyd's build-heap algorithm runs in O(n) time, not O(n log n)!
Priority queues in operating systems use heaps for task scheduling
Real-World Applications
Heaps power critical algorithms and systems. See how priority queues work in practice.
Dijkstra's Shortest Path
Find shortest paths in graphs using min-heap priority queue. Essential for GPS navigation and network routing.
OS Task Scheduling
Operating systems use priority queues to schedule processes based on priority levels and deadlines.
Heap Sort Algorithm
In-place sorting algorithm with guaranteed O(n log n) time and O(1) space complexity.
Top K Elements
Find K largest/smallest elements efficiently using a heap of size K. Common in rankings and analytics.
Running Median
Track median in streaming data using two heaps: max-heap for lower half, min-heap for upper half.
Merge K Sorted Lists
Efficiently merge multiple sorted sequences using min-heap with heads of each list.
Free Interactive Heap Data Structure Visualizer: Learn Binary Heaps, Priority Queues & Heap Sort Online
Master heap data structures with real-time visualizations for min-heaps, max-heaps, and priority queues. Learn heap operations like insert O(log n), extract-min/max O(log n), and heapify O(n) with interactive step-by-step animations. Perfect for algorithm interviews, computer science students, and developers implementing Dijkstra's algorithm or heap sort.
What Is a Heap Data Structure (And Why Learn It)?
A heap is a specialized tree-based data structure that satisfies the heap property: in a min-heap, every parent node is smaller than its children; in a max-heap, every parent is larger. Heaps are complete binary trees stored efficiently in arrays, making them the foundation for priority queues, heap sort (O(n log n)), and graph algorithms like Dijkstra's shortest path algorithm. According to GeeksforGeeks, heaps are among the top 5 most-asked data structures in technical interviews at Google, Meta, and Amazon.
Unlike binary search trees (BST), heaps don't maintain full orderingâonly the parent-child relationship matters. This partial ordering enables O(1) access to min/max elements and O(log n) insertions/deletions, making heaps ideal for real-time systems like task schedulers, event-driven simulations, and streaming data (median finding). Our interactive visualizer shows both tree and array representations, helping you understand why heaps beat BSTs for priority-based operations.
Why Heap Data Structures Matter for Developers:
Critical for Algorithm Interviews
- • Top K problems: Find K largest/smallest elements in O(n log k)
- • Merge K sorted lists: Heap-based solution beats brute force
- • Running median: Dual-heap technique for streaming data
- • Heap sort: In-place O(n log n) sorting without extra space
Real-World Production Systems
- • Priority queues: OS task scheduling, database query optimization
- • Graph algorithms: Dijkstra, Prim's MST, A* pathfinding
- • Event simulation: Discrete event systems, game engines
- • Rate limiting: Token bucket algorithms with min-heaps
Heap vs Binary Search Tree (BST)
Priority Queue Operations
Find Min/Max: O(1)
Extract Min/Max: O(log n)
Insert: O(log n)
Perfect for priority queues, median finding, top K
Sorted Order Operations
Search: O(log n)
Range Queries: O(log n + k)
Successor/Predecessor: O(log n)
Better for searching, range queries, ordered traversal
How to Use the Interactive Heap Visualizer (Step-by-Step Guide)
💡 Pro Tip: Build Heap from Array (Floyd's Algorithm)
Use the Build Heap button to convert random arrays into valid heaps in O(n) time—faster than inserting elements one-by-one! Floyd's algorithm starts from the last non-leaf node (index n/2 - 1) and heapifies downward. This is how Python's heapq.heapify() works under the hood. Try it with: [15, 10, 20, 8, 12, 25, 30] and watch the O(n) transformation!
7 Essential Heap Operations with Time Complexity
Add element to the end of array (maintaining complete binary tree property), then bubble up by comparing with parent and swapping if heap property violated. Worst case: element bubbles from leaf to root = tree height = log n. Try inserting 1 into min-heap [5, 10, 15, 20]âwatch it bubble to root in 2 swaps. Used in priority queue enqueue().
Remove root (highest priority), move last element to root, then bubble down by comparing with children and swapping with smaller child (min-heap) or larger child (max-heap) until heap property restored. Worst case: log n swaps from root to leaf. This is dequeue() in priority queues. Essential for Dijkstra's algorithm to extract next closest vertex. See Wikipedia's heap extract operation.
Return root element without removing itâinstant O(1) access to minimum (min-heap) or maximum (max-heap). This is why heaps beat BSTs for priority queues: constant-time priority access. Used to check next task in scheduler without dequeueing. Try Peek button to see current root valueâperfect for checking if priority queue has urgent tasks.
Convert unsorted array into valid heap in linear time using Floyd's algorithm: start from last non-leaf node (index ⌊n/2⌋ - 1) and heapify each subtree bottom-up. Faster than n inserts (which would be O(n log n)) because most nodes are near bottom. Used in heapq.heapify() and heap sort preprocessing. Click Build Heap to see O(n) construction with random array. See proof at Princeton's heapify analysis.
In-place sorting algorithm: (1) build max-heap O(n), (2) repeatedly extract max and place at array end = n × O(log n) = O(n log n) total. Space: O(1) unlike merge sort's O(n). Not stable but guaranteed O(n log n) worst case (quicksort degrades to O(n²)). Used when memory is limited. Click Heap Sort to see step-by-step extraction—watch heap shrink as sorted portion grows! Compare with our complexity calculator.
Modify element priority then restore heap property: if priority increased (min-heap) or decreased (max-heap), bubble up; otherwise bubble down. Critical for Dijkstra's algorithm to update vertex distances efficiently. Without this, Dijkstra would be O(nÂČ) instead of O((V+E) log V). Advanced featureâimplement by finding element, modifying value, then calling bubble-up or bubble-down based on change direction.
Remove element at index i: (1) replace with last element, (2) remove last, (3) restore heap by bubble-up or bubble-down depending on comparison with parent. Rarely used compared to extract-min/max but needed for removing specific tasks from priority queues (e.g., canceling scheduled events). Try manually: delete middle element from heap [1, 3, 5, 7, 9, 11] and verify heap property maintained.
10 Real-World Heap & Priority Queue Applications
1. Dijkstra's Shortest Path Algorithm (GPS Navigation)
Min-heap priority queue stores vertices with tentative distances as priorities. Extract min gives next closest unvisited vertex. Time: O((V + E) log V) with binary heap, O(V log V + E) with Fibonacci heap. Every GPS system (Google Maps, Waze) uses Dijkstra or A* (heap-based) to find optimal routes. Without heaps, Dijkstra degrades to O(V²) with array-based priority queue. Try our distance calculator to see route optimization.
2. Operating System Task Scheduling (CPU Process Management)
OS kernels (Linux, Windows) use priority heaps to schedule processes. Higher priority tasks (real-time, kernel threads) execute first. Heap operations: enqueue new process O(log n), dequeue highest priority O(log n), change priority O(log n). Linux CFS scheduler uses red-black tree (self-balancing BST), but priority-based schedulers prefer heaps. Windows uses multi-level feedback queue with heap per priority level. Essential for responsivenessâprevents low-priority tasks starving high-priority ones.
3. Merge K Sorted Lists (LeetCode #23 - Hard)
Classic interview problem: Merge k sorted linked lists using min-heap. Insert first element from each list into heap (k inserts), then repeatedly extract-min and insert next element from that list until all exhausted. Time: O(N log k) where N = total elements, k = number of lists. Beats brute force O(kN) by using heap to track k minimum candidates. Used in external sorting (merge-sort on disk) and distributed systems (merging sorted shards). Try: [[1,4,5], [1,3,4], [2,6]] â heap tracks 3 minimums at once.
4. Running Median from Data Stream (LeetCode #295 - Hard)
Dual-heap technique: Maintain max-heap for lower half, min-heap for upper half. Median = root of larger heap (odd total) or average of both roots (even total). Insert: O(log n), get median: O(1). Used in real-time statistics (stock prices, sensor data) and streaming analytics. Single heap can't maintain median efficiently, but two heaps balance perfectly. Example: stream [5, 15, 1, 3] â max-heap: [3, 1], min-heap: [5, 15], median = (3+5)/2 = 4. Essential for percentile calculations in monitoring systems.
5. Top K Frequent Elements (LeetCode #347 - Medium)
Find K most frequent elements in array using min-heap of size K. Count frequencies O(n), then iterate: if element's frequency greater than heap root, replace root and heapify. Time: O(n log k), space: O(k). Better than sorting O(n log n) when k << n. Used in analytics (trending topics, hot products), search autocomplete (frequent queries), and recommendation systems (popular items). Try: nums = [1,1,1,2,2,3], k = 2 â heap tracks top 2 frequencies â output [1,2]. Beats bucket sort when K is small.
6. Huffman Coding (Data Compression)
Build optimal prefix-free encoding tree using min-heap priority queue. Insert character frequencies, repeatedly extract two minimum nodes, create parent with combined frequency, reinsert. Greedy algorithm produces minimum-length encoding (ZIP, JPEG, MP3). Time: O(n log n) for n unique characters. Heap ensures we always merge lowest-frequency symbols first, minimizing total code length. Used in all lossless compression. Example: "AAABBC" â A: 3, B: 2, C: 1 â heap builds optimal tree â A: 0, B: 10, C: 11.
7. Event-Driven Simulation (Discrete Event Systems)
Simulate systems with events scheduled at future times. Min-heap priority queue stores events by timestampâextract-min gives next event chronologically. Used in network simulators (ns-3), game engines (Unity event loop), queue theory models (customer arrivals), and financial trading systems (order matching). Time advances by jumping to next event, not iterating every time unitâefficient for sparse events. Process event at time t, possibly schedule new events for future, repeat until heap empty.
8. Prim's Minimum Spanning Tree (Network Design)
Find minimum-cost tree connecting all vertices using min-heap. Similar to Dijkstra: extract min-weight edge, add to MST, insert adjacent edges. Time: O(E log V) with binary heap. Used in network routing (OSPF protocol), circuit design (connecting components), and clustering algorithms. Competes with Kruskal's algorithm (uses union-find). Prim's better for dense graphs; Kruskal's better for sparse. Try: graph with edges [(A-B: 2), (B-C: 3), (A-C: 4)] â heap extracts edges by weight â MST cost = 5.
9. Load Balancing (Distribute Tasks to Servers)
Assign incoming tasks to least-loaded server using min-heap where priority = current server load. Extract min-load server, assign task, increment load, reinsert into heap. Time: O(log n) per assignment. Used in Nginx load balancer, Kubernetes scheduler, CDN routing. Ensures even distribution without scanning all servers (which would be O(n)). Heap version scales to millions of servers. Combine with our network latency tool for latency-aware routing.
10. Database Query Optimization (External Merge Sort)
When sorting data larger than RAM, databases use external merge sort with heap: divide data into sorted runs fitting in memory, then merge using k-way merge with min-heap (see #3). PostgreSQL, MySQL, Oracle all use heap-based merge for ORDER BY on large tables. Time: O(N log k) where k = number of runs. Minimizes disk I/Oâcritical for performance. Each run provides one element to heap; extract-min writes to output; fetch next from that run. See our SQL formatter for query examples.
8 Common Heap Implementation Mistakes (And How to Fix Them)
1. Confusing Array Indices (0-indexed vs 1-indexed)
Mistake: Using 1-indexed formulas (parent = i/2, left = 2i, right = 2i+1) with 0-indexed arrays causes off-by-one errors. Fix: For 0-indexed: parent = (i-1)/2, left = 2i+1, right = 2i+2. For 1-indexed: waste index 0 or adjust formulas. Most modern languages (Python, Java, C++) use 0-indexing, so memorize 0-indexed formulas. Test edge case: root at index 0 has left child at 1, right at 2. See Wikipedia's indexing section.
2. Incorrect Bubble-Down Logic (Comparing with Wrong Child)
Mistake: In bubble-down, always swapping with left child instead of choosing smaller child (min-heap) or larger child (max-heap). Causes heap property violation in right subtree. Fix: Compare both children, swap with appropriate one. Code: smaller = (heap[left] < heap[right]) ? left : right. If only left child exists (right out of bounds), use left. Test: extract from [1, 5, 3, 9, 7, 10, 6]âmust compare 5 vs 3, swap with 3.
3. Not Handling Edge Cases (Empty Heap, Single Element)
Mistake: Calling extract-min on empty heap (heap underflow) or not checking bounds during bubble operations. Fix: Check if (heap.length === 0) throw Error("Heap underflow") before extract. In bubble-down, verify leftChild < heap.length before accessing. Single-element heap is validâextract should just return that element without errors. Defensive programming prevents runtime crashes.
4. Forgetting Complete Binary Tree Property
Mistake: Manually placing elements at specific positions (like BST insert) breaks completenessâheap must fill level-by-level, left-to-right. Fix: Always insert at end of array (maintains completeness), then bubble up. Never insert at arbitrary positions. Heap property (parent-child relationship) is separate from structure property (complete binary tree). Both must hold. Completeness guarantees O(log n) height and efficient array representationâlose it, lose efficiency.
5. Using Wrong Heap Type for Problem
Mistake: Using max-heap for finding K smallest elements (should use min-heap) or vice versa. Causes incorrect results. Fix: Rule of thumb: to find K largest, use min-heap of size K (keep K largest by evicting smallest); to find K smallest, use max-heap of size K (keep K smallest by evicting largest). Counterintuitive but correct! Test: find 3 largest from [1,5,3,7,2,9,4] → min-heap [5,7,9], extract all → [5,7,9] in ascending order, reverse for descending.
6. Inefficient Build Heap (Using N Inserts Instead of Heapify)
Mistake: Building heap from array by inserting elements one-by-one = O(n log n). Fix: Use Floyd's heapify algorithm = O(n). Start from last non-leaf (index n/2 - 1), heapify each subtree moving upward. Much faster for large datasets. Python's heapq.heapify() uses thisâconverts list in-place O(n). Interview bonus: explaining why heapify is O(n) not O(n log n) shows deep understanding (most work done at bottom levels where height is small).
7. Not Considering Stability in Heap Sort
Mistake: Assuming heap sort is stable (preserves equal elements' relative order)âit's not! Max-heap extraction reorders equal elements. Fix: If stability required, use merge sort or store (value, original_index) pairs as priorities. Heap sort trades stability for in-place O(1) space. Important for multi-key sorting (e.g., sort by age then nameâname order lost if heap sort by age). Document non-stability or use stable alternative. Not a bug, but a limitation to be aware of.
8. Ignoring Duplicate Elements
Mistake: Thinking heap can't handle duplicates—it can! Heap property only compares ≤ or ≥, not strict inequality. Multiple equal elements can coexist at same level. Fix: No special handling needed; duplicates work naturally. Common in frequency heaps (multiple items with same count). Test: insert [5, 3, 5, 1, 5] into min-heap—three 5s will be placed according to heap property, might appear at different levels. Duplicates don't break heap; they're just less interesting for visualization.
Frequently Asked Questions
What's the difference between min-heap and max-heap?
Min-heap: parent ≤ children (smallest at root, extractMin() O(log n)). Max-heap: parent ≥ children (largest at root, extractMax() O(log n)). Both are complete binary trees with O(log n) operations. Use min-heap for Dijkstra, finding minimums, ascending heap sort. Use max-heap for descending heap sort, finding maximums. Conversion: negate values or flip comparison operators. Most languages (Python heapq, C++ priority_queue) default to one type—check documentation! Toggle between types in our visualizer to see structural differences.
Why is heap stored as array instead of tree nodes?
Space efficiency: Array uses O(n) space for n elements; explicit tree nodes need 2n pointers (left, right) plus node overhead = 3n space minimum. Cache locality: Array elements stored contiguously in memoryâCPU cache prefetching speeds up access. Tree nodes scattered across heap memory cause cache misses. Simple navigation: Parent/child indices computed via arithmetic (no pointer chasing). No null checks: Array bounds naturally handle missing children. Downside: array requires contiguous memory blockâproblematic for massive heaps (use chunked arrays or Fibonacci heaps for billions of elements). See StackOverflow discussion.
When should I use heap vs balanced BST (AVL/Red-Black)?
Use heap when: Only need min/max access (priority queue), don't need sorted traversal, don't need search arbitrary element. Simpler implementation, faster constants. Use BST when: Need search O(log n), range queries, ordered iteration, successor/predecessor. Comparison: Both O(log n) insert/delete but BST maintains full ordering while heap only partial. Heap peek O(1) vs BST find-min O(log n). Heap build O(n) vs BST build O(n log n) from unsorted data. Example: Dijkstra needs decrease-key (possible in heap, awkward in BST). Database indexes need range scans (BST wins). Priority queues always use heaps. Check our binary tree visualizer for BST comparison.
How does decrease-key work in heaps for Dijkstra's algorithm?
Problem: Need to update element's priority mid-heap without full rebuild. Solutions: (1) Lazy deletion: Insert updated element with new priority, mark old as deleted, skip deleted during extraction—simpler but heap size inflates. (2) Position tracking: Maintain hash map for element → index, decrease value at index, bubble up if priority improved—faster but complex. (3) Fibonacci heap: Amortized O(1) decrease-key (vs binary heap O(log n))—optimal for Dijkstra O(V log V + E) but complicated. Most implementations use (1) lazily—slightly slower but much simpler code. Python's heapq doesn't support decrease-key natively; use workarounds. C++ std::priority_queue also lacks it. Java PriorityQueue same issue—use custom heap with tracking.
Can heaps be used for finding median in O(1)?
Yes, with dual-heap technique! Maintain max-heap for smaller half, min-heap for larger half. Balance sizes so difference †1. Insert O(log n): Add to appropriate heap, rebalance if size difference > 1. Get median O(1): Root of larger heap (odd count) or average of both roots (even count). Used in streaming statistics (running median), percentile calculations (P50/P95), and real-time analytics. LeetCode #295 "Find Median from Data Stream" tests this patternâhard problem but elegant solution. Balance maintenance is key: after insert, if one heap has 2+ more elements, extract its root and insert into other. Try: stream [1, 5, 3, 7, 9] â max-heap: [3, 1], min-heap: [7, 9], median = 5 (root of min-heap).
What's the space complexity of heaps?
Array storage: O(n) space for n elementsâeach element stored once, no overhead. Heap sort: O(1) extra space if sorting in-place (build heap in original array, extract to same array). Recursive heapify: O(log n) stack space for recursion depthâprefer iterative to avoid. Priority queue: O(n) where n = current size, grows/shrinks with enqueue/dequeue. Compare to BST: O(n) storage plus O(n) pointer overhead = ~3n space total. Heap is most space-efficient priority queue implementation. Advanced: Fibonacci heap O(n) but with extra pointers for decrease-key optimization. Use our data structures hub to compare space usage.
How do I implement priority queue with custom comparator?
Pass comparison function to heap operations instead of hardcoding < or >. Example (JavaScript): const pq = new MinHeap((a, b) => a.priority - b.priority); Comparator returns negative if a < b, positive if a > b, zero if equal. Use for complex objects (sort by multiple fields), custom orderings (reverse sort), or non-numeric priorities (string comparison). Python: heapq uses tuplesâ(priority, item) naturally sorts by priority. Java: PriorityQueue<Task>(Comparator.comparing(Task::getPriority)). Common pattern: (distance, node) for Dijkstra, (cost, edge) for MST.
Are there alternatives to binary heaps for better performance?
Yes! D-ary heap: Each node has D children (not 2)âfewer swaps but more comparisons. Fibonacci heap: Amortized O(1) insert/decrease-key, O(log n) extract-minâasymptotically better but high constants, rarely used in practice. Pairing heap: Simpler than Fibonacci, similar performance, used in LEDA library. Binomial heap: Supports efficient merge O(log n)âuseful for parallel algorithms. Skew heap: Self-adjusting heap, O(log n) amortized. Bottom line: Binary heap wins for simplicity and constant factors in 99% of cases. Use advanced heaps only when profiling shows bottleneck. See Wikipedia comparison table.
Advanced Heap Techniques for Interviews & Production
Heap + Hash Map for Fast Lookups
Combine heap with hash map storing element â index. Enables O(log n) update/delete arbitrary elements (normally not supported). Essential for Dijkstra decrease-key, LRU cache with priorities, and task schedulers with cancellation. Trade-off: 2Ă space but maintains O(log n) operations. Update heap: modify array, update map, bubble up/down. Example: map[element] = newIndex after swap.
Lazy Deletion Pattern
Instead of physically removing elements, mark as deleted and skip during extraction. Insert updated elements without removing old. Simpler than tracking positions but heap grows with "tombstones". Rebuild periodically when deleted > 50% of size. Used in Dijkstra implementationsâinsert node with updated distance, ignore outdated entries on extract. Space trade-off for code simplicity. Our visualizer uses immediate deletion for clarityâproduction systems often use lazy.
Parallel Heap Construction
Build heap bottom-up in parallel: heapify independent subtrees concurrently. Works because subtrees don't interfere until upper levels. Achieves O(n/p + log n) time on p processors. Used in multi-threaded sorting, distributed systems. Requires careful synchronization at merge points. Advanced technique for massive datasets (billions of elements). Standard heapify is so fast (O(n)) that parallelization only helps for huge inputs or slow comparisons (e.g., string keys).
External Heaps for Disk-Based Priority Queues
When heap doesn't fit in RAM, store in blocks on disk. Minimize I/O by using B-heap (generalization of B-tree) or buffer pool for hot nodes. Each operation may require disk readsâaim for O(1/B Ă log N/B) I/O complexity where B = block size. Used in massive graph algorithms (billion-node graphs), external sorting, and geographic databases. Trade CPU speed for disk latency. Techniques: buffer root in RAM, batch updates, compress heap blocks. Research topicâstandard libraries don't provide external heaps.
Soft Heaps for Approximate Selection
Violate heap property on a controlled fraction Δ of elementsâextract-min might be slightly wrong. Enables faster O(log log n) operations for certain algorithms (e.g., deterministic O(n) selection). Trade accuracy for speed. Theoretical interestârarely implemented. Used in some graph algorithms where approximate priorities suffice. If you need guaranteed correctness, stick with standard heaps. Advanced topic covered in algorithms coursesâdemonstrates heap property can be relaxed for performance gains.
Persistent Heaps (Functional Programming)
Immutable heaps where operations return new heap without modifying originalâenables time travel, undo/redo, concurrent access without locks. Implemented using path copying (share unchanged nodes). Used in functional languages (Haskell, Scala), version control systems. O(log n) operations but higher constants due to copying. Example: insert returns new heap root, original heap unchanged. Trade memory for immutability benefits. Check our data structures hub for persistent structures.
Master Other Data Structures & Algorithms
Build complete algorithmic knowledge with our interactive visualizers and learning tools:
Ready to Master Heaps & Priority Queues?
Use our interactive visualizer to understand heap operations with step-by-step animations. Perfect for algorithm interviews, computer science courses, and implementing Dijkstra, heap sort, or priority queues in production code. 100% free, no signup, works offline.
Trusted by 100,000+ developers and CS students learning algorithms and data structures