Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,7 @@ dirs = "6.0.0"
env_logger = "0.11.8"
epoll = "4.3.3"
flume = "0.11.1"
itertools = "0.14.0"
libc = "0.2.167"
log = "0.4.22"
signal-hook = "0.3.18"
Expand Down
2 changes: 2 additions & 0 deletions virtio-devices/src/mem.rs
Original file line number Diff line number Diff line change
Expand Up @@ -392,6 +392,8 @@ impl BlocksState {
}
}

// TODO We can avoid creating a new bitmap here, if we switch the code
// to use Vec<u64> to keep dirty bits and just pass it as is.
MemoryRangeTable::from_bitmap(bitmap, start_addr, VIRTIO_MEM_DEFAULT_BLOCK_SIZE)
}
}
Expand Down
10 changes: 7 additions & 3 deletions virtio-devices/src/vhost_user/vu_common_ctrl.rs
Original file line number Diff line number Diff line change
Expand Up @@ -573,12 +573,16 @@ impl VhostUserHandle {
// divide it by 8.
let len = region.size() / 8;
// SAFETY: region is of size len
let bitmap = unsafe {
let bitmap: &[u64] = unsafe {
// Cast the pointer to u64
let ptr = region.as_ptr() as *const u64;
std::slice::from_raw_parts(ptr, len).to_vec()
std::slice::from_raw_parts(ptr, len)
};
Ok(MemoryRangeTable::from_bitmap(bitmap, 0, 4096))
Ok(MemoryRangeTable::from_bitmap(
bitmap.iter().copied(),
0,
4096,
))
} else {
Err(Error::MissingShmLogRegion)
}
Expand Down
1 change: 1 addition & 0 deletions vm-migration/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ version = "0.1.0"

[dependencies]
anyhow = { workspace = true }
itertools = { workspace = true }
serde = { workspace = true, features = ["derive", "rc"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
Expand Down
88 changes: 88 additions & 0 deletions vm-migration/src/bitpos_iterator.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
// Copyright © 2025 Cyberus Technology GmbH
//
// SPDX-License-Identifier: Apache-2.0

/// An iterator that turns a sequence of u64s into a sequence of bit positions
/// that are set.
///
/// This is useful to iterate over dirty memory bitmaps.
/// An iterator that turns a sequence of u64s into a sequence of bit positions
/// that are set.
///
/// This is useful to iterate over dirty memory bitmaps.
struct BitposIterator<I> {
    /// The source of 64-bit words.
    words: I,

    /// How many u64's we've already fully consumed.
    words_done: usize,

    /// The word currently being scanned together with the bit index at which
    /// scanning resumes. `None` means the next word must be fetched first.
    scan_state: Option<(u64, u32)>,
}

impl<I> Iterator for BitposIterator<I>
where
    I: Iterator<Item = u64>,
{
    type Item = u64;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Resume scanning the in-progress word, or fetch a fresh one.
            let (word, resume_bit) = match self.scan_state {
                Some(state) => state,
                None => {
                    let fresh = (self.words.next()?, 0);
                    self.scan_state = Some(fresh);
                    fresh
                }
            };

            // `resume_bit == 64` means the previous call reported bit 63;
            // the guard also keeps the shift below well-defined.
            if resume_bit < 64 {
                let remaining = word >> resume_bit;
                if remaining != 0 {
                    let offset = remaining.trailing_zeros();
                    let bit = resume_bit + offset;

                    // Next call continues one past the bit we report now.
                    self.scan_state = Some((word, bit + 1));

                    let word_base = u64::try_from(self.words_done).unwrap() * 64;
                    return Some(word_base + u64::from(bit));
                }
            }

            // No set bits left in this word; advance to the next one.
            self.scan_state = None;
            self.words_done += 1;
        }
    }
}

/// Extension trait providing [`BitposIteratorExt::bit_positions`] on any
/// iterator over `u64`.
pub trait BitposIteratorExt: Iterator<Item = u64> + Sized {
    /// Turn an iterator over `u64` into an iterator over the bit positions of
    /// all 1s. We basically treat the incoming `u64` as one gigantic integer
    /// and just spit out which bits are set.
    fn bit_positions(self) -> impl Iterator<Item = u64> {
        BitposIterator {
            words: self,
            words_done: 0,
            scan_state: None,
        }
    }
}

impl<I: Iterator<Item = u64> + Sized> BitposIteratorExt for I {}

#[cfg(test)]
mod tests {
    use super::*;

    /// Assert that `bit_positions` over `input` yields exactly `expected`.
    fn bitpos_check(input: &[u64], expected: &[u64]) {
        let actual: Vec<u64> = input.iter().copied().bit_positions().collect();
        assert_eq!(actual, expected);
    }

    #[test]
    fn bitpos_iterator_works() {
        // (input words, expected bit positions)
        let cases: &[(&[u64], &[u64])] = &[
            (&[], &[]),
            (&[0], &[]),
            (&[1], &[0]),
            (&[5], &[0, 2]),
            (&[3 + 32], &[0, 1, 5]),
            (&[1 << 63], &[63]),
            // Positions keep counting across word boundaries.
            (&[1, 1 + 32], &[0, 64, 69]),
        ];

        for (input, expected) in cases {
            bitpos_check(input, expected);
        }
    }
}
1 change: 1 addition & 0 deletions vm-migration/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use thiserror::Error;

use crate::protocol::MemoryRangeTable;

mod bitpos_iterator;
pub mod protocol;

#[derive(Error, Debug)]
Expand Down
61 changes: 36 additions & 25 deletions vm-migration/src/protocol.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,12 @@

use std::io::{Read, Write};

use itertools::Itertools;
use serde::{Deserialize, Serialize};
use vm_memory::ByteValued;

use crate::MigratableError;
use crate::bitpos_iterator::BitposIteratorExt;

// Migration protocol
// 1: Source establishes communication with destination (file socket or TCP connection.)
Expand Down Expand Up @@ -215,38 +217,47 @@ pub struct MemoryRange {
pub length: u64,
}

impl MemoryRange {
    /// Turn an iterator over the dirty bitmap into an iterator of dirty ranges.
    pub fn dirty_ranges(
        bitmap: impl IntoIterator<Item = u64>,
        start_addr: u64,
        page_size: u64,
    ) -> impl Iterator<Item = Self> {
        // Page indices (relative to `start_addr`) of all dirty pages.
        let dirty_pages = bitmap.into_iter().bit_positions();

        // Represent each dirty page as a half-open index range so adjacent
        // pages can be merged below.
        let page_ranges = dirty_pages.map(|page| page..page + 1);

        // Merge runs of adjacent dirty pages into single ranges.
        let merged = page_ranges.coalesce(|left, right| {
            if left.end == right.start {
                Ok(left.start..right.end)
            } else {
                Err((left, right))
            }
        });

        // Scale page-index ranges to guest-physical address ranges.
        merged.map(move |range| Self {
            gpa: start_addr + range.start * page_size,
            length: (range.end - range.start) * page_size,
        })
    }
}

/// A collection of guest-physical [`MemoryRange`]s, e.g. describing the dirty
/// memory of a VM during live migration.
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct MemoryRangeTable {
    // NOTE(review): when built via `from_bitmap`, the ranges are sorted by
    // `gpa` and non-overlapping (bit positions are emitted in ascending
    // order) — confirm that other construction paths keep this invariant.
    data: Vec<MemoryRange>,
}

impl MemoryRangeTable {
pub fn from_bitmap(bitmap: Vec<u64>, start_addr: u64, page_size: u64) -> Self {
let mut table = MemoryRangeTable::default();
let mut entry: Option<MemoryRange> = None;
for (i, block) in bitmap.iter().enumerate() {
for j in 0..64 {
let is_page_dirty = ((block >> j) & 1u64) != 0u64;
let page_offset = ((i * 64) + j) as u64 * page_size;
if is_page_dirty {
if let Some(entry) = &mut entry {
entry.length += page_size;
} else {
entry = Some(MemoryRange {
gpa: start_addr + page_offset,
length: page_size,
});
}
} else if let Some(entry) = entry.take() {
table.push(entry);
}
}
}
if let Some(entry) = entry.take() {
table.push(entry);
pub fn from_bitmap(
bitmap: impl IntoIterator<Item = u64>,
start_addr: u64,
page_size: u64,
) -> Self {
Self {
data: MemoryRange::dirty_ranges(bitmap, start_addr, page_size).collect(),
}

table
}

pub fn regions(&self) -> &[MemoryRange] {
Expand Down
5 changes: 2 additions & 3 deletions vmm/src/memory_manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2615,11 +2615,10 @@ impl Migratable for MemoryManager {
}
};

let dirty_bitmap: Vec<u64> = vm_dirty_bitmap
let dirty_bitmap = vm_dirty_bitmap
.iter()
.zip(vmm_dirty_bitmap.iter())
.map(|(x, y)| x | y)
.collect();
.map(|(x, y)| x | y);

let sub_table = MemoryRangeTable::from_bitmap(dirty_bitmap, r.gpa, 4096);

Expand Down
Loading