@@ -0,0 +1,92 @@
// SPDX-License-Identifier: GPL-2.0

use core::ops::{Deref, DerefMut};
use kernel::{
    bindings,
    bindings::*,
    user_ptr::{ReadableFromBytes, WritableToBytes},
};

macro_rules! pub_no_prefix {
    ($prefix:ident, $($newname:ident),+) => {
        $(pub const $newname: u32 = concat_idents!($prefix, $newname);)+
    };
}
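
// Illustrative note (not part of the original patch): each invocation below expands to one
// `pub const` per identifier, e.g.
//     pub const BR_OK: u32 = binder_driver_return_protocol_BR_OK;
// so the `bindings` constants become usable without their long prefixes.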

pub_no_prefix!(
    binder_driver_return_protocol_,
    BR_OK,
    BR_ERROR,
    BR_TRANSACTION,
    BR_REPLY,
    BR_DEAD_REPLY,
    BR_TRANSACTION_COMPLETE,
    BR_INCREFS,
    BR_ACQUIRE,
    BR_RELEASE,
    BR_DECREFS,
    BR_NOOP,
    BR_SPAWN_LOOPER,
    BR_DEAD_BINDER,
    BR_CLEAR_DEATH_NOTIFICATION_DONE,
    BR_FAILED_REPLY
);

pub_no_prefix!(
    binder_driver_command_protocol_,
    BC_TRANSACTION,
    BC_REPLY,
    BC_FREE_BUFFER,
    BC_INCREFS,
    BC_ACQUIRE,
    BC_RELEASE,
    BC_DECREFS,
    BC_INCREFS_DONE,
    BC_ACQUIRE_DONE,
    BC_REGISTER_LOOPER,
    BC_ENTER_LOOPER,
    BC_EXIT_LOOPER,
    BC_REQUEST_DEATH_NOTIFICATION,
    BC_CLEAR_DEATH_NOTIFICATION,
    BC_DEAD_BINDER_DONE
);

macro_rules! decl_wrapper {
    ($newname:ident, $wrapped:ty) => {
        #[derive(Copy, Clone, Default)]
        pub(crate) struct $newname($wrapped);

        // TODO: This must be justified by inspecting the type, so should live outside the macro
        // or the macro should be somehow marked unsafe.
        unsafe impl ReadableFromBytes for $newname {}
        unsafe impl WritableToBytes for $newname {}

        impl Deref for $newname {
            type Target = $wrapped;
            fn deref(&self) -> &Self::Target {
                &self.0
            }
        }

        impl DerefMut for $newname {
            fn deref_mut(&mut self) -> &mut Self::Target {
                &mut self.0
            }
        }
    };
}

decl_wrapper!(BinderNodeDebugInfo, bindings::binder_node_debug_info);
decl_wrapper!(BinderNodeInfoForRef, bindings::binder_node_info_for_ref);
decl_wrapper!(FlatBinderObject, bindings::flat_binder_object);
decl_wrapper!(BinderTransactionData, bindings::binder_transaction_data);
decl_wrapper!(BinderWriteRead, bindings::binder_write_read);
decl_wrapper!(BinderVersion, bindings::binder_version);

impl BinderVersion {
    pub(crate) fn current() -> Self {
        Self(bindings::binder_version {
            protocol_version: bindings::BINDER_CURRENT_PROTOCOL_VERSION as _,
        })
    }
}
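
// Illustrative sketch (not part of the original patch): `Deref`/`DerefMut` let callers treat a
// wrapper as the C struct it wraps, while the newtype is what carries the
// `ReadableFromBytes`/`WritableToBytes` markers. Assumes the generated bindings derive `Default`.
#[allow(dead_code)]
fn example_wrapper_deref() {
    let mut bwr = BinderWriteRead::default();
    bwr.write_size = 0; // Field writes go through `DerefMut` to the C struct.
    let _pending = bwr.read_size; // Field reads go through `Deref`.
}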

@@ -0,0 +1,180 @@
// SPDX-License-Identifier: GPL-2.0

use alloc::{boxed::Box, sync::Arc};
use core::ptr::NonNull;

pub use crate::raw_list::{Cursor, GetLinks, Links};
use crate::{raw_list, raw_list::RawList};

// TODO: Use the one from `kernel::file_operations::PointerWrapper` instead.
pub trait Wrapper<T: ?Sized> {
    fn into_pointer(self) -> NonNull<T>;
    unsafe fn from_pointer(ptr: NonNull<T>) -> Self;
    fn as_ref(&self) -> &T;
}
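
// Illustrative sketch (not part of the original patch): `into_pointer` gives up ownership of the
// wrapper as a raw pointer, and `from_pointer` reclaims it, so a round trip simply moves
// ownership through the pointer.
#[allow(dead_code)]
fn example_wrapper_round_trip() {
    if let Ok(b) = Box::try_new(42u32) {
        let ptr = b.into_pointer();
        // SAFETY: `ptr` came from `into_pointer` on a live `Box<u32>` and is reclaimed once.
        let b = unsafe { <Box<u32> as Wrapper<u32>>::from_pointer(ptr) };
        assert_eq!(*b, 42);
    }
}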

impl<T: ?Sized> Wrapper<T> for Box<T> {
    fn into_pointer(self) -> NonNull<T> {
        NonNull::new(Box::into_raw(self)).unwrap()
    }

    unsafe fn from_pointer(ptr: NonNull<T>) -> Self {
        Box::from_raw(ptr.as_ptr())
    }

    fn as_ref(&self) -> &T {
        AsRef::as_ref(self)
    }
}

impl<T: ?Sized> Wrapper<T> for Arc<T> {
    fn into_pointer(self) -> NonNull<T> {
        NonNull::new(Arc::into_raw(self) as _).unwrap()
    }

    unsafe fn from_pointer(ptr: NonNull<T>) -> Self {
        Arc::from_raw(ptr.as_ptr())
    }

    fn as_ref(&self) -> &T {
        AsRef::as_ref(self)
    }
}

impl<T: ?Sized> Wrapper<T> for &T {
    fn into_pointer(self) -> NonNull<T> {
        NonNull::from(self)
    }

    unsafe fn from_pointer(ptr: NonNull<T>) -> Self {
        &*ptr.as_ptr()
    }

    fn as_ref(&self) -> &T {
        self
    }
}

pub trait GetLinksWrapped: GetLinks {
    type Wrapped: Wrapper<Self::EntryType>;
}

impl<T: ?Sized> GetLinksWrapped for Box<T>
where
    Box<T>: GetLinks,
{
    type Wrapped = Box<<Box<T> as GetLinks>::EntryType>;
}

impl<T: GetLinks + ?Sized> GetLinks for Box<T> {
    type EntryType = T::EntryType;
    fn get_links(data: &Self::EntryType) -> &Links<Self::EntryType> {
        <T as GetLinks>::get_links(data)
    }
}

impl<T: ?Sized> GetLinksWrapped for Arc<T>
where
    Arc<T>: GetLinks,
{
    type Wrapped = Arc<<Arc<T> as GetLinks>::EntryType>;
}

impl<T: GetLinks + ?Sized> GetLinks for Arc<T> {
    type EntryType = T::EntryType;
    fn get_links(data: &Self::EntryType) -> &Links<Self::EntryType> {
        <T as GetLinks>::get_links(data)
    }
}

pub struct List<G: GetLinksWrapped> {
    list: RawList<G>,
}

impl<G: GetLinksWrapped> List<G> {
    pub fn new() -> Self {
        Self {
            list: RawList::new(),
        }
    }

    pub fn is_empty(&self) -> bool {
        self.list.is_empty()
    }

    pub fn push_back(&mut self, data: G::Wrapped) {
        let ptr = data.into_pointer();
        if !unsafe { self.list.push_back(ptr.as_ref()) } {
            // If insertion failed, rebuild the object so that it can be freed.
            unsafe { G::Wrapped::from_pointer(ptr) };
        }
    }

    pub unsafe fn insert_after(&mut self, existing: NonNull<G::EntryType>, data: G::Wrapped) {
        let ptr = data.into_pointer();
        let entry = &*existing.as_ptr();
        if !self.list.insert_after(entry, ptr.as_ref()) {
            // If insertion failed, rebuild the object so that it can be freed.
            G::Wrapped::from_pointer(ptr);
        }
    }

    pub unsafe fn remove(&mut self, data: &G::Wrapped) -> Option<G::Wrapped> {
        let entry_ref = Wrapper::as_ref(data);
        if self.list.remove(entry_ref) {
            Some(G::Wrapped::from_pointer(NonNull::from(entry_ref)))
        } else {
            None
        }
    }

    pub fn pop_front(&mut self) -> Option<G::Wrapped> {
        let front = self.list.pop_front()?;
        Some(unsafe { G::Wrapped::from_pointer(front) })
    }

    pub fn cursor_front(&self) -> Cursor<'_, G> {
        self.list.cursor_front()
    }

    pub fn cursor_front_mut(&mut self) -> CursorMut<'_, G> {
        CursorMut::new(self.list.cursor_front_mut())
    }
}
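
// Illustrative sketch (not part of the original patch): an entry type embeds its own `Links` and
// exposes them via `GetLinks`; `Box<Example>` then gets `GetLinksWrapped` through the blanket
// impls above, so the list owns its entries and frees whatever is left when it is dropped.
#[allow(dead_code)]
struct Example {
    value: u32,
    links: Links<Example>,
}

impl GetLinks for Example {
    type EntryType = Example;
    fn get_links(data: &Example) -> &Links<Example> {
        &data.links
    }
}

#[allow(dead_code)]
fn example_list_usage() {
    let mut list: List<Box<Example>> = List::new();
    if let Ok(e) = Box::try_new(Example {
        value: 1,
        links: Links::new(),
    }) {
        list.push_back(e);
    }
    while let Some(entry) = list.pop_front() {
        // `entry` is a `Box<Example>` again; dropping it frees the allocation.
        let _ = entry.value;
    }
}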

impl<G: GetLinksWrapped> Drop for List<G> {
    fn drop(&mut self) {
        while self.pop_front().is_some() {}
    }
}

pub struct CursorMut<'a, G: GetLinksWrapped> {
    cursor: raw_list::CursorMut<'a, G>,
}

impl<'a, G: GetLinksWrapped> CursorMut<'a, G> {
    fn new(cursor: raw_list::CursorMut<'a, G>) -> Self {
        Self { cursor }
    }

    pub fn current(&mut self) -> Option<&mut G::EntryType> {
        self.cursor.current()
    }

    pub fn remove_current(&mut self) -> Option<G::Wrapped> {
        let ptr = self.cursor.remove_current()?;
        Some(unsafe { G::Wrapped::from_pointer(ptr) })
    }

    pub fn peek_next(&mut self) -> Option<&mut G::EntryType> {
        self.cursor.peek_next()
    }

    pub fn peek_prev(&mut self) -> Option<&mut G::EntryType> {
        self.cursor.peek_prev()
    }

    pub fn move_next(&mut self) {
        self.cursor.move_next();
    }
}

@@ -0,0 +1,189 @@
// SPDX-License-Identifier: GPL-2.0

use alloc::boxed::Box;
use core::ptr::NonNull;
use kernel::{prelude::*, Error};

use crate::linked_list::{CursorMut, GetLinks, Links, List};

pub(crate) struct RangeAllocator<T> {
    list: List<Box<Descriptor<T>>>,
}

#[derive(Debug, PartialEq, Eq)]
enum DescriptorState {
    Free,
    Reserved,
    Allocated,
}

impl<T> RangeAllocator<T> {
    pub(crate) fn new(size: usize) -> KernelResult<Self> {
        let desc = Box::try_new(Descriptor::new(0, size))?;
        let mut list = List::new();
        list.push_back(desc);
        Ok(Self { list })
    }

    fn find_best_match(&self, size: usize) -> Option<NonNull<Descriptor<T>>> {
        // TODO: Use a binary tree instead of a list for this lookup.
        let mut best = None;
        let mut best_size = usize::MAX;
        let mut cursor = self.list.cursor_front();
        while let Some(desc) = cursor.current() {
            if desc.state == DescriptorState::Free {
                if size == desc.size {
                    return Some(NonNull::from(desc));
                }

                if size < desc.size && desc.size < best_size {
                    best = Some(NonNull::from(desc));
                    best_size = desc.size;
                }
            }

            cursor.move_next();
        }
        best
    }

    pub(crate) fn reserve_new(&mut self, size: usize) -> KernelResult<usize> {
        let desc_ptr = match self.find_best_match(size) {
            None => return Err(Error::ENOMEM),
            Some(found) => found,
        };

        // SAFETY: We hold the only mutable reference to the list, so it cannot have changed.
        let desc = unsafe { &mut *desc_ptr.as_ptr() };
        if desc.size == size {
            desc.state = DescriptorState::Reserved;
            return Ok(desc.offset);
        }

        // We need to break up the descriptor.
        let new = Box::try_new(Descriptor::new(desc.offset + size, desc.size - size))?;
        unsafe { self.list.insert_after(desc_ptr, new) };
        desc.state = DescriptorState::Reserved;
        desc.size = size;
        Ok(desc.offset)
    }

    fn free_with_cursor(cursor: &mut CursorMut<Box<Descriptor<T>>>) -> KernelResult {
        let mut size = match cursor.current() {
            None => return Err(Error::EINVAL),
            Some(ref mut entry) => {
                match entry.state {
                    DescriptorState::Free => return Err(Error::EINVAL),
                    DescriptorState::Allocated => return Err(Error::EPERM),
                    DescriptorState::Reserved => {}
                }
                entry.state = DescriptorState::Free;
                entry.size
            }
        };

        // Try to merge with the next entry.
        if let Some(next) = cursor.peek_next() {
            if next.state == DescriptorState::Free {
                next.offset -= size;
                next.size += size;
                size = next.size;
                cursor.remove_current();
            }
        }

        // Try to merge with the previous entry.
        if let Some(prev) = cursor.peek_prev() {
            if prev.state == DescriptorState::Free {
                prev.size += size;
                cursor.remove_current();
            }
        }

        Ok(())
    }

    fn find_at_offset(&mut self, offset: usize) -> Option<CursorMut<Box<Descriptor<T>>>> {
        let mut cursor = self.list.cursor_front_mut();
        while let Some(desc) = cursor.current() {
            if desc.offset == offset {
                return Some(cursor);
            }

            if desc.offset > offset {
                // The list is sorted by offset, so we can stop early.
                return None;
            }

            cursor.move_next();
        }
        None
    }

    pub(crate) fn reservation_abort(&mut self, offset: usize) -> KernelResult {
        // TODO: This is currently O(n), but it could be made O(1) with unsafe.
        let mut cursor = self.find_at_offset(offset).ok_or(Error::EINVAL)?;
        Self::free_with_cursor(&mut cursor)
    }

    pub(crate) fn reservation_commit(&mut self, offset: usize, data: Option<T>) -> KernelResult {
        // TODO: This is currently O(n), make it O(1).
        let mut cursor = self.find_at_offset(offset).ok_or(Error::ENOENT)?;
        let desc = cursor.current().unwrap();
        desc.state = DescriptorState::Allocated;
        desc.data = data;
        Ok(())
    }

    /// Takes an entry at the given offset from [`DescriptorState::Allocated`] to
    /// [`DescriptorState::Reserved`].
    ///
    /// Returns the size of the existing entry and the data associated with it.
    pub(crate) fn reserve_existing(&mut self, offset: usize) -> KernelResult<(usize, Option<T>)> {
        // TODO: This is currently O(n), make it O(log n).
        let mut cursor = self.find_at_offset(offset).ok_or(Error::ENOENT)?;
        let desc = cursor.current().unwrap();
        if desc.state != DescriptorState::Allocated {
            return Err(Error::ENOENT);
        }
        desc.state = DescriptorState::Reserved;
        Ok((desc.size, desc.data.take()))
    }

    pub(crate) fn for_each<F: Fn(usize, usize, Option<T>)>(&mut self, callback: F) {
        let mut cursor = self.list.cursor_front_mut();
        while let Some(desc) = cursor.current() {
            if desc.state == DescriptorState::Allocated {
                // Note that `take` moves the data out of the descriptor.
                callback(desc.offset, desc.size, desc.data.take());
            }

            cursor.move_next();
        }
    }
}

struct Descriptor<T> {
    state: DescriptorState,
    size: usize,
    offset: usize,
    links: Links<Descriptor<T>>,
    data: Option<T>,
}

impl<T> Descriptor<T> {
    fn new(offset: usize, size: usize) -> Self {
        Self {
            size,
            offset,
            state: DescriptorState::Free,
            links: Links::new(),
            data: None,
        }
    }
}

impl<T> GetLinks for Descriptor<T> {
    type EntryType = Self;
    fn get_links(desc: &Self) -> &Links<Self> {
        &desc.links
    }
}
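
// Illustrative sketch (not part of the original patch): the typical lifecycle of a range. A
// reservation is either aborted or committed (optionally with a payload); committed ranges are
// later taken back with `reserve_existing`. The 4096-byte pool size is an arbitrary example.
#[allow(dead_code)]
fn example_lifecycle() -> KernelResult {
    let mut alloc = RangeAllocator::<u32>::new(4096)?;
    let offset = alloc.reserve_new(128)?; // Splits the initial free descriptor.
    alloc.reservation_commit(offset, Some(42))?; // Reserved -> Allocated.
    let (size, data) = alloc.reserve_existing(offset)?; // Allocated -> Reserved.
    assert!(size == 128 && data == Some(42));
    alloc.reservation_abort(offset) // Reserved -> Free, merging with free neighbours.
}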

@@ -0,0 +1,288 @@
// SPDX-License-Identifier: GPL-2.0

use core::{
    cell::UnsafeCell,
    ptr,
    ptr::NonNull,
    sync::atomic::{AtomicBool, Ordering},
};

pub trait GetLinks {
    type EntryType: ?Sized;
    fn get_links(data: &Self::EntryType) -> &Links<Self::EntryType>;
}

pub struct Links<T: ?Sized>(UnsafeCell<ListEntry<T>>);

impl<T: ?Sized> Links<T> {
    pub fn new() -> Self {
        Self(UnsafeCell::new(ListEntry::new()))
    }
}

struct ListEntry<T: ?Sized> {
    next: Option<NonNull<T>>,
    prev: Option<NonNull<T>>,
    inserted: AtomicBool,
}

impl<T: ?Sized> ListEntry<T> {
    fn new() -> Self {
        Self {
            next: None,
            prev: None,
            inserted: AtomicBool::new(false),
        }
    }

    fn acquire_for_insertion(&mut self) -> bool {
        self.inserted
            .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }

    fn release_after_removal(&mut self) {
        self.inserted.store(false, Ordering::Release);
    }
}

/// An intrusive circular doubly-linked list: only the head pointer is stored, and the tail is
/// reached through the head's `prev` link.
pub struct RawList<G: GetLinks> {
    head: Option<NonNull<G::EntryType>>,
}

impl<G: GetLinks> RawList<G> {
    pub fn new() -> Self {
        Self { head: None }
    }

    pub fn is_empty(&self) -> bool {
        self.head.is_none()
    }

    fn insert_after_priv(
        &mut self,
        existing: &G::EntryType,
        new_entry: &mut ListEntry<G::EntryType>,
        new_ptr: Option<NonNull<G::EntryType>>,
    ) -> bool {
        if !new_entry.acquire_for_insertion() {
            // Nothing to do if already inserted.
            return false;
        }

        {
            let existing_links = unsafe { &mut *G::get_links(existing).0.get() };
            new_entry.next = existing_links.next;
            existing_links.next = new_ptr;
        }

        new_entry.prev = Some(NonNull::from(existing));
        let next_links = unsafe { &mut *G::get_links(new_entry.next.unwrap().as_ref()).0.get() };
        next_links.prev = new_ptr;
        true
    }

    pub fn insert_after(&mut self, existing: &G::EntryType, new: &G::EntryType) -> bool {
        let new_entry = unsafe { &mut *G::get_links(new).0.get() };
        self.insert_after_priv(existing, new_entry, Some(NonNull::from(new)))
    }

    fn push_back_internal(&mut self, new: &G::EntryType) -> bool {
        let new_entry = unsafe { &mut *G::get_links(new).0.get() };
        let new_ptr = Some(NonNull::from(new));
        match self.back() {
            Some(back) => self.insert_after_priv(unsafe { back.as_ref() }, new_entry, new_ptr),
            None => {
                if !new_entry.acquire_for_insertion() {
                    // Nothing to do if already inserted.
                    return false;
                }
                self.head = new_ptr;
                new_entry.next = new_ptr;
                new_entry.prev = new_ptr;
                true
            }
        }
    }

    pub unsafe fn push_back(&mut self, new: &G::EntryType) -> bool {
        self.push_back_internal(new)
    }

    fn remove_internal(&mut self, data: &G::EntryType) -> bool {
        let links = unsafe { &mut *G::get_links(data).0.get() };
        let next = if let Some(next) = links.next {
            next
        } else {
            // Nothing to do if the entry is not on the list.
            return false;
        };

        if ptr::eq(data, next.as_ptr()) {
            // We're removing the only element.
            self.head = None;
        } else {
            // Update the head if we're removing it.
            if let Some(raw_head) = self.head {
                if ptr::eq(data, raw_head.as_ptr()) {
                    self.head = Some(next);
                }
            }

            unsafe { &mut *G::get_links(links.prev.unwrap().as_ref()).0.get() }.next = links.next;
            unsafe { &mut *G::get_links(next.as_ref()).0.get() }.prev = links.prev;
        }

        // Reset the links of the element we're removing so that we know it's not on any list.
        links.next = None;
        links.prev = None;
        links.release_after_removal();
        true
    }

    pub unsafe fn remove(&mut self, data: &G::EntryType) -> bool {
        self.remove_internal(data)
    }

    fn pop_front_internal(&mut self) -> Option<NonNull<G::EntryType>> {
        let head = self.head?;
        // SAFETY: The head is on the list and remains valid while we hold `&mut self`.
        unsafe { self.remove(head.as_ref()) };
        Some(head)
    }

    pub fn pop_front(&mut self) -> Option<NonNull<G::EntryType>> {
        self.pop_front_internal()
    }

    pub fn front(&self) -> Option<NonNull<G::EntryType>> {
        self.head
    }

    pub fn back(&self) -> Option<NonNull<G::EntryType>> {
        // In a circular list, the tail is the head's `prev`.
        unsafe { &*G::get_links(self.head?.as_ref()).0.get() }.prev
    }

    pub fn cursor_front(&self) -> Cursor<'_, G> {
        Cursor::new(self, self.front())
    }

    pub fn cursor_front_mut(&mut self) -> CursorMut<'_, G> {
        CursorMut::new(self, self.front())
    }
}

struct CommonCursor<G: GetLinks> {
    cur: Option<NonNull<G::EntryType>>,
}

impl<G: GetLinks> CommonCursor<G> {
    fn new(cur: Option<NonNull<G::EntryType>>) -> Self {
        Self { cur }
    }

    fn move_next(&mut self, list: &RawList<G>) {
        match self.cur.take() {
            None => self.cur = list.head,
            Some(cur) => {
                if let Some(head) = list.head {
                    // SAFETY: We have a shared ref to the linked list, so the links can't change.
                    let links = unsafe { &*G::get_links(cur.as_ref()).0.get() };
                    if links.next.unwrap() != head {
                        self.cur = links.next;
                    }
                }
            }
        }
    }

    fn move_prev(&mut self, list: &RawList<G>) {
        match list.head {
            None => self.cur = None,
            Some(head) => {
                let next = match self.cur.take() {
                    None => head,
                    Some(cur) => {
                        if cur == head {
                            return;
                        }
                        cur
                    }
                };
                // SAFETY: There's a shared ref to the list, so the links can't change.
                let links = unsafe { &*G::get_links(next.as_ref()).0.get() };
                self.cur = links.prev;
            }
        }
    }
}

pub struct Cursor<'a, G: GetLinks> {
    cursor: CommonCursor<G>,
    list: &'a RawList<G>,
}

impl<'a, G: GetLinks> Cursor<'a, G> {
    fn new(list: &'a RawList<G>, cur: Option<NonNull<G::EntryType>>) -> Self {
        Self {
            list,
            cursor: CommonCursor::new(cur),
        }
    }

    pub fn current(&self) -> Option<&'a G::EntryType> {
        let cur = self.cursor.cur?;
        // SAFETY: Objects must be kept alive while on the list.
        Some(unsafe { &*cur.as_ptr() })
    }

    pub fn move_next(&mut self) {
        self.cursor.move_next(self.list);
    }
}

pub struct CursorMut<'a, G: GetLinks> {
    cursor: CommonCursor<G>,
    list: &'a mut RawList<G>,
}

impl<'a, G: GetLinks> CursorMut<'a, G> {
    fn new(list: &'a mut RawList<G>, cur: Option<NonNull<G::EntryType>>) -> Self {
        Self {
            list,
            cursor: CommonCursor::new(cur),
        }
    }

    pub fn current(&mut self) -> Option<&mut G::EntryType> {
        let cur = self.cursor.cur?;
        // SAFETY: Objects must be kept alive while on the list.
        Some(unsafe { &mut *cur.as_ptr() })
    }

    /// Removes the entry the cursor is pointing to and advances the cursor to the next entry.
    /// Returns a raw pointer to the removed element (if one is removed).
    pub fn remove_current(&mut self) -> Option<NonNull<G::EntryType>> {
        let entry = self.cursor.cur?;
        self.cursor.move_next(self.list);
        // SAFETY: `entry` is known to be on the list, and we have exclusive access to the list.
        unsafe { self.list.remove(entry.as_ref()) };
        Some(entry)
    }

    pub fn peek_next(&mut self) -> Option<&mut G::EntryType> {
        let mut new = CommonCursor::new(self.cursor.cur);
        new.move_next(self.list);
        // SAFETY: Objects must be kept alive while on the list.
        Some(unsafe { &mut *new.cur?.as_ptr() })
    }

    pub fn peek_prev(&mut self) -> Option<&mut G::EntryType> {
        let mut new = CommonCursor::new(self.cursor.cur);
        new.move_prev(self.list);
        // SAFETY: Objects must be kept alive while on the list.
        Some(unsafe { &mut *new.cur?.as_ptr() })
    }

    pub fn move_next(&mut self) {
        self.cursor.move_next(self.list);
    }
}
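
// Illustrative sketch (not part of the original patch): `RawList` is intrusive and does not own
// its entries, so the caller must keep them alive while they are on the list. Here the entries
// live on the stack, are removed before they go out of scope, and outlive the list itself.
#[allow(dead_code)]
fn example_raw_list() {
    struct Entry {
        value: u32,
        links: Links<Entry>,
    }

    struct EntryGetLinks;
    impl GetLinks for EntryGetLinks {
        type EntryType = Entry;
        fn get_links(e: &Entry) -> &Links<Entry> {
            &e.links
        }
    }

    let a = Entry { value: 1, links: Links::new() };
    let b = Entry { value: 2, links: Links::new() };
    let mut list = RawList::<EntryGetLinks>::new();
    // SAFETY: `a` and `b` outlive `list` and are removed below before being dropped.
    unsafe {
        let _ = list.push_back(&a);
        let _ = list.push_back(&b);
    }

    let mut cursor = list.cursor_front();
    while let Some(entry) = cursor.current() {
        let _ = entry.value; // Visits 1, then 2.
        cursor.move_next();
    }

    while list.pop_front().is_some() {}
}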

@@ -0,0 +1,216 @@
// SPDX-License-Identifier: GPL-2.0

#![no_std]
#![feature(global_asm, try_reserve, allocator_api, concat_idents)]

use alloc::{boxed::Box, sync::Arc};
use core::{marker::PhantomData, pin::Pin, ptr};
use kernel::{
    bindings, c_types, cstr,
    miscdev::Registration,
    prelude::*,
    user_ptr::{UserSlicePtrReader, UserSlicePtrWriter},
    Error,
};

mod allocation;
mod context;
mod defs;
mod linked_list;
mod node;
mod process;
mod range_alloc;
mod raw_list;
mod thread;
mod transaction;

use {context::Context, thread::Thread};

module! {
    type: BinderModule,
    name: b"rust_binder",
    author: b"Wedson Almeida Filho",
    description: b"Android Binder",
    license: b"GPL v2",
    params: {},
}

enum Either<L, R> {
    Left(L),
    Right(R),
}

trait DeliverToRead {
    /// Performs work. Returns true if remaining work items in the queue should be processed
    /// immediately, or false if it should return to the caller before processing additional
    /// work items.
    fn do_work(
        self: Arc<Self>,
        thread: &Thread,
        writer: &mut UserSlicePtrWriter,
    ) -> KernelResult<bool>;

    /// Cancels the given work item. This is called instead of [`DeliverToRead::do_work`] when
    /// the work won't be delivered.
    fn cancel(self: Arc<Self>) {}

    /// Returns the linked list links for the work item.
    fn get_links(&self) -> &linked_list::Links<dyn DeliverToRead>;
}

impl linked_list::GetLinks for Arc<dyn DeliverToRead> {
    type EntryType = dyn DeliverToRead;
    fn get_links(obj: &dyn DeliverToRead) -> &linked_list::Links<dyn DeliverToRead> {
        obj.get_links()
    }
}

struct DeliverCode {
    code: u32,
    links: linked_list::Links<dyn DeliverToRead>,
}

impl DeliverCode {
    fn new(code: u32) -> Self {
        Self {
            code,
            links: linked_list::Links::new(),
        }
    }
}

impl DeliverToRead for DeliverCode {
    fn do_work(
        self: Arc<Self>,
        _thread: &Thread,
        writer: &mut UserSlicePtrWriter,
    ) -> KernelResult<bool> {
        writer.write(&self.code)?;
        Ok(true)
    }

    fn get_links(&self) -> &linked_list::Links<dyn DeliverToRead> {
        &self.links
    }
}

extern "C" {
    #[allow(improper_ctypes)]
    fn rust_helper_alloc_pages(
        gfp_mask: bindings::gfp_t,
        order: c_types::c_uint,
    ) -> *mut bindings::page;

    #[allow(improper_ctypes)]
    fn rust_helper_kmap(page: *mut bindings::page) -> *mut c_types::c_void;

    #[allow(improper_ctypes)]
    fn rust_helper_kunmap(page: *mut bindings::page);
}

/// `Pages` holds a reference to a set of pages of order `ORDER`. Having the order as a generic
/// const allows the struct to have the same size as a pointer.
struct Pages<const ORDER: u32> {
    pages: *mut bindings::page,
}

impl<const ORDER: u32> Pages<ORDER> {
    fn new() -> KernelResult<Self> {
        // TODO: Consider whether we want to allow callers to specify flags.
        let pages = unsafe {
            rust_helper_alloc_pages(
                bindings::GFP_KERNEL | bindings::__GFP_ZERO | bindings::__GFP_HIGHMEM,
                ORDER,
            )
        };
        if pages.is_null() {
            return Err(Error::ENOMEM);
        }
        Ok(Self { pages })
    }

    fn insert_page(&self, vma: &mut bindings::vm_area_struct, address: usize) -> KernelResult {
        let ret = unsafe { bindings::vm_insert_page(vma, address as _, self.pages) };
        if ret != 0 {
            Err(Error::from_kernel_errno(ret))
        } else {
            Ok(())
        }
    }

    fn copy_into_page(
        &self,
        reader: &mut UserSlicePtrReader,
        offset: usize,
        len: usize,
    ) -> KernelResult {
        let mapping = self.kmap(0).unwrap();
        unsafe { reader.read_raw((mapping.ptr as usize + offset) as _, len) }?;
        Ok(())
    }

    unsafe fn read(&self, dest: *mut u8, offset: usize, len: usize) {
        let mapping = self.kmap(0).unwrap();
        ptr::copy((mapping.ptr as *mut u8).add(offset), dest, len);
    }

    unsafe fn write(&self, src: *const u8, offset: usize, len: usize) {
        let mapping = self.kmap(0).unwrap();
        ptr::copy(src, (mapping.ptr as *mut u8).add(offset), len);
    }

    fn kmap(&self, index: usize) -> Option<PageMapping> {
        if index >= 1usize << ORDER {
            return None;
        }
        let page = unsafe { self.pages.add(index) };
        let ptr = unsafe { rust_helper_kmap(page) };
        Some(PageMapping {
            page,
            ptr,
            _phantom: PhantomData,
        })
    }
}
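
// Illustrative sketch (not part of the original patch): map page 0 of an order-0 allocation and
// write one byte through the kernel mapping; the `PageMapping` guard calls `rust_helper_kunmap`
// when it goes out of scope.
#[allow(dead_code)]
fn example_pages_usage() -> KernelResult {
    let pages = Pages::<0>::new()?;
    let mapping = pages.kmap(0).ok_or(Error::EINVAL)?;
    // SAFETY: The mapping is valid for at least one byte at offset 0, and the page was
    // zero-initialised (`__GFP_ZERO`), so writing a single byte is in bounds.
    unsafe { *(mapping.ptr as *mut u8) = 0xff };
    Ok(())
    // `mapping` is dropped here, which unmaps the page; `pages` later frees the page itself.
}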

impl<const ORDER: u32> Drop for Pages<ORDER> {
    fn drop(&mut self) {
        unsafe { bindings::__free_pages(self.pages, ORDER) };
    }
}

struct PageMapping<'a> {
    page: *mut bindings::page,
    ptr: *mut c_types::c_void,
    _phantom: PhantomData<&'a i32>,
}

impl Drop for PageMapping<'_> {
    fn drop(&mut self) {
        unsafe { rust_helper_kunmap(self.page) };
    }
}

/// Rounds `value` up to the next multiple of the pointer size.
const fn ptr_align(value: usize) -> usize {
    let size = core::mem::size_of::<usize>() - 1;
    (value + size) & !size
}
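
// For illustration (not part of the original patch): on a 64-bit target, `ptr_align(5) == 8`,
// `ptr_align(8) == 8` and `ptr_align(9) == 16`; the mask trick works because the pointer size
// is a power of two.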

unsafe impl Sync for BinderModule {}

struct BinderModule {
    _reg: Pin<Box<Registration<Arc<Context>>>>,
}

impl KernelModule for BinderModule {
    fn init() -> KernelResult<Self> {
        let pinned_ctx = Context::new()?;
        let ctx = unsafe { Pin::into_inner_unchecked(pinned_ctx) };
        let reg = Registration::<Arc<Context>>::new_pinned::<process::Process>(
            cstr!("rust_binder"),
            None,
            ctx,
        )?;
        Ok(Self { _reg: reg })
    }
}

@@ -0,0 +1,207 @@
// SPDX-License-Identifier: GPL-2.0

use alloc::sync::Arc;
use core::sync::atomic::{AtomicBool, Ordering};
use kernel::{bindings, prelude::*, sync::Ref, user_ptr::UserSlicePtrWriter};

use crate::{
    defs::*,
    linked_list::Links,
    node::NodeRef,
    process::Process,
    ptr_align,
    thread::{BinderResult, Thread},
    DeliverToRead, Either,
};

pub(crate) struct Transaction {
    // TODO: Node should be released when the buffer is released.
    node_ref: Option<NodeRef>,
    stack_next: Option<Arc<Transaction>>,
    pub(crate) from: Arc<Thread>,
    to: Ref<Process>,
    free_allocation: AtomicBool,
    code: u32,
    flags: u32,
    data_size: usize,
    offsets_size: usize,
    data_address: usize,
    links: Links<dyn DeliverToRead>,
}

impl Transaction {
    pub(crate) fn new(
        node_ref: NodeRef,
        stack_next: Option<Arc<Transaction>>,
        from: &Arc<Thread>,
        tr: &BinderTransactionData,
    ) -> BinderResult<Self> {
        let to = node_ref.node.owner.clone();
        let alloc = from.copy_transaction_data(&to, tr)?;
        let data_address = alloc.ptr;
        alloc.keep_alive();
        Ok(Self {
            node_ref: Some(node_ref),
            stack_next,
            from: from.clone(),
            to,
            code: tr.code,
            flags: tr.flags,
            data_size: tr.data_size as _,
            data_address,
            offsets_size: tr.offsets_size as _,
            links: Links::new(),
            free_allocation: AtomicBool::new(true),
        })
    }

    pub(crate) fn new_reply(
        from: &Arc<Thread>,
        to: Ref<Process>,
        tr: &BinderTransactionData,
    ) -> BinderResult<Self> {
        let alloc = from.copy_transaction_data(&to, tr)?;
        let data_address = alloc.ptr;
        alloc.keep_alive();
        Ok(Self {
            node_ref: None,
            stack_next: None,
            from: from.clone(),
            to,
            code: tr.code,
            flags: tr.flags,
            data_size: tr.data_size as _,
            data_address,
            offsets_size: tr.offsets_size as _,
            links: Links::new(),
            free_allocation: AtomicBool::new(true),
        })
    }

    /// Determines if the transaction is stacked on top of the given transaction.
    pub(crate) fn is_stacked_on(&self, onext: &Option<Arc<Self>>) -> bool {
        match (&self.stack_next, onext) {
            (None, None) => true,
            (Some(stack_next), Some(next)) => Arc::ptr_eq(stack_next, next),
            _ => false,
        }
    }

    /// Returns a copy of the pointer to the next transaction on the transaction stack, if there
    /// is one.
    pub(crate) fn clone_next(&self) -> Option<Arc<Self>> {
        let next = self.stack_next.as_ref()?;
        Some(next.clone())
    }

    /// Searches the transaction stack for a thread that belongs to the target process. This is
    /// useful when finding a target for a new transaction: if the node belongs to a process that
    /// is already part of the transaction stack, we reuse the thread.
    fn find_target_thread(&self) -> Option<Arc<Thread>> {
        let process = &self.node_ref.as_ref()?.node.owner;

        let mut it = &self.stack_next;
        while let Some(transaction) = it {
            if Ref::ptr_eq(&transaction.from.process, process) {
                return Some(transaction.from.clone());
            }
            it = &transaction.stack_next;
        }
        None
    }

    /// Searches the transaction stack for a transaction originating at the given thread.
    pub(crate) fn find_from(&self, thread: &Thread) -> Option<Arc<Transaction>> {
        let mut it = &self.stack_next;
        while let Some(transaction) = it {
            if core::ptr::eq(thread, transaction.from.as_ref()) {
                return Some(transaction.clone());
            }

            it = &transaction.stack_next;
        }
        None
    }

    /// Submits the transaction to a work queue. Uses a thread if there is one in the transaction
    /// stack, otherwise uses the destination process.
    pub(crate) fn submit(self: Arc<Self>) -> BinderResult {
        if let Some(thread) = self.find_target_thread() {
            thread.push_work(self)
        } else {
            let process = self.to.clone();
            process.push_work(self)
        }
    }
}

impl DeliverToRead for Transaction {
    fn do_work(
        self: Arc<Self>,
        thread: &Thread,
        writer: &mut UserSlicePtrWriter,
    ) -> KernelResult<bool> {
        /* TODO: Initialise the following fields from tr:
            pub sender_pid: pid_t,
            pub sender_euid: uid_t,
        */
        let mut tr = BinderTransactionData::default();

        if let Some(nref) = &self.node_ref {
            let (ptr, cookie) = nref.node.get_id();
            tr.target.ptr = ptr as _;
            tr.cookie = cookie as _;
        }

        tr.code = self.code;
        tr.flags = self.flags;
        tr.data_size = self.data_size as _;
        tr.data.ptr.buffer = self.data_address as _;
        tr.offsets_size = self.offsets_size as _;
        if tr.offsets_size > 0 {
            tr.data.ptr.offsets = (self.data_address + ptr_align(self.data_size)) as _;
        }
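
        // For illustration (not part of the original patch): the buffer is laid out as the
        // payload followed by the offsets array, with the latter aligned to the pointer size;
        // e.g. a 10-byte payload on a 64-bit target puts the offsets at `data_address + 16`.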

        // When `drop` is called, we don't want the allocation to be freed because it is now the
        // user's responsibility to free it.
        self.free_allocation.store(false, Ordering::Relaxed);

        let code = if self.node_ref.is_none() {
            BR_REPLY
        } else {
            BR_TRANSACTION
        };

        // Write the transaction code and data to the user buffer. On failure we complete the
        // transaction with an error.
        if let Err(err) = writer.write(&code).and_then(|_| writer.write(&tr)) {
            let reply = Either::Right(BR_FAILED_REPLY);
            self.from.deliver_reply(reply, &self);
            return Err(err);
        }

        // When this is not a reply and not an async transaction, update `current_transaction`.
        // If it's a reply, `current_transaction` has already been updated appropriately.
        if self.node_ref.is_some() && tr.flags & bindings::transaction_flags_TF_ONE_WAY == 0 {
            thread.set_current_transaction(self);
        }

        Ok(false)
    }

    fn cancel(self: Arc<Self>) {
        let reply = Either::Right(BR_DEAD_REPLY);
        self.from.deliver_reply(reply, &self);
    }

    fn get_links(&self) -> &Links<dyn DeliverToRead> {
        &self.links
    }
}

impl Drop for Transaction {
    fn drop(&mut self) {
        if self.free_allocation.load(Ordering::Relaxed) {
            self.to.buffer_get(self.data_address);
        }
    }
}