use std::ptr;
use std::sync::{Arc, Mutex, Weak};

use r9_macro::SharedFromSelfLock;
use r9_macro_derive::SharedFromSelfLock;

/// A doubly linked list node guarded by a `Mutex`.
///
/// Links are stored as `Weak` references, so list membership alone never
/// keeps a node or its payload alive. `_self_wp` holds a weak back-reference
/// to the node's own `Mutex`, which `shared_from_self_lock()` upgrades to a
/// strong `Arc`.
#[derive(Default, SharedFromSelfLock)]
pub struct ListHeadLock<T> {
    prev: Weak<Mutex<Self>>,
    next: Weak<Mutex<Self>>,
    data: Weak<Mutex<T>>,
    _self_wp: Weak<Mutex<Self>>,
}

// `Send`/`Sync` are asserted manually; the auto traits would otherwise carry a
// `T: Send` bound, since every field is a `Weak<Mutex<..>>`.
unsafe impl<T> Send for ListHeadLock<T> {}
unsafe impl<T> Sync for ListHeadLock<T> {}

impl<T> ListHeadLock<T> {
    /// Allocates a node, stores its own weak back-reference and links it to
    /// itself.
    fn new(data: Weak<Mutex<T>>) -> Arc<Mutex<Self>> {
        let this = Arc::new(Mutex::new(Self {
            prev: Default::default(),
            next: Default::default(),
            data,
            _self_wp: Default::default(),
        }));
        this.lock().unwrap()._self_wp = Arc::downgrade(&this);
        this.lock().unwrap().init();
        this
    }

    pub fn new_head() -> Arc<Mutex<Self>> {
        Self::new(Default::default())
    }

    pub fn new_node(data: Weak<Mutex<T>>) -> Arc<Mutex<Self>> {
        Self::new(data)
    }

    /// (Re)initialises the node so that `prev` and `next` point back at the
    /// node itself.
    fn init(&mut self) {
        self.prev = Arc::downgrade(&self.shared_from_self_lock());
        self.next = Arc::downgrade(&self.shared_from_self_lock());
    }

    pub fn data(&self) -> Weak<Mutex<T>> {
        self.data.clone()
    }

    /// Unlinks the node from its list and resets it to a singleton.
    pub fn del_init(&mut self) {
        self.next.upgrade().unwrap().lock().unwrap().prev = self.prev.clone();
        self.prev.upgrade().unwrap().lock().unwrap().next = self.next.clone();
        self.init();
    }

    pub fn empty(&self) -> bool {
        ptr::eq(self.next.as_ptr(), Arc::as_ptr(&self.shared_from_self_lock()))
    }

    /// Inserts `pnew` at the tail of the list, i.e. just before `head`.
    pub fn add_tail(head: &Arc<Mutex<Self>>, pnew: &Arc<Mutex<Self>>) {
        let prev = head.lock().unwrap().prev.clone();
        let next = head;

        next.lock().unwrap().prev = Arc::downgrade(pnew);
        pnew.lock().unwrap().next = Arc::downgrade(next);
        pnew.lock().unwrap().prev = prev.clone();
        prev.upgrade().unwrap().lock().unwrap().next = Arc::downgrade(pnew);
    }

    /// Returns the payload of the first node after this head.
    ///
    /// Panics if the list is empty.
    pub fn first_entry(&self) -> Weak<Mutex<T>> {
        if self.empty() {
            panic!("first_entry called on an empty list");
        }
        self.next.upgrade().unwrap().lock().unwrap().data()
    }

    /// Moves the list hanging off `head` onto `pnew` and reinitialises `head`.
    pub fn replace_init(head: &Arc<Mutex<Self>>, pnew: &Arc<Mutex<Self>>) {
        if head.lock().unwrap().empty() {
            return;
        }
        pnew.lock().unwrap().next = head.lock().unwrap().next.clone();
        pnew.lock().unwrap().next.upgrade().unwrap().lock().unwrap().prev = Arc::downgrade(pnew);
        pnew.lock().unwrap().prev = head.lock().unwrap().prev.clone();
        pnew.lock().unwrap().prev.upgrade().unwrap().lock().unwrap().next = Arc::downgrade(pnew);
        head.lock().unwrap().init();
    }

    /// Walks the list, calling `cb` with each node's payload; iteration stops
    /// early when `cb` returns `false`.
    pub fn for_each(&self, cb: fn(&Weak<Mutex<T>>) -> bool) {
        let mut pos = self.next.clone();
        let self_weak = Arc::downgrade(&self.shared_from_self_lock());
        while !Weak::ptr_eq(&pos, &self_weak) {
            if !cb(&pos.upgrade().unwrap().lock().unwrap().data()) {
                break;
            }
            pos = pos.upgrade().unwrap().lock().unwrap().next.clone();
        }
    }
}
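
// A minimal usage sketch, not part of the original file: it assumes the crate
// builds with `std` and a test harness, and that the `SharedFromSelfLock`
// derive provides the `shared_from_self_lock()` method relied on above. It
// simply exercises the public API (new_head / new_node / add_tail / for_each /
// del_init / first_entry); the names and values below are illustrative only.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn add_walk_and_unlink() {
        let head = ListHeadLock::<u32>::new_head();

        // The list only stores `Weak` handles, so payloads must be kept alive
        // by strong references held elsewhere.
        let a = Arc::new(Mutex::new(1u32));
        let b = Arc::new(Mutex::new(2u32));
        let na = ListHeadLock::new_node(Arc::downgrade(&a));
        let nb = ListHeadLock::new_node(Arc::downgrade(&b));

        ListHeadLock::add_tail(&head, &na);
        ListHeadLock::add_tail(&head, &nb);
        assert!(!head.lock().unwrap().empty());

        // Visit every payload; returning `true` keeps the iteration going.
        head.lock().unwrap().for_each(|d| {
            let _value = *d.upgrade().unwrap().lock().unwrap();
            true
        });

        // Unlink the first node; the head should now see the second payload.
        na.lock().unwrap().del_init();
        let first = head.lock().unwrap().first_entry();
        assert_eq!(*first.upgrade().unwrap().lock().unwrap(), 2);
    }
}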