
Searched refs:lock (Results 1 – 25 of 53) sorted by relevance

/DragonOS-0.1.5/kernel/src/common/
spinlock.h:22 int8_t lock; // 1:unlocked 0:locked member
25 extern void __arch_spin_lock(spinlock_t *lock);
26 extern void __arch_spin_unlock(spinlock_t *lock);
28 extern void __arch_spin_lock_no_preempt(spinlock_t *lock);
29 extern void __arch_spin_unlock_no_preempt(spinlock_t *lock);
31 extern long __arch_spin_trylock(spinlock_t *lock);
38 void spin_lock(spinlock_t *lock) in spin_lock() argument
40 __arch_spin_lock(lock); in spin_lock()
48 void spin_unlock(spinlock_t *lock) in spin_unlock() argument
50 __arch_spin_unlock(lock); in spin_unlock()
[all …]
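
A minimal usage sketch for the spin_lock()/spin_unlock() pair declared above. The include path and the spin_init() initialiser are assumptions (spin_init() is not among the lines shown here, though it appears elsewhere in these results, e.g. in mutex.c), and the guarded counter is hypothetical.

    #include <common/spinlock.h>    // assumed include path for the header above

    // Hypothetical shared statistic guarded by a spinlock (lock byte: 1 = unlocked, 0 = locked).
    static spinlock_t stats_lock;
    static uint64_t packet_count = 0;

    void stats_init(void)
    {
        spin_init(&stats_lock);     // mark the lock as unlocked before first use
    }

    void stats_count_packet(void)
    {
        spin_lock(&stats_lock);     // busy-waits until the lock byte can be taken
        ++packet_count;             // critical section: one CPU at a time
        spin_unlock(&stats_lock);   // stores 1 back, letting the next waiter in
    }
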
mutex.h:39 void mutex_init(mutex_t *lock);
46 void mutex_lock(mutex_t *lock);
53 void mutex_unlock(mutex_t *lock);
62 int mutex_trylock(mutex_t *lock);
69 #define mutex_is_locked(lock) ((atomic_read(&(lock)->count) == 1) ? 0 : 1) argument
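
A companion sketch for the mutex_t interface above (mutex_init(), mutex_lock(), mutex_unlock()); the include path and the protected device state are assumptions. Note from the macro on the last line that mutex_is_locked() evaluates to 1 while the mutex is held (count != 1) and to 0 otherwise.

    #include <common/mutex.h>       // assumed include path

    // Hypothetical device state protected by a sleeping lock: contenders are
    // parked on the mutex's wait_list instead of spinning, so this must not be
    // used from interrupt context.
    static mutex_t dev_mutex;
    static int dev_power_state = 0;

    void dev_subsystem_init(void)
    {
        mutex_init(&dev_mutex);     // count = 1 (unlocked), empty wait_list
    }

    void dev_set_power(int state)
    {
        mutex_lock(&dev_mutex);     // may sleep until the current holder releases it
        dev_power_state = state;    // critical section
        mutex_unlock(&dev_mutex);
    }
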
kfifo.h:130 …ays_inline kfifo_in_locked(struct kfifo_t *fifo, const void *from, uint32_t size, spinlock_t *lock) in kfifo_in_locked() argument
132 spin_lock(lock); in kfifo_in_locked()
134 spin_unlock(lock); in kfifo_in_locked()
147 …t __always_inline kfifo_out_locked(struct kfifo_t *fifo, void *to, uint32_t size, spinlock_t *lock) in kfifo_out_locked() argument
149 spin_lock(lock); in kfifo_out_locked()
151 spin_unlock(lock); in kfifo_out_locked()
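
The two helpers above wrap a FIFO operation in the caller-supplied spinlock, so a producer and a consumer can share one kfifo_t safely. A hedged sketch of how they are meant to be called; the fifo initialisation is not part of these results and is assumed to happen elsewhere, and the byte-count return value of kfifo_out_locked() follows the usual kfifo convention, which is an assumption here.

    #include <common/kfifo.h>       // assumed include paths
    #include <common/spinlock.h>

    // Hypothetical log ring shared between a producer and a consumer context.
    static struct kfifo_t log_fifo;     // assumed to be initialised elsewhere
    static spinlock_t log_fifo_lock;    // serialises pushes and pops

    void log_push(const char *msg, uint32_t len)
    {
        // takes log_fifo_lock, copies len bytes into the fifo, releases the lock
        kfifo_in_locked(&log_fifo, msg, len, &log_fifo_lock);
    }

    uint32_t log_pop(char *out, uint32_t len)
    {
        // same pattern on the consumer side; assumed to return the number of
        // bytes actually copied out
        return kfifo_out_locked(&log_fifo, out, len, &log_fifo_lock);
    }
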
wait_queue.h:38 void wait_queue_sleep_on_unlock(wait_queue_node_t *wait_queue_head, void *lock);
57 spinlock_t lock; // The queue needs a spinlock; it is not used internally yet, but may be in the future. [Used inside completion] member
102 void wait_queue_sleep_with_node_unlock(wait_queue_head_t *q, wait_queue_node_t *wait, void *lock);
/DragonOS-0.1.5/kernel/src/libs/
mutex.c:10 void mutex_init(mutex_t *lock) in mutex_init() argument
12 atomic_set(&lock->count, 1); in mutex_init()
13 spin_init(&lock->wait_lock); in mutex_init()
14 list_init(&lock->wait_list); in mutex_init()
24 static void __mutex_acquire(mutex_t *lock) in __mutex_acquire() argument
32 void mutex_lock(mutex_t *lock) in mutex_lock() argument
38 spin_lock(&lock->wait_lock); in mutex_lock()
39 if (likely(mutex_is_locked(lock))) in mutex_lock()
45 spin_unlock(&lock->wait_lock); in mutex_lock()
51 list_append(&lock->wait_list, &waiter->list); in mutex_lock()
[all …]
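
Because the slow path above parks the caller on wait_list (i.e. mutex_lock() may sleep), it must only be called from a context that is allowed to block, and any spinlock should be nested inside the mutex-protected section rather than around it. A hypothetical sketch of that ordering:

    #include <common/mutex.h>
    #include <common/spinlock.h>    // assumed include paths

    static mutex_t cache_mutex;      // outer lock: the section may sleep
    static spinlock_t stats_lock;    // inner lock: short, non-sleeping update
    static unsigned long cache_refreshes = 0;

    void cache_refresh(void)
    {
        mutex_lock(&cache_mutex);    // may sleep on cache_mutex's wait_list
        /* ... potentially long, sleepable work on the cache ... */
        spin_lock(&stats_lock);      // a spinlock inside a mutex section is fine
        ++cache_refreshes;
        spin_unlock(&stats_lock);
        mutex_unlock(&cache_mutex);
        // The reverse nesting (mutex_lock() while holding a spinlock) could put
        // the task to sleep with the spinlock held and must be avoided.
    }
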
spinlock.rs:15 pub fn spin_lock_irqsave(lock: *mut spinlock_t, flags: &mut u64) { in spin_lock_irqsave()
18 spin_lock(lock); in spin_lock_irqsave()
24 pub fn spin_unlock_irqrestore(lock: *mut spinlock_t, flags: &u64) { in spin_unlock_irqrestore()
26 spin_unlock(lock); in spin_unlock_irqrestore()
34 pub fn spin_is_locked(lock: &spinlock_t) -> bool { in spin_is_locked()
35 let val = unsafe { read_volatile(&lock.lock as *const i8) }; in spin_is_locked()
42 Self { lock: 1 } in default()
47 pub fn spin_lock_irq(lock: *mut spinlock_t) { in spin_lock_irq()
50 spin_lock(lock); in spin_lock_irq()
55 pub fn spin_unlock_irq(lock: *mut spinlock_t) { in spin_unlock_irq()
[all …]
lockref.rs:14 pub lock: RawSpinlock, field
21 lock: RawSpinlock, field
37 lock: RawSpinlock::INIT,
61 if !old.lock.is_locked() { in cmpxchg_loop()
66 new.lock.set_value(false); in cmpxchg_loop()
136 self.lock.lock(); in inc()
138 self.lock.unlock(); in inc()
159 self.lock.lock(); in inc_not_zero()
166 self.lock.unlock(); in inc_not_zero()
188 self.lock.lock(); in inc_not_dead()
[all …]
lockref.c:13 … while (likely(!spin_is_locked(&old.lock))) \
46 spin_lock(&lock_ref->lock); in lockref_inc()
48 spin_unlock(&lock_ref->lock); in lockref_inc()
70 spin_lock(&lock_ref->lock); in lockref_inc_not_zero()
77 spin_unlock(&lock_ref->lock); in lockref_inc_not_zero()
104 spin_lock(&lock_ref->lock); in lockref_dec()
110 spin_unlock(&lock_ref->lock); in lockref_dec()
161 spin_lock(&lock_ref->lock); in lockref_dec_not_zero()
167 spin_unlock(&lock_ref->lock); in lockref_dec_not_zero()
193 spin_lock(&lock_ref->lock); in lockref_dec_or_lock_not_zero()
[all …]
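
lockref couples a spinlock with a reference count: the macro at the top of the file attempts a lock-free compare-exchange while the lock is not held, and the functions shown fall back to spin_lock()/spin_unlock() around the count update. A usage sketch under loudly stated assumptions: the struct name (`struct lockref`), its embedded count, and the include path are not visible in these results and follow Linux-style naming here.

    #include <common/lockref.h>      // assumed include path

    // Hypothetical cached object whose lifetime is governed by a lockref.
    struct cache_entry
    {
        struct lockref ref;          // assumed type name; these results only show its .lock member
        char payload[64];
    };

    void cache_entry_get(struct cache_entry *e)
    {
        // fast path: cmpxchg on the (lock, count) pair; slow path: spin_lock + increment
        lockref_inc(&e->ref);
    }

    void cache_entry_put(struct cache_entry *e)
    {
        // decrement the count; freeing the entry once it reaches zero is left out of this sketch
        lockref_dec(&e->ref);
    }
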
rwlock.rs:33 lock: AtomicU32, field
41 lock: &'a AtomicU32, field
70 lock: AtomicU32::new(0), in new()
97 let value = self.lock.fetch_add(READER, Ordering::Acquire); in current_reader()
101 self.lock.fetch_sub(READER, Ordering::Release); in current_reader()
126 self.lock.fetch_sub(READER, Ordering::Release); in try_read()
131 lock: &self.lock, in try_read()
152 let state = self.lock.load(Ordering::Relaxed); in reader_count()
160 return (self.lock.load(Ordering::Relaxed) & WRITER) / WRITER; in writer_count()
169 .lock in try_write()
[all …]
mutex.rs:39 lock: &'a Mutex<T>, field
61 pub fn lock(&self) -> MutexGuard<T> { in lock() method
63 let mut inner: SpinLockGuard<MutexInner> = self.inner.lock(); in lock()
83 return MutexGuard { lock: self }; in lock()
92 let mut inner = self.inner.lock(); in try_lock()
100 return Ok(MutexGuard { lock: self }); in try_lock()
115 let mut inner: SpinLockGuard<MutexInner> = self.inner.lock(); in unlock()
153 return unsafe { &*self.lock.data.get() }; in deref()
160 return unsafe { &mut *self.lock.data.get() }; in deref_mut()
167 self.lock.unlock(); in drop()
wait_queue.rs:22 lock: Default::default(), in default()
44 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep()
53 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep_uninterruptible()
63 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep_unlock_spinlock()
74 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep_unlock_mutex()
85 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep_uninterruptible_unlock_spinlock()
96 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in sleep_uninterruptible_unlock_mutex()
112 let mut guard: SpinLockGuard<InnerWaitQueue> = self.0.lock(); in wakeup()
132 return self.0.lock().wait_list.len(); in len()
wait_queue_head.c:14 spin_init(&wait_queue->lock); in wait_queue_head_init()
37 void wait_queue_sleep_with_node_unlock(wait_queue_head_t *q, wait_queue_node_t *wait, void *lock) in wait_queue_sleep_with_node_unlock() argument
43 spin_unlock((spinlock_t *)lock); in wait_queue_sleep_with_node_unlock()
/DragonOS-0.1.5/kernel/src/arch/x86_64/asm/
spinlock.c:4 void __arch_spin_lock(spinlock_t *lock) in __arch_spin_lock() argument
15 : "=m"(lock->lock)::"memory"); in __arch_spin_lock()
19 void __arch_spin_unlock(spinlock_t *lock) in __arch_spin_unlock() argument
22 __asm__ __volatile__("movb $1, %0 \n\t" : "=m"(lock->lock)::"memory"); in __arch_spin_unlock()
25 void __arch_spin_lock_no_preempt(spinlock_t *lock) in __arch_spin_lock_no_preempt() argument
36 : "=m"(lock->lock)::"memory"); in __arch_spin_lock_no_preempt()
39 void __arch_spin_unlock_no_preempt(spinlock_t *lock) in __arch_spin_unlock_no_preempt() argument
41 __asm__ __volatile__("movb $1, %0 \n\t" : "=m"(lock->lock)::"memory"); in __arch_spin_unlock_no_preempt()
44 long __arch_spin_trylock(spinlock_t *lock) in __arch_spin_trylock() argument
50 : "=q"(tmp_val), "=m"(lock->lock) in __arch_spin_trylock()
/DragonOS-0.1.5/kernel/src/sched/
completion.c:23 spin_lock(&x->wait_queue.lock); in complete()
29 spin_unlock(&x->wait_queue.lock); in complete()
39 spin_lock(&x->wait_queue.lock); in complete_all()
45 spin_unlock(&x->wait_queue.lock); in complete_all()
70 spin_unlock(&x->wait_queue.lock); in __wait_for_common()
73 spin_lock(&x->wait_queue.lock); in __wait_for_common()
94 spin_lock(&x->wait_queue.lock); in wait_for_completion()
96 spin_unlock(&x->wait_queue.lock); in wait_for_completion()
109 spin_lock(&x->wait_queue.lock); in wait_for_completion_timeout()
111 spin_unlock(&x->wait_queue.lock); in wait_for_completion_timeout()
[all …]
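
The completion API above pairs waiters (wait_for_completion(), wait_for_completion_timeout()) with signallers (complete(), complete_all()), all serialised by the embedded wait queue's spinlock. A sketch of the intended flow; the `struct completion` type name, the completion_init() initialiser, and the include path are assumptions, since only the function names appear in these results.

    #include <common/completion.h>   // assumed include path

    // Hypothetical driver: an interrupt handler signals that a transfer finished.
    static struct completion xfer_done;  // assumed type name

    void xfer_driver_init(void)
    {
        completion_init(&xfer_done);     // assumed initialiser, not visible in these results
    }

    void start_transfer_and_wait(void)
    {
        /* kick off the hypothetical hardware transfer here */
        wait_for_completion(&xfer_done); // sleeps until complete() is called
    }

    void transfer_irq_handler(void)
    {
        complete(&xfer_done);            // wakes one waiter; complete_all() would wake every waiter
    }
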
rt.rs:41 lock: RawSpinlock, field
50 lock: RawSpinlock::INIT, in new()
55 self.lock.lock(); in enqueue()
59 self.lock.unlock(); in enqueue()
63 self.lock.unlock(); in enqueue()
69 self.lock.lock(); in dequeue()
77 self.lock.unlock(); in dequeue()
81 self.lock.lock(); in enqueue_front()
85 self.lock.unlock(); in enqueue_front()
89 self.lock.unlock(); in enqueue_front()
cfs.rs:43 lock: RawSpinlock, field
54 lock: RawSpinlock::INIT, in new()
69 self.lock.lock(); in enqueue()
73 self.lock.unlock(); in enqueue()
78 self.lock.unlock(); in enqueue()
84 self.lock.lock(); in dequeue()
92 self.lock.unlock(); in dequeue()
152 current_cpu_queue.lock.lock(); in timer_update_jiffies()
158 current_cpu_queue.lock.unlock(); in timer_update_jiffies()
/DragonOS-0.1.5/docs/kernel/locking/
mutex.md:49 &emsp;&emsp;When you need to read or modify data protected by a Mutex, first call the Mutex's `lock()` method. It returns a `MutexGuard`. You can then use the protected data's member functions to perform operations. Or…
56 let mut g :MutexGuard<Vec<i32>>= x.lock();
101 ### 4.2. lock - acquire the lock
106 pub fn lock(&self) -> MutexGuard<T>
145 **`void mutex_init(mutex_t *lock)`**
151 **`void mutex_lock(mutex_t *lock)`**
157 **`void mutex_unlock(mutex_t *lock)`**
163 **`void mutex_trylock(mutex_t *lock)`**
169 **`void mutex_is_locked(mutex_t *lock)`**
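
A small sketch of the C interface documented above, focused on the non-blocking path. The header earlier in these results declares mutex_trylock() as returning int even though this documentation line shows void; treating a nonzero return as "lock acquired" (the Linux convention) is an assumption, since the lines shown here give only the signatures.

    #include <common/mutex.h>        // assumed include path

    static mutex_t cfg_mutex;
    static int cfg_dirty = 0;        // hypothetical state guarded by cfg_mutex

    // Called from a context that prefers not to block: give up if the mutex is busy.
    int try_flush_config(void)
    {
        if (!mutex_trylock(&cfg_mutex))   // assumption: nonzero return means the lock was taken
            return -1;                    // someone else holds it; the caller retries later
        if (cfg_dirty)
            cfg_dirty = 0;                // hypothetical flush work
        mutex_unlock(&cfg_mutex);
        return 0;
    }
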
spinlock.md:19 You must call the `lock()` method first, and then manually call `unlock()` when leaving the critical section. We do not explicitly tell the compiler which data the spinlock actually protects.
39 lock: RawSpinlock,
55 &emsp;&emsp;When you need to read or modify data protected by a SpinLock, first call the SpinLock's `lock()` method. It returns a `SpinLockGuard`. You can then use the protected data's member functions…
62 let mut g :SpinLockGuard<Vec<i32>>= x.lock();
94 &emsp;&emsp;`SpinLock` can enforce the check at compile time because it introduces a `SpinLockGuard` as a guard. When writing code, we guarantee that only calling `SpinLock`'s `lock()…
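
The point made at spinlock.md:19 above applies equally to the C spin_lock()/spin_unlock() API earlier in these results: the lock is not tied to the data it protects, so every exit path has to release it by hand. A hypothetical C sketch of that discipline, which is exactly what the Rust SpinLockGuard automates by unlocking when the guard is dropped:

    #include <common/spinlock.h>     // assumed include path

    static spinlock_t table_lock;
    static int table_used = 0, table_cap = 16;   // hypothetical data guarded by table_lock

    int table_reserve_slot(void)
    {
        spin_lock(&table_lock);
        if (table_used >= table_cap)
        {
            spin_unlock(&table_lock);    // easy to forget on early returns; nothing checks this
            return -1;
        }
        int slot = table_used++;
        spin_unlock(&table_lock);        // with SpinLockGuard, dropping the guard does this implicitly
        return slot;
    }
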
/DragonOS-0.1.5/kernel/src/filesystem/ramfs/
mod.rs:107 let mut root_guard: SpinLockGuard<RamFSInode> = result.root_inode.0.lock(); in new()
130 let inode: SpinLockGuard<RamFSInode> = self.0.lock(); in read_at()
163 let mut inode: SpinLockGuard<RamFSInode> = self.0.lock(); in write_at()
184 let inode: SpinLockGuard<RamFSInode> = self.0.lock(); in poll()
197 return self.0.lock().fs.upgrade().unwrap(); in fs()
205 let inode = self.0.lock(); in metadata()
213 let mut inode = self.0.lock(); in set_metadata()
225 let mut inode = self.0.lock(); in resize()
242 let mut inode = self.0.lock(); in create_with_data()
278 result.0.lock().self_ref = Arc::downgrade(&result); in create_with_data()
[all …]
/DragonOS-0.1.5/kernel/src/driver/keyboard/
ps2_keyboard.rs:57 result.0.lock().self_ref = Arc::downgrade(&result); in new()
65 self.0.lock().fs = fs; in set_fs()
85 let guard = self.0.lock(); in read_at()
109 let guard = self.0.lock(); in open()
116 let guard = self.0.lock(); in close()
129 return Ok(self.0.lock().metadata.clone()); in metadata()
133 let mut inode = self.0.lock(); in set_metadata()
145 return self.0.lock().fs.upgrade().unwrap(); in fs()
/DragonOS-0.1.5/kernel/src/driver/disk/ahci/
ahci_inode.rs:63 result.0.lock().self_ref = Arc::downgrade(&result); in new()
71 self.0.lock().fs = fs; in set_fs()
89 return Ok(self.0.lock().metadata.clone()); in metadata()
93 return self.0.lock().fs.upgrade().unwrap(); in fs()
101 let mut inode = self.0.lock(); in set_metadata()
131 return self.0.lock().disk.read_at(offset, len, buf); in read_at()
150 return self.0.lock().disk.write_at(offset, len, buf); in write_at()
/DragonOS-0.1.5/kernel/src/filesystem/procfs/
mod.rs:269 let mut root_guard: SpinLockGuard<ProcFSInode> = result.root_inode.0.lock(); in new()
293 _sf.0.lock().fdata.pid = pid; in register_pid()
294 _sf.0.lock().fdata.ftype = ProcFileType::ProcStatus; in register_pid()
324 let mut inode: SpinLockGuard<ProcFSInode> = self.0.lock(); in open()
346 let guard: SpinLockGuard<ProcFSInode> = self.0.lock(); in close()
374 let inode: SpinLockGuard<ProcFSInode> = self.0.lock(); in read_at()
422 let inode: SpinLockGuard<ProcFSInode> = self.0.lock(); in poll()
435 return self.0.lock().fs.upgrade().unwrap(); in fs()
443 let inode = self.0.lock(); in metadata()
450 let mut inode = self.0.lock(); in set_metadata()
[all …]
/DragonOS-0.1.5/kernel/src/filesystem/devfs/
null_dev.rs:58 result.0.lock().self_ref = Arc::downgrade(&result); in new()
66 self.0.lock().fs = fs; in set_fs()
84 return Ok(self.0.lock().metadata.clone()); in metadata()
88 return self.0.lock().fs.upgrade().unwrap(); in fs()
96 let mut inode = self.0.lock(); in set_metadata()
zero_dev.rs:58 result.0.lock().self_ref = Arc::downgrade(&result); in new()
66 self.0.lock().fs = fs; in set_fs()
84 return Ok(self.0.lock().metadata.clone()); in metadata()
88 return self.0.lock().fs.upgrade().unwrap(); in fs()
96 let mut inode = self.0.lock(); in set_metadata()
/DragonOS-0.1.5/user/libs/libc/
.gitignore:2 Cargo.lock
