// cortex_m/peripheral/dwt.rs
//! Data Watchpoint and Trace (DWT) unit

#[cfg(not(armv6m))]
use volatile_register::WO;
use volatile_register::{RO, RW};

use crate::peripheral::DWT;

/// Register block
#[repr(C)]
pub struct RegisterBlock {
    /// Control
    pub ctrl: RW<u32>,
    /// Cycle Count
    #[cfg(not(armv6m))]
    pub cyccnt: RW<u32>,
    /// CPI Count
    #[cfg(not(armv6m))]
    pub cpicnt: RW<u32>,
    /// Exception Overhead Count
    #[cfg(not(armv6m))]
    pub exccnt: RW<u32>,
    /// Sleep Count
    #[cfg(not(armv6m))]
    pub sleepcnt: RW<u32>,
    /// LSU Count
    #[cfg(not(armv6m))]
    pub lsucnt: RW<u32>,
    /// Folded-instruction Count
    #[cfg(not(armv6m))]
    pub foldcnt: RW<u32>,
    /// Cortex-M0(+) does not implement the profiling counters
    #[cfg(armv6m)]
    reserved: [u32; 6],
    /// Program Counter Sample
    pub pcsr: RO<u32>,
    /// Comparators
    #[cfg(armv6m)]
    pub c: [Comparator; 2],
    /// Comparators
    #[cfg(not(armv6m))]
    pub c: [Comparator; 16],
    #[cfg(not(armv6m))]
    reserved: [u32; 932],
    /// Lock Access
    #[cfg(not(armv6m))]
    pub lar: WO<u32>,
    /// Lock Status
    #[cfg(not(armv6m))]
    pub lsr: RO<u32>,
}

/// Comparator
#[repr(C)]
pub struct Comparator {
    /// Comparator
    pub comp: RW<u32>,
    /// Comparator Mask
    pub mask: RW<u32>,
    /// Comparator Function
    pub function: RW<u32>,
    reserved: u32,
}

const NUMCOMP_OFFSET: u32 = 28;

const NOTRCPKT: u32 = 1 << 27;
const NOEXTTRIG: u32 = 1 << 26;
const NOCYCCNT: u32 = 1 << 25;
const NOPRFCNT: u32 = 1 << 24;
const CYCCNTENA: u32 = 1 << 0;

impl DWT {
    /// Number of comparators implemented
    ///
    /// A value of zero indicates no comparator support.
    #[inline]
    pub fn num_comp() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { ((*Self::PTR).ctrl.read() >> NUMCOMP_OFFSET) as u8 }
    }

    /// Returns `true` if the implementation supports sampling and exception tracing
    #[cfg(not(armv6m))]
    #[inline]
    pub fn has_exception_trace() -> bool {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).ctrl.read() & NOTRCPKT == 0 }
    }

    /// Returns `true` if the implementation includes external match signals
    #[cfg(not(armv6m))]
    #[inline]
    pub fn has_external_match() -> bool {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).ctrl.read() & NOEXTTRIG == 0 }
    }

    /// Returns `true` if the implementation supports a cycle counter
    #[cfg(not(armv6m))]
    #[inline]
    pub fn has_cycle_counter() -> bool {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).ctrl.read() & NOCYCCNT == 0 }
    }

    /// Returns `true` if the implementation supports the profiling counters
    #[cfg(not(armv6m))]
    #[inline]
    pub fn has_profiling_counter() -> bool {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).ctrl.read() & NOPRFCNT == 0 }
    }

    /// Enables the cycle counter
    ///
    /// The global trace enable (`DCB::enable_trace`) should be set before
    /// enabling the cycle counter; the processor may ignore writes to the
    /// DWT control register while the global trace is disabled.
    #[cfg(not(armv6m))]
    #[inline]
    pub fn enable_cycle_counter(&mut self) {
        unsafe { self.ctrl.modify(|r| r | CYCCNTENA) }
    }

    /// Disables the cycle counter
    #[cfg(not(armv6m))]
    #[inline]
    pub fn disable_cycle_counter(&mut self) {
        unsafe { self.ctrl.modify(|r| r & !CYCCNTENA) }
    }

    /// Returns `true` if the cycle counter is enabled
    #[cfg(not(armv6m))]
    #[inline]
    pub fn cycle_counter_enabled() -> bool {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).ctrl.read() & CYCCNTENA != 0 }
    }

    /// Returns the current clock cycle count
    #[cfg(not(armv6m))]
    #[inline]
    #[deprecated(
        since = "0.7.4",
        note = "Use `cycle_count` which follows the C-GETTER convention"
    )]
    pub fn get_cycle_count() -> u32 {
        Self::cycle_count()
    }

    /// Returns the current clock cycle count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn cycle_count() -> u32 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).cyccnt.read() }
    }

    /// Set the cycle count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_cycle_count(&mut self, count: u32) {
        unsafe { self.cyccnt.write(count) }
    }

    /// Removes the software lock on the DWT
    ///
    /// Some devices, like the STM32F7, software lock the DWT after a power cycle.
    #[cfg(not(armv6m))]
    #[inline]
    pub fn unlock() {
        // NOTE(unsafe) atomic write to a stateless, write-only register
        unsafe { (*Self::PTR).lar.write(0xC5AC_CE55) }
    }

    /// Get the CPI count
    ///
    /// Counts additional cycles required to execute multi-cycle instructions,
    /// except those recorded by `lsu_count`, and counts any instruction fetch
    /// stalls.
    #[cfg(not(armv6m))]
    #[inline]
    pub fn cpi_count() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).cpicnt.read() as u8 }
    }

    /// Set the CPI count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_cpi_count(&mut self, count: u8) {
        unsafe { self.cpicnt.write(count as u32) }
    }

    /// Get the total cycles spent in exception processing
    #[cfg(not(armv6m))]
    #[inline]
    pub fn exception_count() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).exccnt.read() as u8 }
    }

    /// Set the exception count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_exception_count(&mut self, count: u8) {
        unsafe { self.exccnt.write(count as u32) }
    }

    /// Get the total number of cycles that the processor is sleeping
    #[cfg(not(armv6m))]
    #[inline]
    pub fn sleep_count() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).sleepcnt.read() as u8 }
    }

    /// Set the sleep count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_sleep_count(&mut self, count: u8) {
        unsafe { self.sleepcnt.write(count as u32) }
    }

    /// Get the additional cycles required to execute all load or store instructions
    #[cfg(not(armv6m))]
    #[inline]
    pub fn lsu_count() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).lsucnt.read() as u8 }
    }

    /// Set the LSU count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_lsu_count(&mut self, count: u8) {
        unsafe { self.lsucnt.write(count as u32) }
    }

    /// Get the folded instruction count
    ///
    /// Increments on each instruction that takes 0 cycles.
    #[cfg(not(armv6m))]
    #[inline]
    pub fn fold_count() -> u8 {
        // NOTE(unsafe) atomic read with no side effects
        unsafe { (*Self::PTR).foldcnt.read() as u8 }
    }

    /// Set the folded instruction count
    #[cfg(not(armv6m))]
    #[inline]
    pub fn set_fold_count(&mut self, count: u8) {
        unsafe { self.foldcnt.write(count as u32) }
    }
}
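
// Usage sketch (illustrative, not part of this module): measuring elapsed clock
// cycles with the accessors above. This assumes `cortex_m::Peripherals::take()`
// provides the DCB and DWT handles, and that `DCB::enable_trace` is called first,
// since the DWT counters may not run while the global trace enable is clear.
//
//     let mut p = cortex_m::Peripherals::take().unwrap();
//     p.DCB.enable_trace();                 // required before the DWT counters count
//     p.DWT.enable_cycle_counter();
//
//     let start = DWT::cycle_count();
//     /* ... code being measured ... */
//     let elapsed = DWT::cycle_count().wrapping_sub(start);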