/// RISC-V virtual-memory attributes, stored as a packed bitfield.
///
/// Only one field is defined: `riscvExecuteNever` at bit 0.
/// Backing storage is `SeL4Bitfield<u32, 1>` — presumably one `u32` word;
/// TODO confirm the meaning of the second `SeL4Bitfield` parameter.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct vm_attributes(pub SeL4Bitfield<u32, 1usize>);
/// C-style alias matching the seL4 API naming convention (`*_t`).
pub type vm_attributes_t = vm_attributes;
impl vm_attributes {
    /// Builds a `vm_attributes` with the given `riscvExecuteNever` field;
    /// all other bits start zeroed.
    pub fn new(riscvExecuteNever: u32) -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_riscvExecuteNever(riscvExecuteNever);
        this
    }
    /// Expands the packed bitfield into its plain-struct form.
    pub fn unpack(&self) -> vm_attributes_Unpacked {
        vm_attributes_Unpacked {
            riscvExecuteNever: self.get_riscvExecuteNever(),
        }
    }
    /// Reads `riscvExecuteNever` (bit 0).
    #[allow(dead_code)]
    pub fn get_riscvExecuteNever(&self) -> u32 {
        self.0.get_bits(0usize..1usize)
    }
    /// Writes `riscvExecuteNever` (bit 0).
    pub fn set_riscvExecuteNever(&mut self, riscvExecuteNever: u32) {
        self.0.set_bits(0usize..1usize, riscvExecuteNever)
    }
    /// Width of the `riscvExecuteNever` field, in bits.
    #[allow(dead_code)]
    pub const fn width_of_riscvExecuteNever() -> usize {
        1usize - 0usize
    }
}
28impl fmt::Debug for vm_attributes {
29 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
30 self.unpack().fmt(f)?;
31 write!(f, ".pack()")?;
32 Ok(())
33 }
34}
/// Unpacked (plain-struct) form of [`vm_attributes`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct vm_attributes_Unpacked {
    pub riscvExecuteNever: u32,
}
39impl vm_attributes_Unpacked {
40 pub fn pack(self) -> vm_attributes {
41 match self {
42 Self { riscvExecuteNever } => vm_attributes::new(riscvExecuteNever),
43 }
44 }
45}
/// The `NullFault` case of the seL4 fault union ("no fault").
///
/// Shares the `SeL4Bitfield<u32, 12>` backing storage of [`seL4_Fault`];
/// only the 4-bit tag (bits 0..4) is meaningful for this case.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct seL4_Fault_NullFault(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault_NullFault {
    /// Builds a `NullFault` value: all bits zero except the tag, which is
    /// stamped with `seL4_Fault_NullFault`.
    pub fn new() -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_seL4_FaultType(seL4_Fault_tag::seL4_Fault_NullFault);
        this
    }
    /// Expands into the (field-less) plain-struct form.
    pub fn unpack(&self) -> seL4_Fault_NullFault_Unpacked {
        seL4_Fault_NullFault_Unpacked {}
    }
    /// Reads the fault-type tag (bits 0..4).
    #[allow(dead_code)]
    fn get_seL4_FaultType(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
    /// Writes the fault-type tag (bits 0..4).
    fn set_seL4_FaultType(&mut self, seL4_FaultType: u32) {
        self.0.set_bits(0usize..4usize, seL4_FaultType)
    }
    /// Width of the fault-type tag, in bits.
    #[allow(dead_code)]
    const fn width_of_seL4_FaultType() -> usize {
        4usize - 0usize
    }
}
70impl fmt::Debug for seL4_Fault_NullFault {
71 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
72 self.unpack().fmt(f)?;
73 write!(f, ".pack()")?;
74 Ok(())
75 }
76}
/// Unpacked (plain-struct) form of [`seL4_Fault_NullFault`]; carries no fields.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct seL4_Fault_NullFault_Unpacked {}
79impl seL4_Fault_NullFault_Unpacked {
80 pub fn pack(self) -> seL4_Fault_NullFault {
81 match self {
82 Self {} => seL4_Fault_NullFault::new(),
83 }
84 }
85}
/// The `CapFault` case of the seL4 fault union (capability lookup failure).
///
/// Field layout (bit ranges): IP 224..256, Addr 192..224, InRecvPhase
/// 160..192, LookupFailureType 128..160, MR4 96..128, MR5 64..96,
/// MR6 32..64, tag 0..4.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct seL4_Fault_CapFault(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault_CapFault {
    /// Builds a `CapFault` from its fields and stamps the `CapFault` tag.
    pub fn new(
        IP: u32,
        Addr: u32,
        InRecvPhase: u32,
        LookupFailureType: u32,
        MR4: u32,
        MR5: u32,
        MR6: u32,
    ) -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_IP(IP);
        this.set_Addr(Addr);
        this.set_InRecvPhase(InRecvPhase);
        this.set_LookupFailureType(LookupFailureType);
        this.set_MR4(MR4);
        this.set_MR5(MR5);
        this.set_MR6(MR6);
        this.set_seL4_FaultType(seL4_Fault_tag::seL4_Fault_CapFault);
        this
    }
    /// Expands the packed bitfield into its plain-struct form.
    pub fn unpack(&self) -> seL4_Fault_CapFault_Unpacked {
        seL4_Fault_CapFault_Unpacked {
            IP: self.get_IP(),
            Addr: self.get_Addr(),
            InRecvPhase: self.get_InRecvPhase(),
            LookupFailureType: self.get_LookupFailureType(),
            MR4: self.get_MR4(),
            MR5: self.get_MR5(),
            MR6: self.get_MR6(),
        }
    }
    /// Reads `IP` (bits 224..256).
    #[allow(dead_code)]
    pub fn get_IP(&self) -> u32 {
        self.0.get_bits(224usize..256usize)
    }
    /// Writes `IP` (bits 224..256).
    pub fn set_IP(&mut self, IP: u32) {
        self.0.set_bits(224usize..256usize, IP)
    }
    /// Width of `IP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_IP() -> usize {
        256usize - 224usize
    }
    /// Reads `Addr` (bits 192..224).
    #[allow(dead_code)]
    pub fn get_Addr(&self) -> u32 {
        self.0.get_bits(192usize..224usize)
    }
    /// Writes `Addr` (bits 192..224).
    pub fn set_Addr(&mut self, Addr: u32) {
        self.0.set_bits(192usize..224usize, Addr)
    }
    /// Width of `Addr`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_Addr() -> usize {
        224usize - 192usize
    }
    /// Reads `InRecvPhase` (bits 160..192).
    #[allow(dead_code)]
    pub fn get_InRecvPhase(&self) -> u32 {
        self.0.get_bits(160usize..192usize)
    }
    /// Writes `InRecvPhase` (bits 160..192).
    pub fn set_InRecvPhase(&mut self, InRecvPhase: u32) {
        self.0.set_bits(160usize..192usize, InRecvPhase)
    }
    /// Width of `InRecvPhase`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_InRecvPhase() -> usize {
        192usize - 160usize
    }
    /// Reads `LookupFailureType` (bits 128..160).
    #[allow(dead_code)]
    pub fn get_LookupFailureType(&self) -> u32 {
        self.0.get_bits(128usize..160usize)
    }
    /// Writes `LookupFailureType` (bits 128..160).
    pub fn set_LookupFailureType(&mut self, LookupFailureType: u32) {
        self.0.set_bits(128usize..160usize, LookupFailureType)
    }
    /// Width of `LookupFailureType`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_LookupFailureType() -> usize {
        160usize - 128usize
    }
    /// Reads `MR4` (bits 96..128).
    #[allow(dead_code)]
    pub fn get_MR4(&self) -> u32 {
        self.0.get_bits(96usize..128usize)
    }
    /// Writes `MR4` (bits 96..128).
    pub fn set_MR4(&mut self, MR4: u32) {
        self.0.set_bits(96usize..128usize, MR4)
    }
    /// Width of `MR4`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_MR4() -> usize {
        128usize - 96usize
    }
    /// Reads `MR5` (bits 64..96).
    #[allow(dead_code)]
    pub fn get_MR5(&self) -> u32 {
        self.0.get_bits(64usize..96usize)
    }
    /// Writes `MR5` (bits 64..96).
    pub fn set_MR5(&mut self, MR5: u32) {
        self.0.set_bits(64usize..96usize, MR5)
    }
    /// Width of `MR5`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_MR5() -> usize {
        96usize - 64usize
    }
    /// Reads `MR6` (bits 32..64).
    #[allow(dead_code)]
    pub fn get_MR6(&self) -> u32 {
        self.0.get_bits(32usize..64usize)
    }
    /// Writes `MR6` (bits 32..64).
    pub fn set_MR6(&mut self, MR6: u32) {
        self.0.set_bits(32usize..64usize, MR6)
    }
    /// Width of `MR6`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_MR6() -> usize {
        64usize - 32usize
    }
    /// Reads the fault-type tag (bits 0..4).
    #[allow(dead_code)]
    fn get_seL4_FaultType(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
    /// Writes the fault-type tag (bits 0..4).
    fn set_seL4_FaultType(&mut self, seL4_FaultType: u32) {
        self.0.set_bits(0usize..4usize, seL4_FaultType)
    }
    /// Width of the fault-type tag, in bits.
    #[allow(dead_code)]
    const fn width_of_seL4_FaultType() -> usize {
        4usize - 0usize
    }
}
210impl fmt::Debug for seL4_Fault_CapFault {
211 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
212 self.unpack().fmt(f)?;
213 write!(f, ".pack()")?;
214 Ok(())
215 }
216}
/// Unpacked (plain-struct) form of [`seL4_Fault_CapFault`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct seL4_Fault_CapFault_Unpacked {
    pub IP: u32,
    pub Addr: u32,
    pub InRecvPhase: u32,
    pub LookupFailureType: u32,
    pub MR4: u32,
    pub MR5: u32,
    pub MR6: u32,
}
227impl seL4_Fault_CapFault_Unpacked {
228 pub fn pack(self) -> seL4_Fault_CapFault {
229 match self {
230 Self { IP, Addr, InRecvPhase, LookupFailureType, MR4, MR5, MR6 } => {
231 seL4_Fault_CapFault::new(
232 IP,
233 Addr,
234 InRecvPhase,
235 LookupFailureType,
236 MR4,
237 MR5,
238 MR6,
239 )
240 }
241 }
242 }
243}
/// The `UnknownSyscall` case of the seL4 fault union.
///
/// Field layout (bit ranges): FaultIP 352..384, SP 320..352, RA 288..320,
/// A0..A6 at descending 32-bit slots from 256, Syscall 32..64, tag 0..4.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct seL4_Fault_UnknownSyscall(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault_UnknownSyscall {
    /// Builds an `UnknownSyscall` fault from the saved register values and
    /// stamps the `UnknownSyscall` tag.
    pub fn new(
        FaultIP: u32,
        SP: u32,
        RA: u32,
        A0: u32,
        A1: u32,
        A2: u32,
        A3: u32,
        A4: u32,
        A5: u32,
        A6: u32,
        Syscall: u32,
    ) -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_FaultIP(FaultIP);
        this.set_SP(SP);
        this.set_RA(RA);
        this.set_A0(A0);
        this.set_A1(A1);
        this.set_A2(A2);
        this.set_A3(A3);
        this.set_A4(A4);
        this.set_A5(A5);
        this.set_A6(A6);
        this.set_Syscall(Syscall);
        this.set_seL4_FaultType(seL4_Fault_tag::seL4_Fault_UnknownSyscall);
        this
    }
    /// Expands the packed bitfield into its plain-struct form.
    pub fn unpack(&self) -> seL4_Fault_UnknownSyscall_Unpacked {
        seL4_Fault_UnknownSyscall_Unpacked {
            FaultIP: self.get_FaultIP(),
            SP: self.get_SP(),
            RA: self.get_RA(),
            A0: self.get_A0(),
            A1: self.get_A1(),
            A2: self.get_A2(),
            A3: self.get_A3(),
            A4: self.get_A4(),
            A5: self.get_A5(),
            A6: self.get_A6(),
            Syscall: self.get_Syscall(),
        }
    }
    /// Reads `FaultIP` (bits 352..384).
    #[allow(dead_code)]
    pub fn get_FaultIP(&self) -> u32 {
        self.0.get_bits(352usize..384usize)
    }
    /// Writes `FaultIP` (bits 352..384).
    pub fn set_FaultIP(&mut self, FaultIP: u32) {
        self.0.set_bits(352usize..384usize, FaultIP)
    }
    /// Width of `FaultIP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_FaultIP() -> usize {
        384usize - 352usize
    }
    /// Reads `SP` (bits 320..352).
    #[allow(dead_code)]
    pub fn get_SP(&self) -> u32 {
        self.0.get_bits(320usize..352usize)
    }
    /// Writes `SP` (bits 320..352).
    pub fn set_SP(&mut self, SP: u32) {
        self.0.set_bits(320usize..352usize, SP)
    }
    /// Width of `SP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_SP() -> usize {
        352usize - 320usize
    }
    /// Reads `RA` (bits 288..320).
    #[allow(dead_code)]
    pub fn get_RA(&self) -> u32 {
        self.0.get_bits(288usize..320usize)
    }
    /// Writes `RA` (bits 288..320).
    pub fn set_RA(&mut self, RA: u32) {
        self.0.set_bits(288usize..320usize, RA)
    }
    /// Width of `RA`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_RA() -> usize {
        320usize - 288usize
    }
    /// Reads `A0` (bits 256..288).
    #[allow(dead_code)]
    pub fn get_A0(&self) -> u32 {
        self.0.get_bits(256usize..288usize)
    }
    /// Writes `A0` (bits 256..288).
    pub fn set_A0(&mut self, A0: u32) {
        self.0.set_bits(256usize..288usize, A0)
    }
    /// Width of `A0`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A0() -> usize {
        288usize - 256usize
    }
    /// Reads `A1` (bits 224..256).
    #[allow(dead_code)]
    pub fn get_A1(&self) -> u32 {
        self.0.get_bits(224usize..256usize)
    }
    /// Writes `A1` (bits 224..256).
    pub fn set_A1(&mut self, A1: u32) {
        self.0.set_bits(224usize..256usize, A1)
    }
    /// Width of `A1`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A1() -> usize {
        256usize - 224usize
    }
    /// Reads `A2` (bits 192..224).
    #[allow(dead_code)]
    pub fn get_A2(&self) -> u32 {
        self.0.get_bits(192usize..224usize)
    }
    /// Writes `A2` (bits 192..224).
    pub fn set_A2(&mut self, A2: u32) {
        self.0.set_bits(192usize..224usize, A2)
    }
    /// Width of `A2`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A2() -> usize {
        224usize - 192usize
    }
    /// Reads `A3` (bits 160..192).
    #[allow(dead_code)]
    pub fn get_A3(&self) -> u32 {
        self.0.get_bits(160usize..192usize)
    }
    /// Writes `A3` (bits 160..192).
    pub fn set_A3(&mut self, A3: u32) {
        self.0.set_bits(160usize..192usize, A3)
    }
    /// Width of `A3`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A3() -> usize {
        192usize - 160usize
    }
    /// Reads `A4` (bits 128..160).
    #[allow(dead_code)]
    pub fn get_A4(&self) -> u32 {
        self.0.get_bits(128usize..160usize)
    }
    /// Writes `A4` (bits 128..160).
    pub fn set_A4(&mut self, A4: u32) {
        self.0.set_bits(128usize..160usize, A4)
    }
    /// Width of `A4`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A4() -> usize {
        160usize - 128usize
    }
    /// Reads `A5` (bits 96..128).
    #[allow(dead_code)]
    pub fn get_A5(&self) -> u32 {
        self.0.get_bits(96usize..128usize)
    }
    /// Writes `A5` (bits 96..128).
    pub fn set_A5(&mut self, A5: u32) {
        self.0.set_bits(96usize..128usize, A5)
    }
    /// Width of `A5`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A5() -> usize {
        128usize - 96usize
    }
    /// Reads `A6` (bits 64..96).
    #[allow(dead_code)]
    pub fn get_A6(&self) -> u32 {
        self.0.get_bits(64usize..96usize)
    }
    /// Writes `A6` (bits 64..96).
    pub fn set_A6(&mut self, A6: u32) {
        self.0.set_bits(64usize..96usize, A6)
    }
    /// Width of `A6`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_A6() -> usize {
        96usize - 64usize
    }
    /// Reads `Syscall` (bits 32..64).
    #[allow(dead_code)]
    pub fn get_Syscall(&self) -> u32 {
        self.0.get_bits(32usize..64usize)
    }
    /// Writes `Syscall` (bits 32..64).
    pub fn set_Syscall(&mut self, Syscall: u32) {
        self.0.set_bits(32usize..64usize, Syscall)
    }
    /// Width of `Syscall`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_Syscall() -> usize {
        64usize - 32usize
    }
    /// Reads the fault-type tag (bits 0..4).
    #[allow(dead_code)]
    fn get_seL4_FaultType(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
    /// Writes the fault-type tag (bits 0..4).
    fn set_seL4_FaultType(&mut self, seL4_FaultType: u32) {
        self.0.set_bits(0usize..4usize, seL4_FaultType)
    }
    /// Width of the fault-type tag, in bits.
    #[allow(dead_code)]
    const fn width_of_seL4_FaultType() -> usize {
        4usize - 0usize
    }
}
424impl fmt::Debug for seL4_Fault_UnknownSyscall {
425 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
426 self.unpack().fmt(f)?;
427 write!(f, ".pack()")?;
428 Ok(())
429 }
430}
/// Unpacked (plain-struct) form of [`seL4_Fault_UnknownSyscall`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct seL4_Fault_UnknownSyscall_Unpacked {
    pub FaultIP: u32,
    pub SP: u32,
    pub RA: u32,
    pub A0: u32,
    pub A1: u32,
    pub A2: u32,
    pub A3: u32,
    pub A4: u32,
    pub A5: u32,
    pub A6: u32,
    pub Syscall: u32,
}
445impl seL4_Fault_UnknownSyscall_Unpacked {
446 pub fn pack(self) -> seL4_Fault_UnknownSyscall {
447 match self {
448 Self { FaultIP, SP, RA, A0, A1, A2, A3, A4, A5, A6, Syscall } => {
449 seL4_Fault_UnknownSyscall::new(
450 FaultIP,
451 SP,
452 RA,
453 A0,
454 A1,
455 A2,
456 A3,
457 A4,
458 A5,
459 A6,
460 Syscall,
461 )
462 }
463 }
464 }
465}
/// The `UserException` case of the seL4 fault union.
///
/// Field layout (bit ranges): FaultIP 128..160, SP 96..128, Number 64..96,
/// Code 32..64, tag 0..4.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct seL4_Fault_UserException(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault_UserException {
    /// Builds a `UserException` from its fields and stamps the
    /// `UserException` tag.
    pub fn new(FaultIP: u32, SP: u32, Number: u32, Code: u32) -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_FaultIP(FaultIP);
        this.set_SP(SP);
        this.set_Number(Number);
        this.set_Code(Code);
        this.set_seL4_FaultType(seL4_Fault_tag::seL4_Fault_UserException);
        this
    }
    /// Expands the packed bitfield into its plain-struct form.
    pub fn unpack(&self) -> seL4_Fault_UserException_Unpacked {
        seL4_Fault_UserException_Unpacked {
            FaultIP: self.get_FaultIP(),
            SP: self.get_SP(),
            Number: self.get_Number(),
            Code: self.get_Code(),
        }
    }
    /// Reads `FaultIP` (bits 128..160).
    #[allow(dead_code)]
    pub fn get_FaultIP(&self) -> u32 {
        self.0.get_bits(128usize..160usize)
    }
    /// Writes `FaultIP` (bits 128..160).
    pub fn set_FaultIP(&mut self, FaultIP: u32) {
        self.0.set_bits(128usize..160usize, FaultIP)
    }
    /// Width of `FaultIP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_FaultIP() -> usize {
        160usize - 128usize
    }
    /// Reads `SP` (bits 96..128).
    #[allow(dead_code)]
    pub fn get_SP(&self) -> u32 {
        self.0.get_bits(96usize..128usize)
    }
    /// Writes `SP` (bits 96..128).
    pub fn set_SP(&mut self, SP: u32) {
        self.0.set_bits(96usize..128usize, SP)
    }
    /// Width of `SP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_SP() -> usize {
        128usize - 96usize
    }
    /// Reads `Number` (bits 64..96).
    #[allow(dead_code)]
    pub fn get_Number(&self) -> u32 {
        self.0.get_bits(64usize..96usize)
    }
    /// Writes `Number` (bits 64..96).
    pub fn set_Number(&mut self, Number: u32) {
        self.0.set_bits(64usize..96usize, Number)
    }
    /// Width of `Number`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_Number() -> usize {
        96usize - 64usize
    }
    /// Reads `Code` (bits 32..64).
    #[allow(dead_code)]
    pub fn get_Code(&self) -> u32 {
        self.0.get_bits(32usize..64usize)
    }
    /// Writes `Code` (bits 32..64).
    pub fn set_Code(&mut self, Code: u32) {
        self.0.set_bits(32usize..64usize, Code)
    }
    /// Width of `Code`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_Code() -> usize {
        64usize - 32usize
    }
    /// Reads the fault-type tag (bits 0..4).
    #[allow(dead_code)]
    fn get_seL4_FaultType(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
    /// Writes the fault-type tag (bits 0..4).
    fn set_seL4_FaultType(&mut self, seL4_FaultType: u32) {
        self.0.set_bits(0usize..4usize, seL4_FaultType)
    }
    /// Width of the fault-type tag, in bits.
    #[allow(dead_code)]
    const fn width_of_seL4_FaultType() -> usize {
        4usize - 0usize
    }
}
543impl fmt::Debug for seL4_Fault_UserException {
544 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
545 self.unpack().fmt(f)?;
546 write!(f, ".pack()")?;
547 Ok(())
548 }
549}
/// Unpacked (plain-struct) form of [`seL4_Fault_UserException`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct seL4_Fault_UserException_Unpacked {
    pub FaultIP: u32,
    pub SP: u32,
    pub Number: u32,
    pub Code: u32,
}
557impl seL4_Fault_UserException_Unpacked {
558 pub fn pack(self) -> seL4_Fault_UserException {
559 match self {
560 Self { FaultIP, SP, Number, Code } => {
561 seL4_Fault_UserException::new(FaultIP, SP, Number, Code)
562 }
563 }
564 }
565}
/// The `VMFault` case of the seL4 fault union (virtual-memory fault).
///
/// Field layout (bit ranges): IP 96..128, Addr 64..96, PrefetchFault 32..64,
/// FSR 27..32 (5 bits), tag 0..4.
#[repr(transparent)]
#[derive(Clone, Eq, PartialEq)]
pub struct seL4_Fault_VMFault(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault_VMFault {
    /// Builds a `VMFault` from its fields and stamps the `VMFault` tag.
    pub fn new(IP: u32, Addr: u32, PrefetchFault: u32, FSR: u32) -> Self {
        let mut this = Self(Bitfield::zeroed());
        this.set_IP(IP);
        this.set_Addr(Addr);
        this.set_PrefetchFault(PrefetchFault);
        this.set_FSR(FSR);
        this.set_seL4_FaultType(seL4_Fault_tag::seL4_Fault_VMFault);
        this
    }
    /// Expands the packed bitfield into its plain-struct form.
    pub fn unpack(&self) -> seL4_Fault_VMFault_Unpacked {
        seL4_Fault_VMFault_Unpacked {
            IP: self.get_IP(),
            Addr: self.get_Addr(),
            PrefetchFault: self.get_PrefetchFault(),
            FSR: self.get_FSR(),
        }
    }
    /// Reads `IP` (bits 96..128).
    #[allow(dead_code)]
    pub fn get_IP(&self) -> u32 {
        self.0.get_bits(96usize..128usize)
    }
    /// Writes `IP` (bits 96..128).
    pub fn set_IP(&mut self, IP: u32) {
        self.0.set_bits(96usize..128usize, IP)
    }
    /// Width of `IP`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_IP() -> usize {
        128usize - 96usize
    }
    /// Reads `Addr` (bits 64..96).
    #[allow(dead_code)]
    pub fn get_Addr(&self) -> u32 {
        self.0.get_bits(64usize..96usize)
    }
    /// Writes `Addr` (bits 64..96).
    pub fn set_Addr(&mut self, Addr: u32) {
        self.0.set_bits(64usize..96usize, Addr)
    }
    /// Width of `Addr`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_Addr() -> usize {
        96usize - 64usize
    }
    /// Reads `PrefetchFault` (bits 32..64).
    #[allow(dead_code)]
    pub fn get_PrefetchFault(&self) -> u32 {
        self.0.get_bits(32usize..64usize)
    }
    /// Writes `PrefetchFault` (bits 32..64).
    pub fn set_PrefetchFault(&mut self, PrefetchFault: u32) {
        self.0.set_bits(32usize..64usize, PrefetchFault)
    }
    /// Width of `PrefetchFault`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_PrefetchFault() -> usize {
        64usize - 32usize
    }
    /// Reads `FSR` (bits 27..32 — note: only 5 bits wide, unlike the other
    /// fields here).
    #[allow(dead_code)]
    pub fn get_FSR(&self) -> u32 {
        self.0.get_bits(27usize..32usize)
    }
    /// Writes `FSR` (bits 27..32).
    pub fn set_FSR(&mut self, FSR: u32) {
        self.0.set_bits(27usize..32usize, FSR)
    }
    /// Width of `FSR`, in bits.
    #[allow(dead_code)]
    pub const fn width_of_FSR() -> usize {
        32usize - 27usize
    }
    /// Reads the fault-type tag (bits 0..4).
    #[allow(dead_code)]
    fn get_seL4_FaultType(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
    /// Writes the fault-type tag (bits 0..4).
    fn set_seL4_FaultType(&mut self, seL4_FaultType: u32) {
        self.0.set_bits(0usize..4usize, seL4_FaultType)
    }
    /// Width of the fault-type tag, in bits.
    #[allow(dead_code)]
    const fn width_of_seL4_FaultType() -> usize {
        4usize - 0usize
    }
}
643impl fmt::Debug for seL4_Fault_VMFault {
644 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
645 self.unpack().fmt(f)?;
646 write!(f, ".pack()")?;
647 Ok(())
648 }
649}
/// Unpacked (plain-struct) form of [`seL4_Fault_VMFault`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct seL4_Fault_VMFault_Unpacked {
    pub IP: u32,
    pub Addr: u32,
    pub PrefetchFault: u32,
    pub FSR: u32,
}
657impl seL4_Fault_VMFault_Unpacked {
658 pub fn pack(self) -> seL4_Fault_VMFault {
659 match self {
660 Self { IP, Addr, PrefetchFault, FSR } => {
661 seL4_Fault_VMFault::new(IP, Addr, PrefetchFault, FSR)
662 }
663 }
664 }
665}
/// Discriminant values for the 4-bit fault-type tag stored in bits 0..4 of
/// every fault bitfield. Mirrors the C `seL4_Fault_tag` enum.
pub mod seL4_Fault_tag {
    pub const seL4_Fault_NullFault: u32 = 0;
    pub const seL4_Fault_CapFault: u32 = 1;
    pub const seL4_Fault_UnknownSyscall: u32 = 2;
    pub const seL4_Fault_UserException: u32 = 3;
    // NOTE(review): value 4 is intentionally absent — presumably reserved for
    // a config-gated fault type in the seL4 numbering; `seL4_Fault::splay`
    // panics if it is ever encountered. TODO confirm against the kernel's
    // generated headers.
    pub const seL4_Fault_VMFault: u32 = 5;
}
/// Tagged-union form of an seL4 fault: the raw bitfield plus a 4-bit tag
/// (bits 0..4) selecting which case the remaining bits encode.
#[repr(transparent)]
#[derive(Clone, PartialEq, Eq)]
pub struct seL4_Fault(pub SeL4Bitfield<u32, 12usize>);
impl seL4_Fault {
    /// Dispatches on the tag and re-wraps the same backing bitfield as the
    /// matching case type. No bits are copied or modified.
    ///
    /// # Panics
    ///
    /// Panics if the tag is not one of the known `seL4_Fault_tag` values
    /// (e.g. the unused value 4).
    pub fn splay(self) -> seL4_Fault_Splayed {
        match self.get_tag() {
            seL4_Fault_tag::seL4_Fault_NullFault => {
                seL4_Fault_Splayed::NullFault(seL4_Fault_NullFault(self.0))
            }
            seL4_Fault_tag::seL4_Fault_CapFault => {
                seL4_Fault_Splayed::CapFault(seL4_Fault_CapFault(self.0))
            }
            seL4_Fault_tag::seL4_Fault_UnknownSyscall => {
                seL4_Fault_Splayed::UnknownSyscall(seL4_Fault_UnknownSyscall(self.0))
            }
            seL4_Fault_tag::seL4_Fault_UserException => {
                seL4_Fault_Splayed::UserException(seL4_Fault_UserException(self.0))
            }
            seL4_Fault_tag::seL4_Fault_VMFault => {
                seL4_Fault_Splayed::VMFault(seL4_Fault_VMFault(self.0))
            }
            _ => panic!(),
        }
    }
    /// Reads the fault-type tag (bits 0..4).
    pub fn get_tag(&self) -> u32 {
        self.0.get_bits(0usize..4usize)
    }
}
701impl fmt::Debug for seL4_Fault {
702 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
703 self.clone().splay().fmt(f)?;
704 write!(f, ".unsplay()")?;
705 Ok(())
706 }
707}
/// Case-dispatched view of [`seL4_Fault`]: one variant per fault tag, each
/// wrapping the corresponding case type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum seL4_Fault_Splayed {
    NullFault(seL4_Fault_NullFault),
    CapFault(seL4_Fault_CapFault),
    UnknownSyscall(seL4_Fault_UnknownSyscall),
    UserException(seL4_Fault_UserException),
    VMFault(seL4_Fault_VMFault),
}
716impl seL4_Fault_Splayed {
717 pub fn unsplay(self) -> seL4_Fault {
718 match self {
719 seL4_Fault_Splayed::NullFault(seL4_Fault_NullFault(bitfield)) => {
720 seL4_Fault(bitfield)
721 }
722 seL4_Fault_Splayed::CapFault(seL4_Fault_CapFault(bitfield)) => {
723 seL4_Fault(bitfield)
724 }
725 seL4_Fault_Splayed::UnknownSyscall(seL4_Fault_UnknownSyscall(bitfield)) => {
726 seL4_Fault(bitfield)
727 }
728 seL4_Fault_Splayed::UserException(seL4_Fault_UserException(bitfield)) => {
729 seL4_Fault(bitfield)
730 }
731 seL4_Fault_Splayed::VMFault(seL4_Fault_VMFault(bitfield)) => {
732 seL4_Fault(bitfield)
733 }
734 }
735 }
736}
737impl seL4_Fault_NullFault {
738 pub fn unsplay(self) -> seL4_Fault {
739 seL4_Fault(self.0)
740 }
741}
742impl seL4_Fault_NullFault_Unpacked {
743 pub fn unsplay(self) -> seL4_Fault {
744 self.pack().unsplay()
745 }
746}
747impl seL4_Fault_CapFault {
748 pub fn unsplay(self) -> seL4_Fault {
749 seL4_Fault(self.0)
750 }
751}
752impl seL4_Fault_CapFault_Unpacked {
753 pub fn unsplay(self) -> seL4_Fault {
754 self.pack().unsplay()
755 }
756}
757impl seL4_Fault_UnknownSyscall {
758 pub fn unsplay(self) -> seL4_Fault {
759 seL4_Fault(self.0)
760 }
761}
762impl seL4_Fault_UnknownSyscall_Unpacked {
763 pub fn unsplay(self) -> seL4_Fault {
764 self.pack().unsplay()
765 }
766}
767impl seL4_Fault_UserException {
768 pub fn unsplay(self) -> seL4_Fault {
769 seL4_Fault(self.0)
770 }
771}
772impl seL4_Fault_UserException_Unpacked {
773 pub fn unsplay(self) -> seL4_Fault {
774 self.pack().unsplay()
775 }
776}
777impl seL4_Fault_VMFault {
778 pub fn unsplay(self) -> seL4_Fault {
779 seL4_Fault(self.0)
780 }
781}
782impl seL4_Fault_VMFault_Unpacked {
783 pub fn unsplay(self) -> seL4_Fault {
784 self.pack().unsplay()
785 }
786}