use super::super::unwind_rule::UnwindRuleAarch64;

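/// Detects whether an address lies inside an AArch64 function prologue by
/// inspecting the instructions around it, and accumulates the stack pointer
/// adjustment performed by the prologue instructions seen so far.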
struct PrologueDetectorAarch64 {
    sp_offset: i32,
}

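/// The outcome of stepping backwards over a single instruction.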
#[derive(Clone, Debug, PartialEq, Eq)]
enum PrologueStepResult {
    UnexpectedInstruction(UnexpectedInstructionType),
    ValidPrologueInstruction,
}

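/// The overall result of the prologue analysis.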
#[derive(Clone, Debug, PartialEq, Eq)]
enum PrologueResult {
    ProbablyAlreadyInBody(UnexpectedInstructionType),
    FoundFunctionStart { sp_offset: i32 },
}

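/// How strongly the upcoming instruction looks like part of a prologue.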
#[derive(Clone, Debug, PartialEq, Eq)]
enum PrologueInstructionType {
    NotExpectedInPrologue,
    CouldBePartOfPrologueIfThereIsAlsoAStackPointerSub,
    VeryLikelyPartOfPrologue,
}

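/// Reasons why an instruction ends the backwards scan.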
#[derive(Clone, Debug, PartialEq, Eq)]
enum UnexpectedInstructionType {
    StoreOfWrongSize,
    StoreReferenceRegisterNotSp,
    AddSubNotOperatingOnSp,
    NoNextInstruction,
    NoStackPointerSubBeforeStore,
    Unknown,
}

impl PrologueDetectorAarch64 {
    pub fn new() -> Self {
        Self { sp_offset: 0 }
    }

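    /// Analyze the bytes leading up to the current address (`slice_from_start`)
    /// and the bytes starting at the current address (`slice_to_end`). Returns
    /// either "probably already in the function body" or the stack pointer
    /// offset that the prologue has established up to this point.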
    pub fn analyze_slices(
        &mut self,
        slice_from_start: &[u8],
        slice_to_end: &[u8],
    ) -> PrologueResult {
        // We need at least one full instruction at the current address.
        if slice_to_end.len() < 4 {
            return PrologueResult::ProbablyAlreadyInBody(
                UnexpectedInstructionType::NoNextInstruction,
            );
        }
        let next_instruction = u32::from_le_bytes([
            slice_to_end[0],
            slice_to_end[1],
            slice_to_end[2],
            slice_to_end[3],
        ]);
        // If the upcoming instruction clearly doesn't belong to a prologue,
        // assume we are already in the function body.
        let next_instruction_type = Self::analyze_prologue_instruction_type(next_instruction);
        if next_instruction_type == PrologueInstructionType::NotExpectedInPrologue {
            return PrologueResult::ProbablyAlreadyInBody(UnexpectedInstructionType::Unknown);
        }
        // Walk backwards over the already-executed instructions, accumulating
        // the stack pointer adjustments they performed, until we hit something
        // that cannot be part of a prologue.
        let instructions = slice_from_start
            .chunks_exact(4)
            .map(|c| u32::from_le_bytes([c[0], c[1], c[2], c[3]]))
            .rev();
        for instruction in instructions {
            if let PrologueStepResult::UnexpectedInstruction(_) =
                self.reverse_step_instruction(instruction)
            {
                break;
            }
        }
        // A store without writeback only belongs to a prologue if a stack
        // pointer subtraction has made room for it first.
        if next_instruction_type
            == PrologueInstructionType::CouldBePartOfPrologueIfThereIsAlsoAStackPointerSub
            && self.sp_offset == 0
        {
            return PrologueResult::ProbablyAlreadyInBody(
                UnexpectedInstructionType::NoStackPointerSubBeforeStore,
            );
        }
        PrologueResult::FoundFunctionStart {
            sp_offset: self.sp_offset,
        }
    }

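    /// Classify a single instruction word: how likely is it to be part of a
    /// function prologue?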
    pub fn analyze_prologue_instruction_type(word: u32) -> PrologueInstructionType {
        // pacibsp or `mov x29, sp` (add x29, sp, #0).
        if word == 0xd503237f || word == 0x910003fd {
            return PrologueInstructionType::VeryLikelyPartOfPrologue;
        }

        let bits_22_to_32 = word >> 22;

        // Store pair of 64-bit registers (stp): load/store pair encoding with
        // V == 0 (integer registers) and L == 0 (store).
        if bits_22_to_32 & 0b1011111001 == 0b1010100000 {
            // Bits 24..23 select the addressing mode, bits 9..5 are the base register.
            let writeback_bits = bits_22_to_32 & 0b110;
            let reference_reg = ((word >> 5) & 0b11111) as u16;
            if writeback_bits == 0b000 || reference_reg != 31 {
                return PrologueInstructionType::NotExpectedInPrologue;
            }
            // Signed-offset form (no writeback): only a prologue instruction
            // if something else has already moved the stack pointer.
            if writeback_bits == 0b100 {
                return PrologueInstructionType::CouldBePartOfPrologueIfThereIsAlsoAStackPointerSub;
            }
            return PrologueInstructionType::VeryLikelyPartOfPrologue;
        }
        // 64-bit add/sub immediate (flags not set): accept only
        // `sub sp, sp, #imm` and `add x29, sp, #imm`.
        if bits_22_to_32 & 0b1011111110 == 0b1001000100 {
            let result_reg = (word & 0b11111) as u16;
            let input_reg = ((word >> 5) & 0b11111) as u16;
            let is_sub = ((word >> 30) & 0b1) == 0b1;
            let expected_result_reg = if is_sub { 31 } else { 29 };
            if input_reg != 31 || result_reg != expected_result_reg {
                return PrologueInstructionType::NotExpectedInPrologue;
            }
            return PrologueInstructionType::VeryLikelyPartOfPrologue;
        }
        PrologueInstructionType::NotExpectedInPrologue
    }

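    /// Step backwards over one already-executed instruction. Updates
    /// `sp_offset` for stack pointer adjustments, or reports why the
    /// instruction cannot be part of a prologue.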
    pub fn reverse_step_instruction(&mut self, word: u32) -> PrologueStepResult {
        // pacibsp
        if word == 0xd503237f {
            return PrologueStepResult::ValidPrologueInstruction;
        }

        // Store pair of 64-bit registers (stp).
        if (word >> 22) & 0b1011111001 == 0b1010100000 {
            // Bits 24..23 select the addressing mode; 0b00 is the no-allocate
            // (stnp) form, which prologues don't use.
            let writeback_bits = (word >> 23) & 0b11;
            if writeback_bits == 0b00 {
                return PrologueStepResult::UnexpectedInstruction(
                    UnexpectedInstructionType::StoreOfWrongSize,
                );
            }
            // The base register (bits 9..5) must be the stack pointer.
            let reference_reg = ((word >> 5) & 0b11111) as u16;
            if reference_reg != 31 {
                return PrologueStepResult::UnexpectedInstruction(
                    UnexpectedInstructionType::StoreReferenceRegisterNotSp,
                );
            }
            let is_preindexed_writeback = writeback_bits == 0b11;
            let is_postindexed_writeback = writeback_bits == 0b01;
            if is_preindexed_writeback || is_postindexed_writeback {
                // Sign-extend the 7-bit immediate and scale it by 8 (the size
                // of each stored register). Writeback forms move sp by this
                // (negative, in a prologue) amount, so stepping backwards we
                // subtract it from the accumulated offset.
                let imm7 = (((((word >> 15) & 0b1111111) as i16) << 9) >> 6) as i32;
                self.sp_offset -= imm7;
            }
            return PrologueStepResult::ValidPrologueInstruction;
        }
        // 64-bit sub immediate (flags not set): only `sub sp, sp, #imm` counts.
        if (word >> 23) & 0b111111111 == 0b110100010 {
            let result_reg = (word & 0b11111) as u16;
            let input_reg = ((word >> 5) & 0b11111) as u16;
            if result_reg != 31 || input_reg != 31 {
                return PrologueStepResult::UnexpectedInstruction(
                    UnexpectedInstructionType::AddSubNotOperatingOnSp,
                );
            }
            // imm12, optionally shifted left by 12 when bit 22 is set.
            let mut imm12 = ((word >> 10) & 0b111111111111) as i32;
            let shift_immediate_by_12 = ((word >> 22) & 0b1) == 0b1;
            if shift_immediate_by_12 {
                imm12 <<= 12;
            }
            self.sp_offset += imm12;
            return PrologueStepResult::ValidPrologueInstruction;
        }
        PrologueStepResult::UnexpectedInstruction(UnexpectedInstructionType::Unknown)
    }
}

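/// Tries to detect a function prologue around the current address. On success,
/// returns the unwind rule that compensates for the stack pointer adjustment
/// the prologue has performed so far.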
pub fn unwind_rule_from_detected_prologue(
    slice_from_start: &[u8],
    slice_to_end: &[u8],
) -> Option<UnwindRuleAarch64> {
    let mut detector = PrologueDetectorAarch64::new();
    match detector.analyze_slices(slice_from_start, slice_to_end) {
        PrologueResult::ProbablyAlreadyInBody(_) => None,
        PrologueResult::FoundFunctionStart { sp_offset } => {
            let sp_offset_by_16 = u16::try_from(sp_offset / 16).ok()?;
            let rule = if sp_offset_by_16 == 0 {
                UnwindRuleAarch64::NoOp
            } else {
                UnwindRuleAarch64::OffsetSp { sp_offset_by_16 }
            };
            Some(rule)
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_prologue_1() {
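        // A plain prologue; the bytes below decode to:
        //   sub  sp, sp, #0x50
        //   stp  x22, x21, [sp, #0x20]
        //   stp  x20, x19, [sp, #0x30]
        //   stp  x29, x30, [sp, #0x40]
        //   add  x29, sp, #0x40
        //   mov  x20, x4
        //   mov  x21, x1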
        let bytes = &[
            0xff, 0x43, 0x01, 0xd1, 0xf6, 0x57, 0x02, 0xa9, 0xf4, 0x4f, 0x03, 0xa9, 0xfd, 0x7b,
            0x04, 0xa9, 0xfd, 0x03, 0x01, 0x91, 0xf4, 0x03, 0x04, 0xaa, 0xf5, 0x03, 0x01, 0xaa,
        ];
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..0], &bytes[0..]),
            Some(UnwindRuleAarch64::NoOp)
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..4], &bytes[4..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 5 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..8], &bytes[8..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 5 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..12], &bytes[12..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 5 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..16], &bytes[16..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 5 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..20], &bytes[20..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..24], &bytes[24..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..28], &bytes[28..]),
            None
        );
    }

    #[test]
    fn test_prologue_with_pacibsp() {
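        // A prologue that starts with pacibsp and uses a pre-indexed store,
        // preceded by the tail of the previous function. The bytes decode to:
        //   str  w8, [x0, w9, uxtw #2]
        //   ret
        //   pacibsp
        //   stp  x24, x23, [sp, #-0x40]!
        //   stp  x22, x21, [sp, #0x10]
        //   stp  x20, x19, [sp, #0x20]
        //   stp  x29, x30, [sp, #0x30]
        //   add  x29, sp, #0x30
        //   mov  x19, x2
        //   mov  x20, x1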
        let bytes = &[
            0x08, 0x58, 0x29, 0xb8, 0xc0, 0x03, 0x5f, 0xd6, 0x7f, 0x23, 0x03, 0xd5, 0xf8, 0x5f,
            0xbc, 0xa9, 0xf6, 0x57, 0x01, 0xa9, 0xf4, 0x4f, 0x02, 0xa9, 0xfd, 0x7b, 0x03, 0xa9,
            0xfd, 0xc3, 0x00, 0x91, 0xf3, 0x03, 0x02, 0xaa, 0xf4, 0x03, 0x01, 0xaa,
        ];
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..0], &bytes[0..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..4], &bytes[4..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..8], &bytes[8..]),
            Some(UnwindRuleAarch64::NoOp)
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..12], &bytes[12..]),
            Some(UnwindRuleAarch64::NoOp)
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..16], &bytes[16..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 4 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..20], &bytes[20..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 4 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..24], &bytes[24..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 4 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..28], &bytes[28..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 4 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..32], &bytes[32..]),
            None
        );
    }

    #[test]
    fn test_prologue_with_mov_fp_sp() {
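        // A prologue that sets up the frame pointer with `mov x29, sp`.
        // The bytes decode to:
        //   pacibsp
        //   stp  x29, x30, [sp, #-0x10]!
        //   mov  x29, sp
        //   sub  w8, w3, #0x1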
        let bytes = &[
            0x7f, 0x23, 0x03, 0xd5, 0xfd, 0x7b, 0xbf, 0xa9, 0xfd, 0x03, 0x00, 0x91, 0x68, 0x04,
            0x00, 0x51,
        ];
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..0], &bytes[0..]),
            Some(UnwindRuleAarch64::NoOp)
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..4], &bytes[4..]),
            Some(UnwindRuleAarch64::NoOp)
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..8], &bytes[8..]),
            Some(UnwindRuleAarch64::OffsetSp { sp_offset_by_16: 1 })
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..12], &bytes[12..]),
            None
        );
    }

    #[test]
    fn test_no_prologue_despite_stack_store() {
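        // Stack stores in the middle of a function body, with no preceding
        // `sub sp, sp, #imm`, must not be mistaken for a prologue.
        // The bytes decode to:
        //   str  x8, [sp, #0x28]
        //   b    <+12>
        //   stp  xzr, xzr, [sp, #0x18]
        //   str  xzr, [sp, #0x28]
        //   mov  x0, sp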
        let bytes = &[
            0xe8, 0x17, 0x00, 0xf9, 0x03, 0x00, 0x00, 0x14, 0xff, 0xff, 0x01, 0xa9, 0xff, 0x17,
            0x00, 0xf9, 0xe0, 0x03, 0x00, 0x91,
        ];
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..0], &bytes[0..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..4], &bytes[4..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..8], &bytes[8..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..12], &bytes[12..]),
            None
        );
        assert_eq!(
            unwind_rule_from_detected_prologue(&bytes[..16], &bytes[16..]),
            None
        );
    }
}