/*
 * Copyright (c) 2013 The Native Client Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_
#define NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_

#include "native_client/src/trusted/validator_arm/inst_classes.h"
#include "native_client/src/trusted/validator_arm/validator.h"


// The following static inline methods are defined here so that methods in
// class ClassDecoder (and classes derived from it) can call them inline.
// Keeping all code for each type of violation in a single file makes it
// easier to maintain than spreading some of it across the Python code
// generator files.

namespace nacl_arm_dec {

// Reports any unsafe load/store of a base address by the given instruction
// pair. If the instruction pair defines a safe load/store of a base address,
// it updates the Critical set with the address of the second instruction,
// so that later code can check that the instruction pair is atomic.
//
// See the comment associated with Violation::LOADSTORE_VIOLATION for details
// on when a base address is considered safe.
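//
// As an illustration (the exact immediate is whatever sfi.data_address_mask()
// requires; 0xC0000000 is the typical NaCl ARM value), the usual masked pair
// looks like:
//     bic r0, r0, #0xC0000000  ; clear the data address mask bits
//     ldr r1, [r0]             ; access memory through the masked base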
static inline ViolationSet get_loadstore_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {
  Register base = second.base_address_register();

  if (base.Equals(Register::None())  // not a load/store
      || sfi.is_data_address_register(base)) {
    return kNoViolations;
  }

  // PC + immediate addressing is always safe.
  if (second.is_literal_load()) return kNoViolations;

  // The following checks if this represents a thread address pointer access,
  // which means the instruction must be one of the following forms:
  //     ldr Rn, [r9]     ; load user thread pointer.
  //     ldr Rn, [r9, #4] ; load IRT thread pointer.
  if (second.is_load_thread_address_pointer()) return kNoViolations;

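  // Mask-then-access pattern: the first instruction writes the base with the
  // data address mask bits cleared and always executes when the load/store
  // does.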
  if (first.defines(base)
      && first.clears_bits(sfi.data_address_mask())
      && first.always_dominates(second)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical, LOADSTORE_CROSSES_BUNDLE_VIOLATION);
  }

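  // Conditional-access pattern (only on CPUs where the validator allows it):
  // the first instruction sets Z when the base's mask bits are already clear
  // (e.g. tst r0, #0xC0000000) and the load/store executes only under the EQ
  // condition (e.g. ldreq r1, [r0]).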
  if (sfi.conditional_memory_access_allowed_for_sfi() &&
      first.sets_Z_if_bits_clear(base, sfi.data_address_mask()) &&
      second.is_eq_conditional_on(first)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical,
        LOADSTORE_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(LOADSTORE_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_loadstore_violations (above).
static inline void generate_loadstore_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations, LOADSTORE_CROSSES_BUNDLE_VIOLATION)) {
    out->ReportProblemDiagnostic(
        LOADSTORE_CROSSES_BUNDLE_VIOLATION,
        second.addr(),
        "Load/store base %s is not properly masked, "
        "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32
        "] crosses bundle boundary.",
        second.base_address_register().ToString(),
        first.addr(),
        second.addr());
  }
  if (ContainsViolation(violations, LOADSTORE_VIOLATION)) {
    Register base = second.base_address_register();

    if (first.defines(base)) {
      if (first.clears_bits(sfi.data_address_mask())) {
        if (first.defines(Register::Conditions())) {
          out->ReportProblemDiagnostic(
              LOADSTORE_VIOLATION,
              second.addr(),
              "Load/store base %s is not properly masked, "
              "because instruction %08" NACL_PRIx32
              " sets APSR condition flags.",
              base.ToString(),
              first.addr());
        } else {
          out->ReportProblemDiagnostic(
              LOADSTORE_VIOLATION,
              second.addr(),
              "Load/store base %s is not properly masked, "
              "because the conditions (%s, %s) on "
              "[%08" NACL_PRIx32 ", %08" NACL_PRIx32
              "] don't guarantee atomicity",
              base.ToString(),
              Instruction::ToString(first.inst().GetCondition()),
              Instruction::ToString(second.inst().GetCondition()),
              first.addr(),
              second.addr());
        }
      } else {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked.",
            base.ToString());
      }
    } else if (first.sets_Z_if_bits_clear(base, sfi.data_address_mask())) {
      if (sfi.conditional_memory_access_allowed_for_sfi()) {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked, because "
            "%08" NACL_PRIx32 " is not conditional on EQ",
            base.ToString(),
            second.addr());
      } else {
        out->ReportProblemDiagnostic(
            LOADSTORE_VIOLATION,
            second.addr(),
            "Load/store base %s is not properly masked, "
            "because [%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] instruction "
            "pair is disallowed on this CPU",
            base.ToString(),
            first.addr(),
            second.addr());
      }
    } else if (base.Equals(Register::Pc())) {
      const char* pc = Register::Pc().ToString();
      out->ReportProblemDiagnostic(
          LOADSTORE_VIOLATION,
          second.addr(),
          "Native Client only allows updates on %s of "
          "the form '%s + immediate'.",
          pc,
          pc);
    } else {
      out->ReportProblemDiagnostic(
          LOADSTORE_VIOLATION,
          second.addr(),
          "Load/store base %s is not properly masked.",
          base.ToString());
    }
  }
}

// Reports any unsafe indirect branches. If the instruction pair defines
// a safe indirect branch, it updates the Critical set with the address
// of the branch, so that later code can check that the instruction pair
// is atomic.
//
// A destination address is safe if it has specific bits masked off by its
// immediate predecessor.
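//
// For illustration (the exact immediate is whatever sfi.code_address_mask()
// requires; 0xC000000F is the typical NaCl ARM value), a safe indirect branch
// pair looks like:
//     bic lr, lr, #0xC000000F  ; clear the code address mask bits
//     bx lr                    ; branch through the masked register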
static inline ViolationSet get_branch_mask_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {
  Register target(second.branch_target_register());
  if (target.Equals(Register::None())) return kNoViolations;

  if (first.defines(target) &&
      first.clears_bits(sfi.code_address_mask()) &&
      first.always_dominates(second)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical, BRANCH_MASK_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(BRANCH_MASK_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_branch_mask_violations (above).
static inline void generate_branch_mask_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations, BRANCH_MASK_CROSSES_BUNDLE_VIOLATION)) {
    out->ReportProblemDiagnostic(
        BRANCH_MASK_CROSSES_BUNDLE_VIOLATION,
        second.addr(),
        "Destination branch on %s is not properly masked, "
        "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] "
        "crosses bundle boundary",
        second.branch_target_register().ToString(),
        first.addr(),
        second.addr());
  }
  if (ContainsViolation(violations, BRANCH_MASK_VIOLATION)) {
    Register target(second.branch_target_register());
    if (first.defines(target)) {
      if (first.clears_bits(sfi.code_address_mask())) {
        if (first.defines(Register::Conditions())) {
          out->ReportProblemDiagnostic(
              BRANCH_MASK_VIOLATION,
              second.addr(),
              "Destination branch on %s is not properly masked, "
              "because instruction %08" NACL_PRIx32
              " sets APSR condition flags",
              target.ToString(),
              first.addr());
        } else {
          out->ReportProblemDiagnostic(
              BRANCH_MASK_VIOLATION,
              second.addr(),
              "Destination branch on %s is not properly masked, "
              "because the conditions (%s, %s) on "
              "[%08" NACL_PRIx32 ", %08" NACL_PRIx32
              "] don't guarantee atomicity",
              target.ToString(),
              Instruction::ToString(first.inst().GetCondition()),
              Instruction::ToString(second.inst().GetCondition()),
              first.addr(),
              second.addr());
        }
        return;
      }
    }
    out->ReportProblemDiagnostic(
        BRANCH_MASK_VIOLATION,
        second.addr(),
        "Destination branch on %s is not properly masked.",
        target.ToString());
  }
}

// Reports any instructions that update a data-address register without
// a valid mask. If the instruction pair safely updates the data-address
// register, it updates the Critical set with the address of the
// second instruction, so that later code can check that the instruction
// pair is atomic.
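//
// For illustration (the exact immediate is whatever sfi.data_address_mask()
// requires; 0xC0000000 is the typical NaCl ARM value), a safe update of a
// data address register such as SP looks like:
//     add sp, sp, r1           ; update the data address register
//     bic sp, sp, #0xC0000000  ; immediately re-mask it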
static inline ViolationSet get_data_register_update_violations(
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical) {

  RegisterList data_registers(sfi.data_address_registers());

  // Don't need to check if no data address register updates.
  if (!first.defines_any(data_registers)) return kNoViolations;

  // A single safe data register update doesn't affect control flow.
  if (first.clears_bits(sfi.data_address_mask())) return kNoViolations;

  // Small immediate base register writeback to data address registers
  // (e.g. SP) doesn't need to be an instruction pair.
  if (first.base_address_register_writeback_small_immediate() &&
      sfi.data_address_registers().Contains(first.base_address_register())) {
    return kNoViolations;
  }

  // Data address register modification followed by bit clear.
  RegisterList data_addr_defs(first.defs().Intersect(data_registers));
  if (second.defines_all(data_addr_defs)
      && second.clears_bits(sfi.data_address_mask())
      && second.always_postdominates(first)) {
    return sfi.validate_instruction_pair_allowed(
        first, second, critical,
        DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION);
  }

  return ViolationBit(DATA_REGISTER_UPDATE_VIOLATION);
}

// The following generates the diagnostics that correspond to the violations
// collected by get_data_register_update_violations (above).
static inline void generate_data_register_update_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& first,
    const nacl_arm_val::DecodedInstruction& second,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  if (ContainsViolation(violations,
                        DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION)) {
    RegisterList data_registers(sfi.data_address_registers());
    RegisterList data_addr_defs(first.defs().Intersect(data_registers));
    for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
      Register reg(r);
      if (data_addr_defs.Contains(reg)) {
        out->ReportProblemDiagnostic(
            DATA_REGISTER_UPDATE_CROSSES_BUNDLE_VIOLATION,
            first.addr(),
            "Updating %s without masking in following instruction, "
            "because instruction pair [%08" NACL_PRIx32 ", %08" NACL_PRIx32
            "] crosses bundle boundary.",
            reg.ToString(),
            first.addr(),
            second.addr());
      }
    }
  }
  if (ContainsViolation(violations, DATA_REGISTER_UPDATE_VIOLATION)) {
    RegisterList data_registers(sfi.data_address_registers());
    RegisterList data_addr_defs(first.defs().Intersect(data_registers));
    if (second.defines_all(data_addr_defs) &&
        second.clears_bits(sfi.data_address_mask())) {
      for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
        Register reg(r);
        if (data_addr_defs.Contains(reg)) {
          if (first.defines(Register::Conditions())) {
            out->ReportProblemDiagnostic(
                DATA_REGISTER_UPDATE_VIOLATION,
                first.addr(),
                "Updating %s without masking in following instruction, "
                "because instruction %08" NACL_PRIx32 " sets APSR "
                "condition flags.",
                reg.ToString(),
                first.addr());
          } else {
            out->ReportProblemDiagnostic(
                DATA_REGISTER_UPDATE_VIOLATION,
                first.addr(),
                "Updating %s without masking in following instruction, "
                "because the conditions (%s, %s) on "
                "[%08" NACL_PRIx32 ", %08" NACL_PRIx32 "] don't "
                "guarantee atomicity",
                reg.ToString(),
                Instruction::ToString(first.inst().GetCondition()),
                Instruction::ToString(second.inst().GetCondition()),
                first.addr(),
                second.addr());
          }
        }
      }
    } else {
      for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
        Register reg(r);
        if (data_addr_defs.Contains(reg)) {
          out->ReportProblemDiagnostic(
              DATA_REGISTER_UPDATE_VIOLATION,
              first.addr(),
              "Updating %s without masking in following instruction.",
              reg.ToString());
        }
      }
    }
  }
}

// Checks that a call instruction (i.e. one that defines both PC and LR) is
// the last instruction in its bundle, and reports a violation if it isn't.
//
// This is not a security check per se. Rather, it is a check to prevent
// imbalancing the CPU's return stack, thereby decreasing performance.
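// (A call in the last slot leaves LR pointing at the start of the next
// bundle, so masking the return address to a bundle boundary doesn't change
// it, keeping the hardware return-address prediction accurate.)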
static inline ViolationSet get_call_position_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi) {
  // Identify linking branches through their definitions:
  if (inst.defines_all(RegisterList(Register::Pc()).Add(Register::Lr()))) {
    uint32_t last_slot = sfi.bundle_for_address(inst.addr()).end_addr() - 4;
    if (inst.addr() != last_slot) {
      return ViolationBit(CALL_POSITION_VIOLATION);
    }
  }
  return kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_call_position_violations (above).
static inline void generate_call_position_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, CALL_POSITION_VIOLATION)) {
    out->ReportProblemDiagnostic(
        CALL_POSITION_VIOLATION,
        inst.addr(),
        "Call not last instruction in instruction bundle.");
  }
}

// Checks that the instruction doesn't set a read-only register.
static inline ViolationSet get_read_only_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi) {
  return inst.defines_any(sfi.read_only_registers())
      ? ViolationBit(READ_ONLY_VIOLATION)
      : kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_read_only_violations (above).
static inline void generate_read_only_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, READ_ONLY_VIOLATION)) {
    RegisterList read_only(inst.defs().Intersect(sfi.read_only_registers()));
    for (Register::Number r = 0; r < Register::kNumberGPRs; ++r) {
      Register reg(r);
      if (read_only.Contains(reg)) {
        out->ReportProblemDiagnostic(
            READ_ONLY_VIOLATION,
            inst.addr(),
            "Updates read-only register: %s.",
            reg.ToString());
      }
    }
  }
}

// Checks that the instruction doesn't read the thread-local pointer.
static inline ViolationSet get_read_thread_local_pointer_violations(
    const nacl_arm_val::DecodedInstruction& inst) {
  return (inst.uses(Register::Tp()) && !inst.is_load_thread_address_pointer())
      ? ViolationBit(READ_THREAD_LOCAL_POINTER_VIOLATION)
      : kNoViolations;
}

// The following generates the diagnostics that correspond to the violations
// collected by get_read_thread_local_pointer_violations (above).
static inline void generate_read_thread_local_pointer_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, READ_THREAD_LOCAL_POINTER_VIOLATION)) {
    out->ReportProblemDiagnostic(
        READ_THREAD_LOCAL_POINTER_VIOLATION,
        inst.addr(),
        "Use of thread pointer %s not legal outside of load thread pointer "
        "instruction(s)",
        Register::Tp().ToString());
  }
}

// Checks that writes to the program counter come only from branches. For
// each safe relative branch, it adds the branch's address to the branches
// set, so that later code can check the branch target.
static inline ViolationSet get_pc_writes_violations(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* branches) {

  // Safe if a relative branch.
  if (inst.is_relative_branch()) {
    branches->add(inst.addr());
    return kNoViolations;
  }

  // If it's a branch through a register, it is checked by
  // get_branch_mask_violations.
  if (!inst.branch_target_register().Equals(Register::None()))
    return kNoViolations;

  if (!inst.defines(nacl_arm_dec::Register::Pc())) return kNoViolations;

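  // A single instruction that both writes PC and clears the code address
  // mask bits (e.g. a BIC whose destination is PC) sandboxes itself.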
  if (inst.clears_bits(sfi.code_address_mask())) return kNoViolations;

  return ViolationBit(PC_WRITES_VIOLATION);
}


// The following generates the diagnostics that correspond to the violations
// collected by get_pc_writes_violations (above).
static inline void generate_pc_writes_diagnostics(
    ViolationSet violations,
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::ProblemSink* out) {
  UNREFERENCED_PARAMETER(sfi);
  if (ContainsViolation(violations, PC_WRITES_VIOLATION)) {
    out->ReportProblemDiagnostic(
        PC_WRITES_VIOLATION,
        inst.addr(),
        "Destination branch on %s is not properly masked.",
        Register::Pc().ToString());
  }
}

// If the instruction is a literal pool head, mark the addresses in its
// bundle appropriately and then skip over the constant bundle.
static inline void validate_literal_pool_head(
    const nacl_arm_val::DecodedInstruction& inst,
    const nacl_arm_val::SfiValidator& sfi,
    nacl_arm_val::AddressSet* critical,
    uint32_t* next_inst_addr) {
  if (inst.is_literal_pool_head() && sfi.is_bundle_head(inst.addr())) {
    // Add each instruction in this bundle to the critical set.
    // Skip over the literal pool head (which is also the bundle head):
    // indirect branches to it are legal, direct branches should therefore
    // also be legal.
    uint32_t last_data_addr = sfi.bundle_for_address(inst.addr()).end_addr();
    for (; *next_inst_addr < last_data_addr; *next_inst_addr += 4) {
      critical->add(*next_inst_addr);
    }
  }
}

}  // namespace nacl_arm_dec

#endif  // NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_INST_CLASSES_INLINE_H_
|