1 | // RUN: %clang -std=gnu++11 -O2 -ffast-math -g %s -o %t |
2 | // RUN: %dexter --fail-lt 1.0 -w \ |
3 | // RUN: --binary %t --debugger 'lldb' -- %s |
4 | // RUN: %clang -std=gnu++11 -O0 -ffast-math -g %s -o %t |
5 | // RUN: %dexter --fail-lt 1.0 -w \ |
6 | // RUN: --binary %t --debugger 'lldb' -- %s |
7 | |
8 | // REQUIRES: lldb |
9 | // Currently getting intermittent failures on darwin. |
10 | // UNSUPPORTED: system-windows, system-darwin |
11 | |
12 | //// Check that the debugging experience with __attribute__((optnone)) at O2 |
13 | //// matches O0. Test scalar floating point arithmetic with -ffast-math. |
14 | |
15 | //// Example of strength reduction. |
16 | //// The division by 10.0f can be rewritten as a multiply by 0.1f. |
17 | //// A / 10.f ==> A * 0.1f |
18 | //// This is safe with fastmath since we treat the two operations |
19 | //// as equally precise. However we don't want this to happen |
20 | //// with optnone. |
//// Under optnone the fdiv instruction must survive unchanged, and both
//// 'A' and 'result' must be watchable at the labelled lines; the
//// DexExpectWatchValue directives below assume A == 4 (passed from main).
__attribute__((optnone))
float test_fdiv(float A) {
  float result;
  result = A / 10.f; // DexLabel('fdiv_assign')
  return result; // DexLabel('fdiv_ret')
}
27 | // DexExpectWatchValue('A', 4, on_line=ref('fdiv_assign')) |
28 | // DexExpectWatchValue('result', '0.400000006', on_line=ref('fdiv_ret')) |
29 | |
30 | //// (A * B) - (A * C) ==> A * (B - C) |
//// op1 and op2 must remain observable as two separate multiplications at
//// the labelled lines; fastmath at O2 could otherwise refactor the body
//// into A * (B - C), losing the intermediate values the test watches.
//// Expected values below assume A == 4, B == 5, C == 6 (see main).
__attribute__((optnone))
float test_distributivity(float A, float B, float C) {
  float result;
  float op1 = A * B;
  float op2 = A * C; // DexLabel('distributivity_op2')
  result = op1 - op2; // DexLabel('distributivity_result')
  return result; // DexLabel('distributivity_ret')
}
39 | // DexExpectWatchValue('op1', '20', on_line=ref('distributivity_op2')) |
40 | // DexExpectWatchValue('op2', '24', on_line=ref('distributivity_result')) |
41 | // DexExpectWatchValue('result', '-4', on_line=ref('distributivity_ret')) |
42 | |
43 | //// (A + B) + C == A + (B + C) |
//// therefore, ((A + B) + C) + (A + (B + C))
45 | //// can be rewritten as |
46 | //// 2.0f * ((A + B) + C) |
47 | //// Clang is currently unable to spot this optimization |
48 | //// opportunity with fastmath. |
//// Both running sums must stay live and watchable across the compound
//// assignments: op1 steps 9 -> 15 and op2 steps 11 -> 15 between the
//// labelled lines (inputs A == 4, B == 5, C == 6 from main). A fastmath
//// reassociation into 2.0f * ((A + B) + C) would destroy those steps.
__attribute__((optnone))
float test_associativity(float A, float B, float C) {
  float result;
  float op1 = A + B;
  float op2 = B + C;
  op1 += C; // DexLabel('associativity_op1')
  op2 += A;
  result = op1 + op2; // DexLabel('associativity_result')
  return result; // DexLabel('associativity_ret')
}
59 | // DexExpectWatchValue('op1', '9', '15', from_line=ref('associativity_op1'), to_line=ref('associativity_result')) |
60 | // DexExpectWatchValue('op2', '11', '15', from_line=ref('associativity_op1'), to_line=ref('associativity_result')) |
61 | // DexExpectWatchValue('result', '30', on_line=ref('associativity_ret')) |
62 | |
63 | //// With fastmath, the ordering of instructions doesn't matter |
64 | //// since we work under the assumption that there is no loss |
65 | //// in precision. This simplifies things for the optimizer which |
66 | //// can then decide to reorder instructions and fold |
67 | //// redundant operations like this: |
68 | //// A += 5.0f |
69 | //// A -= 5.0f |
70 | //// --> |
71 | //// A |
72 | //// This function can be simplified to a return A + B. |
//// The +10.0f / -10.0f pair must not be folded away under optnone:
//// 'result' is expected to step through all three intermediate values
//// (18.25, 44.6500015, 34.6500015) between the labelled lines, given
//// A == 8.25 and B == 26.3999996 from main.
__attribute__((optnone))
float test_simplify_fp_operations(float A, float B) {
  float result = A + 10.0f; // DexLabel('fp_operations_result')
  result += B; // DexLabel('fp_operations_add')
  result -= 10.0f;
  return result; // DexLabel('fp_operations_ret')
}
80 | // DexExpectWatchValue('A', '8.25', on_line=ref('fp_operations_result')) |
81 | // DexExpectWatchValue('B', '26.3999996', on_line=ref('fp_operations_result')) |
82 | // DexExpectWatchValue('result', '18.25', '44.6500015', '34.6500015', from_line=ref('fp_operations_add'), to_line=ref('fp_operations_ret')) |
83 | |
84 | //// Again, this is a simple return A + B. |
85 | //// Clang is unable to spot the opportunity to fold the code sequence. |
//// Same shape as above but the cancelled term is a parameter (C) rather
//// than a literal: +C then -C must both execute under optnone so the
//// intermediate 'result' values at the labelled lines match the
//// expectations below (inputs come from main).
__attribute__((optnone))
float test_simplify_fp_operations_2(float A, float B, float C) {
  float result = A + C; // DexLabel('fp_operations_2_result')
  result += B;
  result -= C; // DexLabel('fp_operations_2_subtract')
  return result; // DexLabel('fp_operations_2_ret')
}
93 | // DexExpectWatchValue('A', '9.11999988', on_line=ref('fp_operations_2_result')) |
94 | // DexExpectWatchValue('B', '61.050003', on_line=ref('fp_operations_2_result')) |
95 | // DexExpectWatchValue('C', '1002.11102', on_line=ref('fp_operations_2_result')) |
96 | // DexExpectWatchValue('result', '1072.28101', '70.1699829', from_line=ref('fp_operations_2_subtract'), to_line=ref('fp_operations_2_ret')) |
97 | |
98 | int main() { |
99 | float result = test_fdiv(A: 4.0f); |
100 | result += test_distributivity(A: 4.0f, B: 5.0f, C: 6.0f); |
101 | result += test_associativity(A: 4.0f, B: 5.0f, C: 6.0f); |
102 | result += test_simplify_fp_operations(A: 8.25, B: result); |
103 | result += test_simplify_fp_operations_2(A: 9.12, B: result, C: 1002.111); |
104 | return static_cast<int>(result); |
105 | } |
106 | |