|
@@ -84,8 +84,7 @@
   do_stub(compiler, count_positives) \
   do_arch_entry(aarch64, compiler, count_positives, count_positives, \
                 count_positives) \
-  do_stub(compiler, count_positives_long) \
-  do_arch_entry(aarch64, compiler, count_positives_long, \
+  do_arch_entry(aarch64, compiler, count_positives, \
                 count_positives_long, count_positives_long) \
   do_stub(compiler, compare_long_string_LL) \
   do_arch_entry(aarch64, compiler, compare_long_string_LL, \
|
@@ -108,8 +107,9 @@
   do_stub(compiler, string_indexof_linear_ul) \
   do_arch_entry(aarch64, compiler, string_indexof_linear_ul, \
                 string_indexof_linear_ul, string_indexof_linear_ul) \
-  /* this uses the entry for ghash_processBlocks */ \
-  do_stub(compiler, ghash_processBlocks_wide) \
+  do_stub(compiler, ghash_processBlocks_small) \
+  do_arch_entry(aarch64, compiler, ghash_processBlocks_small, \
+                ghash_processBlocks_small, ghash_processBlocks_small) \
 
 
 #define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub, \
|
@@ -139,9 +139,49 @@
   do_stub(final, spin_wait) \
   do_arch_entry_init(aarch64, final, spin_wait, spin_wait, \
                      spin_wait, empty_spin_wait) \
-  /* stub only -- entries are not stored in StubRoutines::aarch64 */ \
   /* n.b. these are not the same as the generic atomic stubs */ \
   do_stub(final, atomic_entry_points) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_4_impl, atomic_fetch_add_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_8_impl, atomic_fetch_add_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_4_relaxed_impl, \
+                atomic_fetch_add_4_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_8_relaxed_impl, \
+                atomic_fetch_add_8_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_xchg_4_impl, atomic_xchg_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_xchg_8_impl, atomic_xchg_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_1_impl, atomic_cmpxchg_1_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_impl, atomic_cmpxchg_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_impl, atomic_cmpxchg_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_1_relaxed_impl, \
+                atomic_cmpxchg_1_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_relaxed_impl, \
+                atomic_cmpxchg_4_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_relaxed_impl, \
+                atomic_cmpxchg_8_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_release_impl, \
+                atomic_cmpxchg_4_release_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_release_impl, \
+                atomic_cmpxchg_8_release_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_seq_cst_impl, \
+                atomic_cmpxchg_4_seq_cst_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_seq_cst_impl, \
+                atomic_cmpxchg_8_seq_cst_impl) \
 
 
 #endif // CPU_AARCH64_STUBDECLARATIONS_HPP
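For context on the shape of these declarations: the STUBGEN_*_DO macros follow the X-macro pattern, where a single list is expanded several times with different definitions of the do_stub/do_arch_entry callbacks, so one stub (count_positives after this change, or atomic_entry_points) can carry several entries. Below is a minimal stand-alone sketch of that pattern; DEMO_STUBS_DO, DECLARE_ENTRY, and the other names are hypothetical illustrations, not the JDK's generator macros.

#include <cstdio>

// Hypothetical X-macro list: one stub may own several entries, as with
// count_positives/count_positives_long or atomic_entry_points above.
#define DEMO_STUBS_DO(do_stub, do_entry)                 \
  do_stub(count_positives)                               \
  do_entry(count_positives, count_positives)             \
  do_entry(count_positives, count_positives_long)        \
  do_stub(atomic_entry_points)                           \
  do_entry(atomic_entry_points, atomic_fetch_add_4_impl) \
  do_entry(atomic_entry_points, atomic_xchg_4_impl)

// Expansion 1: declare one address field per entry (stubs need none).
#define DECLARE_STUB(stub)
#define DECLARE_ENTRY(stub, entry) static void* entry##_address = nullptr;
DEMO_STUBS_DO(DECLARE_STUB, DECLARE_ENTRY)
#undef DECLARE_STUB
#undef DECLARE_ENTRY

// Expansion 2: count the stubs from the very same list.
#define COUNT_STUB(stub) + 1
#define COUNT_ENTRY(stub, entry)
static const int num_stubs = 0 DEMO_STUBS_DO(COUNT_STUB, COUNT_ENTRY);
#undef COUNT_STUB
#undef COUNT_ENTRY

int main() {
  std::printf("stubs: %d, count_positives_long entry at %p\n",
              num_stubs, count_positives_long_address);
  return 0;
}

Because every expansion is driven by the one list, adding an entry through do_arch_entry (as the atomic_entry_points hunk does sixteen times) keeps declarations, initializers, and any derived tables in sync by construction.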