1 /* Optimized strncpy/stpncpy implementation for PowerPC64/POWER8.
2 Copyright (C) 2015 Free Software Foundation, Inc.
3 This file is part of the GNU C Library.
5 The GNU C Library is free software; you can redistribute it and/or
6 modify it under the terms of the GNU Lesser General Public
7 License as published by the Free Software Foundation; either
8 version 2.1 of the License, or (at your option) any later version.
10 The GNU C Library is distributed in the hope that it will be useful,
11 but WITHOUT ANY WARRANTY; without even the implied warranty of
12 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 Lesser General Public License for more details.
15 You should have received a copy of the GNU Lesser General Public
16 License along with the GNU C Library; if not, see
17 <http://www.gnu.org/licenses/>. */
22 # define FUNC_NAME __stpncpy
24 # define FUNC_NAME strncpy
27 /* Implements the function
29 char * [r3] strncpy (char *dest [r3], const char *src [r4], size_t n [r5])
33 char * [r3] stpncpy (char *dest [r3], const char *src [r4], size_t n [r5])
35 if USE_AS_STPNCPY is defined.
37 The implementation uses unaligned doubleword access to avoid specialized
38 code paths depending on data alignment. Although recent powerpc64 uses
39 64K as default, the page cross handling assumes minimum page size of
43 EALIGN (FUNC_NAME, 4, 0)
45 /* Check if the [src]+15 will cross a 4K page by checking if the bit
46 indicating the page size changes. Basically:
48 uint64_t srcin = (uint64_t)src;
49 uint64_t ob = srcin & 4096UL;
50 uint64_t nb = (srcin+15UL) & 4096UL;
57 /* Since it is a leaf function, save some non-volatile registers on the
58 protected/red zone. */
72 beq cr7,L(unaligned_lt_16)
78 /* At this point there are 1 to 15 bytes to check and write. Since it could
79 be either from first unaligned 16 bytes access or from bulk copy, the code
80 uses an unrolled byte read/write instead of trying to analyze the cmpb
86 beq cr7,L(short_path_loop_end_1)
91 beq cr7,L(zero_pad_start_1)
95 beq cr0,L(short_path_loop_end_0)
99 beq cr7,L(zero_pad_start_prepare_1)
101 b L(short_path_loop_1)
109 beq cr5,L(zero_pad_start_1)
/* Fix: the condition-register field was spelled 'r7'; that assembled as
   cr7 only because sysdep.h defines r7 as the bare number 7.  Spell the
   CR field explicitly, consistent with every other branch here.  */
110 beq cr7,L(short_path_loop_end_0)
114 beq cr7,L(zero_pad_start)
116 L(short_path_loop_1):
122 bne cr0,L(short_path_loop)
123 #ifdef USE_AS_STPNCPY
125 b L(short_path_loop_end)
128 L(short_path_loop_end_0):
129 #ifdef USE_AS_STPNCPY
131 b L(short_path_loop_end)
133 L(short_path_loop_end_1):
134 #ifdef USE_AS_STPNCPY
137 L(short_path_loop_end):
138 /* Restore non-volatile registers. */
147 /* This code pads the remainder of dest with NULL bytes.  The algorithm
148 calculates the remaining size and issues a doubleword unrolled
149 loop followed by a byte-by-byte set. */
157 #ifdef USE_AS_STPNCPY
160 beq- cr0,L(zero_pad_loop_b_start)
164 beq cr7,L(zero_pad_loop_b_prepare)
168 beq cr0,L(zero_pad_loop_dw_2)
171 b L(zero_pad_loop_dw_1)
177 beq cr0,L(zero_pad_loop_dw_2)
179 L(zero_pad_loop_dw_1):
183 bne cr7,L(zero_pad_loop_dw)
185 L(zero_pad_loop_dw_2):
187 L(zero_pad_loop_b_start):
194 beq- cr7,L(short_path_loop_end)
196 /* Write remaining 1-8 bytes. */
213 /* Restore non-volatile registers. */
222 /* The common case where [src]+16 will not cross a 4K page boundary.
223 In this case the code fast-checks the first 16 bytes by using doubleword
224 read/compares and updates the destination if neither the total size nor
225 a null byte is found. */
229 ble cr7,L(short_path)
234 bne cr7,L(short_path_prepare_2)
240 ble cr7,L(short_path_prepare_1_1)
244 bne cr7,L(short_path_prepare_2_1)
248 /* Neither a null byte was found nor was the total length reached,
249 align to 16 bytes and issue a bulk copy/compare. */
252 /* In the case of a 4k page boundary cross, the algorithm first aligns
253 the address to a doubleword, calculates a mask based on alignment
254 to ignore the leading bytes, and continues using doublewords. */
257 rldicr r11,r4,0,59 /* Align the address to 8 bytes boundary. */
258 li r6,-1 /* MASK = 0xffffffffffffffffUL. */
259 sldi r9,r9,3 /* Calculate padding. */
260 ld r7,0(r11) /* Load doubleword from memory. */
261 #ifdef __LITTLE_ENDIAN__
262 sld r9,r6,r9 /* MASK = MASK << padding. */
264 srd r9,r6,r9 /* MASK = MASK >> padding. */
266 orc r9,r7,r9 /* Mask bits that are not part of the
269 cmpb r9,r9,r7 /* Check for null bytes in DWORD1. */
271 bne cr7,L(short_path_prepare_2)
272 subf r8,r8,r5 /* Adjust total length. */
273 cmpldi cr7,r8,8 /* Check if length was reached. */
274 ble cr7,L(short_path_prepare_2)
276 /* For the next checks we have an aligned address, so we check three
277 more doublewords to make sure we can read 16 unaligned bytes
278 to start the bulk copy with 16 aligned addresses. */
282 bne cr7,L(short_path_prepare_2)
285 ble cr7,L(short_path_prepare_2)
289 bne cr7,L(short_path_prepare_2)
292 ble cr7,L(short_path_prepare_2)
296 bne cr7,L(short_path_prepare_2)
298 /* No null byte found in the 32 bytes read and length not reached,
299 read source again using unaligned loads and store them. */
307 /* Align source to 16 bytes and adjust destination and size. */
314 /* The bulk read/compare/copy loads two doublewords, compares and merges
315 them in a single register for speed.  This is an attempt to speed up the
316 null-checking process for bigger strings. */
319 ble cr7,L(short_path_prepare_1_2)
321 /* Main loop for large sizes, unrolled 2 times to get better use of
329 bne cr0,L(short_path_prepare_2_3)
336 ble cr7,L(short_path_1)
356 bne cr0,L(short_path_prepare_2_2)
361 ble cr7,L(short_path_1)
372 bne cr0,L(short_path_2)
376 bgt cr7,L(loop_start)
384 L(short_path_prepare_1_1):
388 L(short_path_prepare_1_2):
393 L(short_path_prepare_2):
396 L(short_path_prepare_2_1):
400 L(short_path_prepare_2_2):
405 L(short_path_prepare_2_3):
410 L(zero_pad_loop_b_prepare):
413 b L(zero_pad_loop_b_start)
414 L(zero_pad_start_prepare_1):
417 b L(zero_pad_start_1)
420 #ifdef USE_AS_STPNCPY
421 libc_hidden_def (__stpncpy)
423 libc_hidden_builtin_def (strncpy)