This converts inversion lists to use their own scalar type.
EiMR |UV* |_invlist_array_init |NN SV* const invlist|const bool will_have_0
EiMR |UV* |invlist_array |NN SV* const invlist
EsM |void |invlist_extend |NN SV* const invlist|const UV len
-EiMR |U8* |get_invlist_offset_addr|NN SV* invlist
+EiMR |bool* |get_invlist_offset_addr|NN SV* invlist
EiMR |UV |invlist_max |NN SV* const invlist
EiM |void |invlist_set_len|NN SV* const invlist|const UV len
EiMR |IV* |get_invlist_previous_index_addr|NN SV* invlist
PERL_ARGS_ASSERT__GET_INVLIST_LEN_ADDR;
- return &(LvTARGLEN(invlist));
+ return &(((XINVLIST*) SvANY(invlist))->count);
}
PERL_STATIC_INLINE UV
typedef struct xpvnv XPVNV;
typedef struct xpvmg XPVMG;
typedef struct xpvlv XPVLV;
+typedef struct xpvinvlist XINVLIST;
typedef struct xpvav XPVAV;
typedef struct xpvhv XPVHV;
typedef struct xpvgv XPVGV;
EXTCONST bool
PL_valid_types_NVX[] = { 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0 };
EXTCONST bool
-PL_valid_types_PVX[] = { 0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1 };
+PL_valid_types_PVX[] = { 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1 };
EXTCONST bool
PL_valid_types_RV[] = { 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1 };
EXTCONST bool
#define PERL_ARGS_ASSERT_GET_INVLIST_ITER_ADDR \
assert(invlist)
-PERL_STATIC_INLINE U8* S_get_invlist_offset_addr(pTHX_ SV* invlist)
+PERL_STATIC_INLINE bool* S_get_invlist_offset_addr(pTHX_ SV* invlist)
__attribute__warn_unused_result__
__attribute__nonnull__(pTHX_1);
#define PERL_ARGS_ASSERT_GET_INVLIST_OFFSET_ADDR \
/* This section of code defines the inversion list object and its methods. The
* interfaces are highly subject to change, so as much as possible is static to
- * this file. An inversion list is here implemented as a malloc'd C UV array.
- * Currently it is a SVt_PVLV, with some of the header fields from that
- * repurposed for uses here.
+ * this file. An inversion list is here implemented as a malloc'd C UV array
+ * contained in an SVt_INVLIST scalar.
*
* An inversion list for Unicode is an array of code points, sorted by ordinal
* number. The zeroth element is the first code point in the list. The 1th
* element is the first code point after that which is not in the list; in
* other words, the first range of the set is invlist[0]..(invlist[1]-1) */
- U8* offset = get_invlist_offset_addr(invlist);
+ bool* offset = get_invlist_offset_addr(invlist);
UV* zero_addr = (UV *) SvPVX(invlist);
PERL_ARGS_ASSERT__INVLIST_ARRAY_INIT;
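Aside: the section comment above is the heart of the data structure. Membership
testing is a binary search for the largest element not exceeding the code
point, followed by a check of that index's parity. A minimal standalone sketch
of the idea, using plain C types rather than UV; this is illustrative only,
not perl's implementation:

    #include <stddef.h>

    /* Elements at even indexes begin ranges that are in the set;
     * elements at odd indexes begin ranges that are not.  An odd
     * total length means the final range extends to infinity. */
    static int
    invlist_contains(const unsigned long *list, size_t len, unsigned long cp)
    {
        size_t lo = 0, hi = len;

        if (len == 0 || cp < list[0])
            return 0;                    /* before the first range */

        /* binary search: largest index whose element is <= cp */
        while (lo + 1 < hi) {
            size_t mid = lo + (hi - lo) / 2;
            if (list[mid] <= cp)
                lo = mid;
            else
                hi = mid;
        }
        return (lo % 2) == 0;            /* even index: in the set */
    }

For example, the two-element list { 'A', 'Z' + 1 } represents exactly the
range 'A'..'Z'.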
PERL_STATIC_INLINE IV*
S_get_invlist_previous_index_addr(pTHX_ SV* invlist)
{
- /* Return the address of the UV that is reserved to hold the cached index
+ /* Return the address of the IV that is reserved to hold the cached index
* of the previous search */
PERL_ARGS_ASSERT_GET_INVLIST_PREVIOUS_INDEX_ADDR;
- return &(((XPVLV*) SvANY(invlist))->xiv_u.xivu_iv);
+ return &(((XINVLIST*) SvANY(invlist))->prev_index);
}
PERL_STATIC_INLINE IV
: FROM_INTERNAL_SIZE(SvLEN(invlist));
}
-PERL_STATIC_INLINE U8*
+PERL_STATIC_INLINE bool*
S_get_invlist_offset_addr(pTHX_ SV* invlist)
{
/* Return the address of the field that says whether the inversion list is
* offset (TRUE) or not (FALSE) */
PERL_ARGS_ASSERT_GET_INVLIST_OFFSET_ADDR;
- return (U8*) &(LvFLAGS(invlist));
+ return &(((XINVLIST*) SvANY(invlist))->is_offset);
}
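The LvFLAGS slot that used to hold this flag is replaced above by a dedicated
bool in the new body struct. The mechanism it supports is worth spelling out:
the first UV of the buffer is permanently reserved for code point 0, and the
flag records whether that slot is really part of the list. A hedged sketch of
the idea with toy names (compare _invlist_array_init's 'offset' and
'zero_addr' earlier in this patch); the polarity shown here is an inference
from the accessors in this excerpt, not a quote of the source:

    #include <stddef.h>

    typedef struct {
        unsigned long *buf;   /* malloc'd storage; buf[0] reserved for 0 */
        size_t len;           /* elements in the logical list */
        int is_offset;        /* TRUE: the list does not contain 0 */
    } toy_invlist;

    /* The array handed to searching code starts either at the reserved
     * slot (list contains 0) or one element past it (it doesn't), so
     * toggling membership of code point 0 never moves memory. */
    static unsigned long *
    toy_array(toy_invlist *l)
    {
        return l->buf + l->is_offset;
    }

    static void
    toy_array_init(toy_invlist *l, int will_have_0)
    {
        l->buf[0] = 0;                /* reserved slot always holds 0 */
        l->is_offset = !will_have_0;  /* offset iff 0 is not in the list */
    }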
#ifndef PERL_IN_XSUB_RE
* system default is used instead */
SV* new_list;
- U8* offset_addr;
+ bool* offset_addr;
if (initial_size < 0) {
initial_size = 10;
}
/* Allocate the initial space */
- new_list = newSV_type(SVt_PVLV);
+ new_list = newSV_type(SVt_INVLIST);
SvGROW(new_list, TO_INTERNAL_SIZE(initial_size) + 1); /* 1 is for trailing
NUL */
invlist_set_len(new_list, 0);
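SvGROW sizes the PV in bytes, while the inversion-list code counts UV elements
and must also account for the leading UV reserved for code point 0. The
conversion macros in regcomp.c look something like the following; this is a
sketch of the intent, not a verbatim quote, so consult the source for the
exact definitions:

    /* Sketch, assuming one leading UV is reserved for the code
     * point 0 slot */
    #define TO_INTERNAL_SIZE(x)   (((x) + 1) * sizeof(UV))
    #define FROM_INTERNAL_SIZE(x) (((x) / sizeof(UV)) - 1)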
inversion list of the correct vintage.
*/
- SV* invlist = newSV_type(SVt_PVLV);
+ SV* invlist = newSV_type(SVt_INVLIST);
PERL_ARGS_ASSERT__NEW_INVLIST_C_ARRAY;
PERL_ARGS_ASSERT_GET_INVLIST_ITER_ADDR;
- return &(LvTARGOFF(invlist));
+ return &(((XINVLIST*) SvANY(invlist))->iterator);
}
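The iterator field whose address is returned above remembers how far a
traversal has gotten: iteration yields one range per step, i.e. a pair of
adjacent elements starting at an even index. A standalone sketch of the idea
with toy types and names, not perl's actual iterator code:

    #include <stddef.h>

    /* Yields the next [*start, *end] range of the set, advancing
     * *iter past it; returns 0 when the list is exhausted.  'array'
     * and 'len' describe the logical list (after any zero-element
     * offset). */
    static int
    toy_iternext(const unsigned long *array, size_t len, size_t *iter,
                 unsigned long *start, unsigned long *end)
    {
        size_t i = *iter;

        if (i >= len)
            return 0;
        *start = array[i];
        *end = (i + 1 < len) ? array[i + 1] - 1  /* ends before next */
                             : ~0UL;             /* final: to infinity */
        *iter = i + 2;
        return 1;
    }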
PERL_STATIC_INLINE void
SVt_PV, FALSE, NONV, HASARENA,
FIT_ARENA(0, sizeof(XPV) - STRUCT_OFFSET(XPV, xpv_cur)) },
- /* The invlist placeholder pretends to be an RV for now.
- Also it's marked as "can't upgrade" to stop anyone using it before it's
- implemented. */
- { 0, 0, 0, SVt_INVLIST, TRUE, NONV, NOARENA, 0 },
+ { sizeof(XINVLIST) - STRUCT_OFFSET(XPV, xpv_cur),
+ copy_length(XINVLIST, is_offset) - STRUCT_OFFSET(XPV, xpv_cur),
+ + STRUCT_OFFSET(XPV, xpv_cur),
+ SVt_INVLIST, TRUE, NONV, HASARENA,
+ FIT_ARENA(0, sizeof(XINVLIST) - STRUCT_OFFSET(XPV, xpv_cur)) },
{ sizeof(XPVIV) - STRUCT_OFFSET(XPV, xpv_cur),
copy_length(XPVIV, xiv_u) - STRUCT_OFFSET(XPV, xpv_cur),
case SVt_PVGV:
case SVt_PVCV:
case SVt_PVLV:
+ case SVt_INVLIST:
case SVt_REGEXP:
case SVt_PVMG:
case SVt_PVNV:
}
break;
- /* case SVt_INVLIST: */
+ case SVt_INVLIST:
case SVt_PVLV:
case SVt_PVGV:
case SVt_PVMG:
case SVt_PVMG:
case SVt_PVNV:
case SVt_PVIV:
+ case SVt_INVLIST:
case SVt_PV:
freescalar:
/* Don't bother with SvOOK_off(sv); as we're only going to
* free it. */
SvANY(dstr) = new_XNV();
SvNV_set(dstr, SvNVX(sstr));
break;
- /* case SVt_INVLIST: */
default:
{
/* These are all the types that need complex bodies allocating. */
case SVt_PVMG:
case SVt_PVNV:
case SVt_PVIV:
+ case SVt_INVLIST:
case SVt_PV:
assert(sv_type_details->body_size);
if (sv_type_details->arena) {
char xlv_flags; /* 1 = negative offset 2 = negative len */
};
+struct xpvinvlist {
+ _XPV_HEAD;
+ IV prev_index; /* caches the result of the previous search */
+ STRLEN iterator; /* stores where we are in iterating */
+ STRLEN count; /* number of elements in the inversion list */
+ bool is_offset; /* TRUE if the element reserved for code point 0
+ is not part of the list, so the array
+ effectively begins one element past it */
+};
+
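For reference, the patch's field substitutions, gathered in one place (each
pairing is visible in the hunks above):

    count      (STRLEN)  replaces LvTARGLEN(invlist)     - list length
    iterator   (STRLEN)  replaces LvTARGOFF(invlist)     - iteration position
    prev_index (IV)      replaces XPVLV's xiv_u.xivu_iv  - cached search index
    is_offset  (bool)    replaces LvFLAGS(invlist)       - zero-element flag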
/* This structure works in 3 ways - regular scalar, GV with GP, or fast
Boyer-Moore. */
struct xpvgv {