From 90bdfbb6e7a3eaca717f454ebffcf7eebe736af5 Mon Sep 17 00:00:00 2001 From: rmathew Date: Sun, 4 Jul 2004 15:19:16 +0000 Subject: [PATCH] * verify.c: Insert a short blurb at the start referring to the JVMS. Minor formatting fixes all over the place. (merge_type_state): Remove redundant nested if statement. (verify_jvm_instructions): Ensure current_subr is initialised to NULL_TREE. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@84084 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/java/ChangeLog | 8 ++ gcc/java/verify.c | 313 +++++++++++++++++++++++++++++++++++------------------ 2 files changed, 213 insertions(+), 108 deletions(-) diff --git a/gcc/java/ChangeLog b/gcc/java/ChangeLog index efa1c0c..638aa9d 100644 --- a/gcc/java/ChangeLog +++ b/gcc/java/ChangeLog @@ -1,3 +1,11 @@ +2004-07-04 Ranjit Mathew + + * verify.c: Insert a short blurb at the start referring to the JVMS. + (merge_type_state): Remove redundant nested if statement. + (verify_jvm_instructions): Ensure current_subr is initialised to + NULL_TREE. + Minor formatting fixes all over the place. + 2004-07-02 Richard Henderson * jcf-write.c (generate_bytecode_insns ): Rewrite. diff --git a/gcc/java/verify.c b/gcc/java/verify.c index ed3f945..750c517 100644 --- a/gcc/java/verify.c +++ b/gcc/java/verify.c @@ -24,6 +24,11 @@ Java and all Java-based marks are trademarks or registered trademarks of Sun Microsystems, Inc. in the United States and other countries. The Free Software Foundation is independent of Sun Microsystems, Inc. */ +/* This bytecode verifier is an implementation of the bytecode +verification process described in section 4.9 of "The Java(TM) Virtual +Machine Specification", Second Edition, by Tim Lindholm and Frank Yellin, +published by Addison-Wesley in 1999. */ + #include "config.h" #include "system.h" #include "coretypes.h" @@ -82,7 +87,7 @@ check_pending_block (tree target_label) push_pending_label (target_label); } - if (current_subr == NULL) + if (current_subr == NULL_TREE) { if (LABEL_IN_SUBR (target_label)) return "might transfer control into subroutine"; @@ -113,7 +118,7 @@ subroutine_nesting (tree label) int nesting = 0; while (label != NULL_TREE && LABEL_IN_SUBR (label)) { - if (! LABEL_IS_SUBR_START(label)) + if (! LABEL_IS_SUBR_START (label)) label = LABEL_SUBR_START (label); label = LABEL_SUBR_CONTEXT (label); nesting++; @@ -250,11 +255,9 @@ merge_type_state (tree label) tree return_map; if (vec == NULL_TREE) { - if (!vec) - { - vec = make_tree_vec (cur_length); - LABEL_TYPE_STATE (label) = vec; - } + vec = make_tree_vec (cur_length); + LABEL_TYPE_STATE (label) = vec; + while (--cur_length >= 0) TREE_VEC_ELT (vec, cur_length) = type_map [cur_length]; return 1; @@ -281,7 +284,7 @@ merge_type_state (tree label) /* If there has been a change, note that since we must re-verify. However, if the label is the start of a subroutine, we don't care about local variables that are neither - set nor used in the sub-routine. */ + set nor used in the subroutine. 
*/ if (return_map == NULL_TREE || i >= nlocals || TREE_VEC_ELT (return_map, i) != TYPE_UNUSED || (TYPE_IS_WIDE (new_type) @@ -306,33 +309,33 @@ merge_type_state (tree label) static void type_stack_dup (int size, int offset) { - tree type[4]; + tree type [4]; int index; for (index = 0; index < size + offset; index++) { - type[index] = stack_type_map[stack_pointer - 1]; - if (type[index] == void_type_node) + type [index] = stack_type_map [stack_pointer - 1]; + if (type [index] == void_type_node) { index++; - type[index] = stack_type_map[stack_pointer - 2]; - if (! TYPE_IS_WIDE (type[index])) + type [index] = stack_type_map [stack_pointer - 2]; + if (! TYPE_IS_WIDE (type [index])) abort (); if (index == size || index == size + offset) /* Dup operation splits 64-bit number. */ abort (); } - pop_type (type[index]); + pop_type (type [index]); } for (index = size; --index >= 0; ) { - if (type[index] != void_type_node) - push_type (type[index]); + if (type [index] != void_type_node) + push_type (type [index]); } for (index = size + offset; --index >= 0; ) { - if (type[index] != void_type_node) - push_type (type[index]); + if (type [index] != void_type_node) + push_type (type [index]); } } @@ -437,7 +440,9 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) pending_blocks = NULL_TREE; - /* Handle the exception table. */ + current_subr = NULL_TREE; + + /* Handle the exception table. */ method_init_exceptions (); JCF_SEEK (jcf, DECL_CODE_OFFSET (current_function_decl) + length); eh_count = JCF_readu2 (jcf); @@ -447,8 +452,8 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) starts = xmalloc (eh_count * sizeof (struct pc_index)); for (i = 0; i < eh_count; ++i) { - starts[i].start_pc = GET_u2 (jcf->read_ptr + 8 * i); - starts[i].index = i; + starts [i].start_pc = GET_u2 (jcf->read_ptr + 8 * i); + starts [i].index = i; } qsort (starts, eh_count, sizeof (struct pc_index), start_pc_cmp); @@ -456,7 +461,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) { int start_pc, end_pc, handler_pc, catch_type; - p = jcf->read_ptr + 8 * starts[i].index; + p = jcf->read_ptr + 8 * starts [i].index; start_pc = GET_u2 (p); end_pc = GET_u2 (p+2); @@ -490,6 +495,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) for (PC = 0;;) { tree type, tmp; + if (((PC != INVALID_PC && instruction_bits [PC] & BCODE_TARGET) != 0) || PC == 0) @@ -497,57 +503,61 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) PUSH_PENDING (lookup_label (PC)); INVALIDATE_PC; } + /* Check if there are any more pending blocks in the current subroutine. Because we push pending blocks in a last-in-first-out order, and because we don't push anything from our caller until we are done with this subroutine or - anything nested in it, then we are done if the top of the + anything nested in it, we are done if the top of the pending_blocks stack is not in a subroutine, or it is in our caller. 
*/ - if (current_subr - && PC == INVALID_PC) + if (current_subr && PC == INVALID_PC) { if (pending_blocks == NULL_TREE || (subroutine_nesting (pending_blocks) < subroutine_nesting (current_subr))) { - int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer; + int size + = DECL_MAX_LOCALS (current_function_decl) + stack_pointer; + tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr); tmp = LABEL_RETURN_LABELS (current_subr); /* FIXME: If we exit a subroutine via a throw, we might have returned to an earlier caller. Obviously a "ret" can only return one level, but a throw may - return many levels.*/ + return many levels. */ current_subr = LABEL_SUBR_CONTEXT (current_subr); if (RETURN_MAP_ADJUSTED (ret_map)) { - /* Since we are done with this subroutine , set up + /* Since we are done with this subroutine, set up the (so far known) return address as pending - - with the merged type state. */ + with the merged type state. */ for ( ; tmp != NULL_TREE; tmp = TREE_CHAIN (tmp)) { tree return_label = TREE_VALUE (tmp); tree return_state = LABEL_TYPE_STATE (return_label); if (return_state == NULL_TREE) { - /* This means means we had not verified the - subroutine earlier, so this is the first jsr to - call it. In this case, the type_map of the return + /* This means we had not verified the subroutine + earlier, so this is the first jsr to call it. + In this case, the type_map of the return address is just the current type_map - and that - is handled by the following PUSH_PENDING. */ + is handled by the following PUSH_PENDING. */ } else { /* In this case we have to do a merge. But first restore the type_map for unused slots to those - that were in effect at the jsr. */ - for (index = size; --index >= 0; ) + that were in effect at the jsr. */ + for (index = size; --index >= 0; ) { - type_map[index] = TREE_VEC_ELT (ret_map, index); - if (type_map[index] == TYPE_UNUSED) - type_map[index] + type_map [index] + = TREE_VEC_ELT (ret_map, index); + + if (type_map [index] == TYPE_UNUSED) + type_map [index] = TREE_VEC_ELT (return_state, index); } } @@ -556,11 +566,14 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) } } } + if (PC == INVALID_PC) { label = pending_blocks; + if (label == NULL_TREE) break; /* We're done! */ + pending_blocks = LABEL_PENDING_CHAIN (label); LABEL_CHANGED (label) = 0; @@ -571,52 +584,60 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) /* Restore type_map and stack_pointer from LABEL_TYPE_STATE (label), and continue - compiling from there. */ + compiling from there. */ load_type_state (label); + PC = LABEL_PC (label); } else if (PC >= length) - VERIFICATION_ERROR ("falling through end of method"); + VERIFICATION_ERROR ("falling through the end of the method"); - /* fprintf (stderr, "** %d\n", PC); */ oldpc = PC; - if (!(instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! wide) + if (! (instruction_bits [PC] & BCODE_INSTRUCTION_START) && ! 
wide) VERIFICATION_ERROR ("PC not at instruction start"); - instruction_bits[PC] |= BCODE_VERIFIED; + instruction_bits [PC] |= BCODE_VERIFIED; eh_ranges = find_handler (oldpc); - op_code = byte_ops[PC++]; + op_code = byte_ops [PC++]; switch (op_code) { int is_static, is_putting; + case OPCODE_nop: break; + case OPCODE_iconst_m1: case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2: case OPCODE_iconst_3: case OPCODE_iconst_4: case OPCODE_iconst_5: i = op_code - OPCODE_iconst_0; goto push_int; push_int: - if (byte_ops[PC] == OPCODE_newarray - || byte_ops[PC] == OPCODE_anewarray) + if (byte_ops [PC] == OPCODE_newarray + || byte_ops [PC] == OPCODE_anewarray) int_value = i; PUSH_TYPE (int_type_node); break; + case OPCODE_lconst_0: case OPCODE_lconst_1: PUSH_TYPE (long_type_node); break; + case OPCODE_fconst_0: case OPCODE_fconst_1: case OPCODE_fconst_2: PUSH_TYPE (float_type_node); break; + case OPCODE_dconst_0: case OPCODE_dconst_1: PUSH_TYPE (double_type_node); break; + case OPCODE_bipush: i = IMMEDIATE_s1; goto push_int; + case OPCODE_sipush: i = IMMEDIATE_s2; goto push_int; + case OPCODE_iload: type = int_type_node; goto general_load; case OPCODE_lload: type = long_type_node; goto general_load; case OPCODE_fload: type = float_type_node; goto general_load; @@ -652,13 +673,13 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) >= DECL_MAX_LOCALS (current_function_decl))) VERIFICATION_ERROR_WITH_INDEX ("invalid local variable index %d in load"); - tmp = type_map[index]; + tmp = type_map [index]; if (tmp == TYPE_UNKNOWN) VERIFICATION_ERROR_WITH_INDEX ("loading local variable %d which has unknown type"); else if (tmp == TYPE_SECOND || (TYPE_IS_WIDE (type) - && type_map[index+1] != void_type_node) + && type_map [index+1] != void_type_node) || (type == ptr_type_node ? TREE_CODE (tmp) != POINTER_TYPE : type == int_type_node @@ -707,21 +728,23 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) return 0; } POP_TYPE_CONV (type, type, NULL); - type_map[index] = type; + type_map [index] = type; - /* If local variable changed, we need to reconsider eh handlers. */ + /* If a local variable has changed, we need to reconsider exception + handlers. */ prev_eh_ranges = NULL_EH_RANGE; /* Allocate decl and rtx for this variable now, so if we're not - optimizing, we get a temporary that survives the whole method. */ + optimizing, we get a temporary that survives the whole method. */ find_local_variable (index, type, oldpc); if (TYPE_IS_WIDE (type)) - type_map[index+1] = TYPE_SECOND; + type_map [index+1] = TYPE_SECOND; + /* ... fall through to note_used ... */ note_used: /* For store or load, note that local variable INDEX is used. - This is needed to verify try-finally sub-routines. */ + This is needed to verify try-finally subroutines. */ if (current_subr) { tree vec = LABEL_RETURN_TYPE_STATE (current_subr); @@ -772,15 +795,18 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) type = double_type_node; goto binop; case OPCODE_dneg: type = double_type_node; goto unop; + unop: pop_type (type); PUSH_TYPE (type); break; + binop: pop_type (type); pop_type (type); PUSH_TYPE (type); break; + case OPCODE_lshl: case OPCODE_lshr: case OPCODE_lushr: @@ -788,17 +814,19 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) pop_type (long_type_node); PUSH_TYPE (long_type_node); break; + case OPCODE_iinc: index = wide ? 
IMMEDIATE_u2 : IMMEDIATE_u1; PC += wide + 1; wide = 0; if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl)) VERIFICATION_ERROR ("invalid local variable index in iinc"); - tmp = type_map[index]; + tmp = type_map [index]; if (tmp == NULL_TREE || ! INTEGRAL_TYPE_P (tmp) || TYPE_PRECISION (tmp) > 32) VERIFICATION_ERROR ("invalid local variable type in iinc"); break; + case OPCODE_i2l: pop_type (int_type_node); PUSH_TYPE (long_type_node); break; case OPCODE_i2f: @@ -823,6 +851,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) pop_type (double_type_node); PUSH_TYPE (long_type_node); break; case OPCODE_d2f: pop_type (double_type_node); PUSH_TYPE (float_type_node); break; + case OPCODE_lcmp: type = long_type_node; goto compare; case OPCODE_fcmpl: @@ -834,6 +863,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) compare: pop_type (type); pop_type (type); PUSH_TYPE (int_type_node); break; + case OPCODE_ifeq: case OPCODE_ifne: case OPCODE_iflt: @@ -855,15 +885,18 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) case OPCODE_if_acmpne: pop_type (object_ptr_type_node); pop_type (object_ptr_type_node); goto cond; + cond: PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2)); break; + case OPCODE_goto: PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s2)); INVALIDATE_PC; break; + case OPCODE_wide: - switch (byte_ops[PC]) + switch (byte_ops [PC]) { case OPCODE_iload: case OPCODE_lload: case OPCODE_fload: case OPCODE_dload: case OPCODE_aload: @@ -877,6 +910,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) VERIFICATION_ERROR ("invalid use of wide instruction"); } break; + case OPCODE_return: type = void_type_node; goto ret; case OPCODE_ireturn: if ((TREE_CODE (return_type) == BOOLEAN_TYPE @@ -896,13 +930,15 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) else type = NULL_TREE; goto ret; + ret: if (type != return_type) VERIFICATION_ERROR ("incorrect ?return opcode"); if (type != void_type_node) - POP_TYPE(type, "return value has wrong type"); + POP_TYPE (type, "return value has wrong type"); INVALIDATE_PC; break; + case OPCODE_getstatic: is_putting = 0; is_static = 1; goto field; case OPCODE_putstatic: is_putting = 1; is_static = 1; goto field; case OPCODE_getfield: is_putting = 0; is_static = 0; goto field; @@ -911,69 +947,90 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) { tree field_signature, field_type; index = IMMEDIATE_u2; - if (index <= 0 || index >= JPOOL_SIZE(current_jcf)) + + if (index <= 0 || index >= JPOOL_SIZE (current_jcf)) VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d"); + if (JPOOL_TAG (current_jcf, index) != CONSTANT_Fieldref) VERIFICATION_ERROR ("field instruction does not reference a Fieldref"); - field_signature = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index); + + field_signature + = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index); + field_type = get_type_from_signature (field_signature); + if (is_putting) POP_TYPE (field_type, "incorrect type for field"); + if (! is_static) { - int clindex = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, - index); + int clindex + = COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, index); + tree self_type = get_class_constant (current_jcf, clindex); + /* Defer actual checking until next pass. */ - POP_TYPE(self_type, "incorrect type for field reference"); + POP_TYPE (self_type, "incorrect type for field reference"); } + if (! 
is_putting) PUSH_TYPE (field_type); break; } + case OPCODE_new: PUSH_TYPE (get_class_constant (jcf, IMMEDIATE_u2)); break; + case OPCODE_dup: wide = 1; index = 0; goto dup; case OPCODE_dup_x1: wide = 1; index = 1; goto dup; case OPCODE_dup_x2: wide = 1; index = 2; goto dup; case OPCODE_dup2: wide = 2; index = 0; goto dup; case OPCODE_dup2_x1: wide = 2; index = 1; goto dup; case OPCODE_dup2_x2: wide = 2; index = 2; goto dup; + dup: if (wide + index > stack_pointer) VERIFICATION_ERROR ("stack underflow - dup* operation"); type_stack_dup (wide, index); wide = 0; break; + case OPCODE_pop: index = 1; goto pop; case OPCODE_pop2: index = 2; goto pop; + pop: if (stack_pointer < index) VERIFICATION_ERROR ("stack underflow"); stack_pointer -= index; break; + case OPCODE_swap: if (stack_pointer < 2) VERIFICATION_ERROR ("stack underflow (in swap)"); else { - tree type1 = stack_type_map[stack_pointer - 1]; - tree type2 = stack_type_map[stack_pointer - 2]; + tree type1 = stack_type_map [stack_pointer - 1]; + tree type2 = stack_type_map [stack_pointer - 2]; + if (type1 == void_type_node || type2 == void_type_node) VERIFICATION_ERROR ("verifier (swap): double or long value"); - stack_type_map[stack_pointer - 2] = type1; - stack_type_map[stack_pointer - 1] = type2; + + stack_type_map [stack_pointer - 2] = type1; + stack_type_map [stack_pointer - 1] = type2; } break; + case OPCODE_ldc: index = IMMEDIATE_u1; goto ldc; case OPCODE_ldc2_w: case OPCODE_ldc_w: index = IMMEDIATE_u2; goto ldc; + ldc: - if (index <= 0 || index >= JPOOL_SIZE(current_jcf)) + if (index <= 0 || index >= JPOOL_SIZE (current_jcf)) VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d in ldc"); + int_value = -1; switch (JPOOL_TAG (current_jcf, index) & ~CONSTANT_ResolvedFlag) { @@ -1005,10 +1062,13 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) tree sig, method_name, method_type, self_type; int self_is_interface, tag; index = IMMEDIATE_u2; - if (index <= 0 || index >= JPOOL_SIZE(current_jcf)) + + if (index <= 0 || index >= JPOOL_SIZE (current_jcf)) VERIFICATION_ERROR_WITH_INDEX ("bad constant pool index %d for invoke"); + tag = JPOOL_TAG (current_jcf, index); + if (op_code == OPCODE_invokeinterface) { if (tag != CONSTANT_InterfaceMethodref) @@ -1020,18 +1080,25 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) if (tag != CONSTANT_Methodref) VERIFICATION_ERROR ("invoke does not reference a Methodref"); } + sig = COMPONENT_REF_SIGNATURE (&current_jcf->cpool, index); - self_type = get_class_constant - (current_jcf, COMPONENT_REF_CLASS_INDEX (&current_jcf->cpool, - index)); + + self_type + = get_class_constant (current_jcf, + COMPONENT_REF_CLASS_INDEX + (&current_jcf->cpool, index)); + if (! CLASS_LOADED_P (self_type)) load_class (self_type, 1); + self_is_interface = CLASS_INTERFACE (TYPE_NAME (self_type)); method_name = COMPONENT_REF_NAME (&current_jcf->cpool, index); method_type = parse_signature_string (IDENTIFIER_POINTER (sig), IDENTIFIER_LENGTH (sig)); + if (TREE_CODE (method_type) != FUNCTION_TYPE) VERIFICATION_ERROR ("bad method signature"); + pmessage = pop_argument_types (TYPE_ARG_TYPES (method_type)); if (pmessage != NULL) { @@ -1039,10 +1106,11 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) goto pop_type_error; } - /* Can't invoke <clinit> */ + /* Can't invoke <clinit>. */ if (ID_CLINIT_P (method_name)) VERIFICATION_ERROR ("invoke opcode can't invoke <clinit>"); - /* Apart invokespecial, can't invoke <init> */ + + /* Apart from invokespecial, can't invoke <init>. 
*/ if (op_code != OPCODE_invokespecial && ID_INIT_P (method_name)) VERIFICATION_ERROR ("invoke opcode can't invoke <init>"); @@ -1060,11 +1128,14 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) if (!nargs || notZero) VERIFICATION_ERROR ("invalid argument number in invokeinterface"); + /* If we verify/resolve the constant pool, as we should, this test (and the one just following) are redundant. */ if (! self_is_interface) - VERIFICATION_ERROR ("invokeinterface calls method not in interface"); + VERIFICATION_ERROR + ("invokeinterface calls method not in interface"); break; + default: if (self_is_interface) VERIFICATION_ERROR ("method in interface called"); @@ -1077,9 +1148,9 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) } case OPCODE_arraylength: - /* Type checking actually made during code generation */ - pop_type( ptr_type_node ); - PUSH_TYPE( int_type_node ); + /* Type checking actually made during code generation. */ + pop_type (ptr_type_node); + PUSH_TYPE (int_type_node); break; /* Q&D verification *or* more checking done during code generation @@ -1093,8 +1164,9 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) case OPCODE_bastore: type = int_type_node; goto astore; case OPCODE_castore: type = int_type_node; goto astore; case OPCODE_sastore: type = int_type_node; goto astore; + astore: - /* FIXME - need better verification here */ + /* FIXME - need better verification here. */ pop_type (type); /* new value */ pop_type (int_type_node); /* index */ pop_type (ptr_type_node); /* array */ @@ -1110,6 +1182,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) case OPCODE_baload: type = promote_type (byte_type_node); goto aload; case OPCODE_caload: type = promote_type (char_type_node); goto aload; case OPCODE_saload: type = promote_type (short_type_node); goto aload; + aload: pop_type (int_type_node); tmp = pop_type (ptr_type_node); @@ -1135,9 +1208,9 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) newarray: if (int_value >= 0 && prevpc >= 0) { - /* If previous instruction pushed int constant, + /* If the previous instruction pushed an int constant, we want to use it. */ - switch (byte_ops[prevpc]) + switch (byte_ops [prevpc]) { case OPCODE_iconst_0: case OPCODE_iconst_1: case OPCODE_iconst_2: case OPCODE_iconst_3: @@ -1151,6 +1224,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) } else int_value = -1; + type = build_java_array_type (type, int_value); pop_type (int_type_node); PUSH_TYPE (type); @@ -1162,11 +1236,13 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) index = IMMEDIATE_u2; ndim = IMMEDIATE_u1; - if( ndim < 1 ) - VERIFICATION_ERROR ("number of dimension lower that 1 in multianewarray" ); + if (ndim < 1) + VERIFICATION_ERROR + ("number of dimension lower that 1 in multianewarray" ); - for( i = 0; i < ndim; i++ ) + for (i = 0; i < ndim; i++) pop_type (int_type_node); + PUSH_TYPE (get_class_constant (current_jcf, index)); break; } @@ -1176,7 +1252,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) break; case OPCODE_athrow: - /* FIXME: athrow also empties the stack. */ + /* FIXME: athrow also empties the stack. 
*/ POP_TYPE (throwable_type_node, "missing throwable at athrow" ); INVALIDATE_PC; break; @@ -1187,6 +1263,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) type = get_class_constant (current_jcf, IMMEDIATE_u2); PUSH_TYPE (type); break; + case OPCODE_instanceof: POP_TYPE (object_ptr_type_node, "instanceof operand is not a pointer"); @@ -1199,12 +1276,14 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) jint low, high; POP_TYPE (int_type_node, "missing int for tableswitch"); + while (PC%4) { - if (byte_ops[PC++]) + if (byte_ops [PC++]) VERIFICATION_ERROR ("bad alignment in tableswitch pad"); } - PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4)); + + PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4)); low = IMMEDIATE_s4; high = IMMEDIATE_s4; @@ -1213,6 +1292,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) while (low++ <= high) PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4)); + INVALIDATE_PC; break; } @@ -1222,13 +1302,14 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) jint npairs, last = 0, not_registered = 1; POP_TYPE (int_type_node, "missing int for lookupswitch"); + while (PC%4) { - if (byte_ops[PC++]) + if (byte_ops [PC++]) VERIFICATION_ERROR ("bad alignment in lookupswitch pad"); } - PUSH_PENDING (lookup_label (oldpc+IMMEDIATE_s4)); + PUSH_PENDING (lookup_label (oldpc + IMMEDIATE_s4)); npairs = IMMEDIATE_s4; if (npairs < 0) @@ -1237,6 +1318,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) while (npairs--) { int match = IMMEDIATE_s4; + if (not_registered) not_registered = 0; else if (last >= match) @@ -1273,13 +1355,16 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) int nlocals = DECL_MAX_LOCALS (current_function_decl); index = nlocals + DECL_MAX_STACK (current_function_decl); return_type_map = make_tree_vec (index); + while (index > nlocals) TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN; + while (index > 0) TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED; + LABEL_RETURN_LABEL (target) = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target)); - LABEL_PC (LABEL_RETURN_LABEL (target)) = -1; + LABEL_PC (LABEL_RETURN_LABEL (target)) = INVALID_PC; LABEL_RETURN_TYPE_STATE (target) = return_type_map; LABEL_IS_SUBR_START (target) = 1; LABEL_IN_SUBR (target) = 1; @@ -1315,7 +1400,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) while (--len >= 0) { if (TREE_VEC_ELT (return_map, len) != TYPE_UNUSED) - type_map[len] = TREE_VEC_ELT (return_map, len); + type_map [len] = TREE_VEC_ELT (return_map, len); } current_subr = LABEL_SUBR_CONTEXT (target); if (RETURN_MAP_ADJUSTED (return_map)) @@ -1325,33 +1410,37 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) INVALIDATE_PC; } break; + case OPCODE_ret: - if (current_subr == NULL) + if (current_subr == NULL_TREE) VERIFICATION_ERROR ("ret instruction not in a jsr subroutine"); else { tree ret_map = LABEL_RETURN_TYPE_STATE (current_subr); - int size = DECL_MAX_LOCALS(current_function_decl)+stack_pointer; + int size + = DECL_MAX_LOCALS (current_function_decl) + stack_pointer; index = wide ? 
IMMEDIATE_u2 : IMMEDIATE_u1; wide = 0; INVALIDATE_PC; if (index < 0 || index >= DECL_MAX_LOCALS (current_function_decl) - || type_map[index] != TYPE_RETURN_ADDR) + || type_map [index] != TYPE_RETURN_ADDR) VERIFICATION_ERROR ("invalid ret index"); /* The next chunk of code is similar to an inlined version of - * merge_type_state (LABEL_RETURN_LABEL (current_subr)). - * The main differences are that LABEL_RETURN_LABEL is - * pre-allocated by the jsr (but we don't know the size then); - * and that we have to handle TYPE_UNUSED. */ + merge_type_state (LABEL_RETURN_LABEL (current_subr)). + The main differences are that LABEL_RETURN_LABEL is + pre-allocated by the jsr (but we don't know the size then); + and that we have to handle TYPE_UNUSED. */ if (! RETURN_MAP_ADJUSTED (ret_map)) - { /* First return from this subroutine - fix stack pointer. */ + { + /* First return from this subroutine - fix stack + pointer. */ TREE_VEC_LENGTH (ret_map) = size; for (index = size; --index >= 0; ) { if (TREE_VEC_ELT (ret_map, index) != TYPE_UNUSED) - TREE_VEC_ELT (ret_map, index) = type_map[index]; + TREE_VEC_ELT (ret_map, index) = type_map [index]; } RETURN_MAP_ADJUSTED (ret_map) = 1; } @@ -1377,10 +1466,9 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) } } } - - } break; + case OPCODE_jsr_w: case OPCODE_ret_w: default: @@ -1393,17 +1481,18 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) /* The following test is true if we have entered or exited an exception handler range *or* we have done a store to a local variable. In either case we need to consider any exception handlers that - might "follow" this instruction. */ + might "follow" this instruction. */ if (eh_ranges != prev_eh_ranges) { int save_stack_pointer = stack_pointer; int index = DECL_MAX_LOCALS (current_function_decl); - tree save_type = type_map[index]; + tree save_type = type_map [index]; tree save_current_subr = current_subr; struct eh_range *ranges = find_handler (oldpc); stack_pointer = 1; - for (; ranges != NULL_EH_RANGE; ranges = ranges->outer) + + for ( ; ranges != NULL_EH_RANGE; ranges = ranges->outer) { tree chain = ranges->handlers; @@ -1420,7 +1509,7 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) have that the current_subr is entirely within the catch range. In that case we can assume if that if a caller (the jsr) of a subroutine is within the catch range, then the handler is - *not* part of the subroutine, and vice versa. */ + *not* part of the subroutine, and vice versa. */ current_subr = save_current_subr; for ( ; current_subr != NULL_TREE; @@ -1428,31 +1517,35 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) { tree return_labels = LABEL_RETURN_LABELS (current_subr); /* There could be multiple return_labels, but - we only need to check one. */ + we only need to check one. 
*/ int return_pc = LABEL_PC (TREE_VALUE (return_labels)); if (return_pc <= ranges->start_pc || return_pc > ranges->end_pc) break; } - for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain)) + for ( ; chain != NULL_TREE; chain = TREE_CHAIN (chain)) { tree handler = TREE_VALUE (chain); tree type = TREE_PURPOSE (chain); + if (type == NULL_TREE) /* a finally handler */ type = throwable_type_node; - type_map[index] = promote_type (type); + + type_map [index] = promote_type (type); PUSH_PENDING (handler); } } stack_pointer = save_stack_pointer; current_subr = save_current_subr; - type_map[index] = save_type; + type_map [index] = save_type; prev_eh_ranges = eh_ranges; } } + return 1; + pop_type_error: error ("verification error at PC=%d", oldpc); if (message != NULL) @@ -1460,16 +1553,20 @@ verify_jvm_instructions (JCF* jcf, const unsigned char *byte_ops, long length) error ("%s", pmessage); free (pmessage); return 0; + stack_overflow: message = "stack overflow"; goto verify_error; + bad_pc: message = "program counter out of range"; goto verify_error; + error_with_index: error ("verification error at PC=%d", oldpc); error (message, index); return 0; + verify_error: error ("verification error at PC=%d", oldpc); error ("%s", message); -- 2.7.4
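Note for reviewers: besides the current_subr initialisation, the one hunk above that changes code shape rather than just formatting is the merge_type_state cleanup, which removes an inner if (!vec) test that can never be false once the enclosing vec == NULL_TREE check has succeeded. Below is a minimal, self-contained C sketch of that before/after pattern; the names (struct type_state, alloc_state, get_state_old, get_state_new) are hypothetical stand-ins for illustration only, not code from verify.c.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-in for a label's cached type-state vector.  */
struct type_state { int length; };

static struct type_state *alloc_state (int length)
{
  struct type_state *s = malloc (sizeof *s);
  s->length = length;
  return s;
}

/* Before the patch: the nested 'if (!vec)' repeats the enclosing
   'vec == NULL' test, so it is always true when reached.  */
static struct type_state *get_state_old (struct type_state *vec, int len)
{
  if (vec == NULL)
    {
      if (!vec)
        vec = alloc_state (len);
    }
  return vec;
}

/* After the patch: same behaviour, one level of nesting removed.  */
static struct type_state *get_state_new (struct type_state *vec, int len)
{
  if (vec == NULL)
    vec = alloc_state (len);
  return vec;
}

int main (void)
{
  struct type_state *a = get_state_old (NULL, 4);
  struct type_state *b = get_state_new (NULL, 4);
  printf ("%d %d\n", a->length, b->length);   /* prints "4 4" */
  free (a);
  free (b);
  return 0;
}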