OpenJDK / valhalla / valhalla
changeset 53561:530ea41023b2 lworld
8216270: [lworld] C2 compilation fails with Error mixing types: NULL and valuetype[3]
author | thartmann
date | Tue, 08 Jan 2019 17:09:51 +0100
parents | 09d9f9085910
children | 5e2f5c15f970
files | src/hotspot/share/opto/callnode.cpp src/hotspot/share/opto/compile.cpp src/hotspot/share/opto/macro.cpp src/hotspot/share/opto/parse1.cpp src/hotspot/share/opto/valuetypenode.cpp src/hotspot/share/opto/valuetypenode.hpp test/hotspot/jtreg/compiler/valhalla/valuetypes/TestCallingConvention.java
diffstat | 7 files changed, 65 insertions(+), 77 deletions(-)
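The shape that triggers the reported failure is an inlined callee that returns either null or a freshly created value type, so the caller's return Phi merges NULL with a scalarized value type in C2. The sketch below is a plain-Java analogue of that shape for orientation only: the class and method names (NullableValueReturn, MyBox) are illustrative, an ordinary class stands in for a value type, and the actual reproducer is the new test26 added to TestCallingConvention.java at the end of this changeset, which requires the Valhalla lworld build and its MyValue2 test classes.

// Plain-Java analogue of the failing shape (an ordinary class stands in for a value type).
public class NullableValueReturn {

    static final class MyBox {
        final int x;
        MyBox(int x) { this.x = x; }
        long hash() { return x; }
    }

    // In the actual regression test (test26 below) this callee is @ForceInline and
    // returns MyValue2.box, so the inlined return merges NULL with a scalarized value type.
    static MyBox callee(boolean b) {
        return b ? null : new MyBox(42);
    }

    static MyBox test(boolean b) {
        return callee(b);
    }

    public static void main(String[] args) {
        // Exercise both paths so the caller gets compiled with the callee inlined.
        for (int i = 0; i < 100_000; i++) {
            test((i & 1) == 0);
        }
        System.out.println(test(true) == null && test(false).hash() == 42L);
    }
}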
--- a/src/hotspot/share/opto/callnode.cpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/callnode.cpp Tue Jan 08 17:09:51 2019 +0100
@@ -1194,31 +1194,7 @@
 //------------------------------Ideal------------------------------------------
 // Skip over any collapsed Regions
 Node *SafePointNode::Ideal(PhaseGVN *phase, bool can_reshape) {
-  if (remove_dead_region(phase, can_reshape)) {
-    return this;
-  }
-  if (jvms() != NULL) {
-    bool progress = false;
-    // A ValueTypeNode that was already heap allocated in the debug
-    // info? Reference the object directly. Helps removal of useless
-    // value type allocations with incremental inlining.
-    for (uint i = jvms()->debug_start(); i < jvms()->debug_end(); i++) {
-      Node *arg = in(i);
-      if (arg->is_ValueType()) {
-        ValueTypeNode* vt = arg->as_ValueType();
-        Node* in_oop = vt->get_oop();
-        const Type* oop_type = phase->type(in_oop);
-        if (!TypePtr::NULL_PTR->higher_equal(oop_type)) {
-          set_req(i, in_oop);
-          progress = true;
-        }
-      }
-    }
-    if (progress) {
-      return this;
-    }
-  }
-  return NULL;
+  return remove_dead_region(phase, can_reshape) ? this : NULL;
 }
 
 //------------------------------Identity---------------------------------------
--- a/src/hotspot/share/opto/compile.cpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/compile.cpp Tue Jan 08 17:09:51 2019 +0100
@@ -2071,7 +2071,7 @@
     // Make value types scalar in safepoints
     while (_value_type_nodes->size() != 0) {
       ValueTypeBaseNode* vt = _value_type_nodes->pop()->as_ValueTypeBase();
-      vt->make_scalar_in_safepoints(igvn.C->root(), &igvn);
+      vt->make_scalar_in_safepoints(&igvn);
       if (vt->is_ValueTypePtr()) {
         igvn.replace_node(vt, vt->get_oop());
       }
--- a/src/hotspot/share/opto/macro.cpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/macro.cpp Tue Jan 08 17:09:51 2019 +0100
@@ -966,7 +966,7 @@
   // Scalarize value types that were added to the safepoint
   for (uint i = 0; i < value_worklist.size(); ++i) {
     Node* vt = value_worklist.at(i);
-    vt->as_ValueType()->make_scalar_in_safepoints(C->root(), &_igvn);
+    vt->as_ValueType()->make_scalar_in_safepoints(&_igvn);
   }
   return true;
 }
--- a/src/hotspot/share/opto/parse1.cpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/parse1.cpp Tue Jan 08 17:09:51 2019 +0100
@@ -1118,12 +1118,7 @@
       BasicType ret_bt = method()->return_type()->basic_type();
       ret_phi = mask_int_value(ret_phi, ret_bt, &_gvn);
     }
-    if (_caller->has_method() && ret_type->is_valuetypeptr()) {
-      // Inlined methods return a ValueTypeNode
-      _exits.push_node(T_VALUETYPE, ret_phi);
-    } else {
-      _exits.push_node(ret_type->basic_type(), ret_phi);
-    }
+    _exits.push_node(ret_type->basic_type(), ret_phi);
   }
 
   // Note: Logic for creating and optimizing the ReturnNode is in Compile.
@@ -2329,20 +2324,23 @@
   // frame pointer is always same, already captured
   if (value != NULL) {
     Node* phi = _exits.argument(0);
-    const TypeOopPtr* tr = phi->bottom_type()->isa_oopptr();
-    if (tf()->returns_value_type_as_fields() && !_caller->has_method() && !value->is_ValueType()) {
-      // TODO there should be a checkcast in between, right?
-      value = ValueTypeNode::make_from_oop(this, value, phi->bottom_type()->is_valuetype()->value_klass());
-    }
-    if (value->is_ValueType() && !_caller->has_method()) {
-      // Value type is returned as oop from root method
-      if (tf()->returns_value_type_as_fields()) {
-        // Make sure non-flattened value type fields are allocated
+    const Type* return_type = phi->bottom_type();
+    const TypeOopPtr* tr = return_type->isa_oopptr();
+    if (return_type->isa_valuetype()) {
+      // Value type is returned as fields, make sure it is scalarized
+      if (!value->is_ValueType()) {
+        value = ValueTypeNode::make_from_oop(this, value, return_type->is_valuetype()->value_klass());
+      }
+      if (!_caller->has_method()) {
+        // Value type is returned as fields from root method, make
+        // sure all non-flattened value type fields are allocated.
+        assert(tf()->returns_value_type_as_fields(), "must be returned as fields");
        value = value->as_ValueType()->allocate_fields(this);
-      } else {
-        // Make sure value type is allocated
-        value = value->as_ValueType()->allocate(this)->get_oop();
       }
+    } else if (value->is_ValueType()) {
+      // Value type is returned as oop, make sure it is allocated
+      assert(tr && tr->can_be_value_type(), "must return a value type pointer");
+      value = ValueTypePtrNode::make_from_value_type(this, value->as_ValueType());
     } else if (tr && tr->isa_instptr() && tr->klass()->is_loaded() && tr->klass()->is_interface()) {
       // If returning oops to an interface-return, there is a silent free
       // cast from oop to interface allowed by the Verifier. Make it explicit here.
@@ -2358,10 +2356,10 @@
       // Handle returns of oop-arrays to an arrays-of-interface return
       const TypeInstPtr* phi_tip;
       const TypeInstPtr* val_tip;
-      Type::get_arrays_base_elements(phi->bottom_type(), value->bottom_type(), &phi_tip, &val_tip);
+      Type::get_arrays_base_elements(return_type, value->bottom_type(), &phi_tip, &val_tip);
       if (phi_tip != NULL && phi_tip->is_loaded() && phi_tip->klass()->is_interface() &&
           val_tip != NULL && val_tip->is_loaded() && !val_tip->klass()->is_interface()) {
-        value = _gvn.transform(new CheckCastPPNode(0, value, phi->bottom_type()));
+        value = _gvn.transform(new CheckCastPPNode(0, value, return_type));
       }
     }
     phi->add_req(value);
--- a/src/hotspot/share/opto/valuetypenode.cpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/valuetypenode.cpp Tue Jan 08 17:09:51 2019 +0100
@@ -195,7 +195,7 @@
   return field->is_flattenable();
 }
 
-int ValueTypeBaseNode::make_scalar_in_safepoint(Unique_Node_List& worklist, SafePointNode* sfpt, Node* root, PhaseGVN* gvn) {
+int ValueTypeBaseNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_List& worklist, SafePointNode* sfpt) {
   ciValueKlass* vk = value_klass();
   uint nfields = vk->nof_nonstatic_fields();
   JVMState* jvms = sfpt->jvms();
@@ -209,48 +209,45 @@
                                                                   NULL,
 #endif
                                                                   first_ind, nfields);
-  sobj->init_req(0, root);
+  sobj->init_req(0, igvn->C->root());
   // Iterate over the value type fields in order of increasing
   // offset and add the field values to the safepoint.
   for (uint j = 0; j < nfields; ++j) {
     int offset = vk->nonstatic_field_at(j)->offset();
     Node* value = field_value_by_offset(offset, true /* include flattened value type fields */);
     if (value->is_ValueType()) {
-      if (value->as_ValueType()->is_allocated(gvn)) {
-        value = value->as_ValueType()->get_oop();
-      } else {
-        // Add non-flattened value type field to the worklist to process later
-        worklist.push(value);
-      }
+      // Add value type field to the worklist to process later
+      worklist.push(value);
     }
     sfpt->add_req(value);
   }
   jvms->set_endoff(sfpt->req());
-  if (gvn != NULL) {
-    sobj = gvn->transform(sobj)->as_SafePointScalarObject();
-    gvn->igvn_rehash_node_delayed(sfpt);
-  }
+  sobj = igvn->transform(sobj)->as_SafePointScalarObject();
+  igvn->rehash_node_delayed(sfpt);
   return sfpt->replace_edges_in_range(this, sobj, start, end);
 }
 
-void ValueTypeBaseNode::make_scalar_in_safepoints(Node* root, PhaseGVN* gvn) {
+void ValueTypeBaseNode::make_scalar_in_safepoints(PhaseIterGVN* igvn) {
   // Process all safepoint uses and scalarize value type
   Unique_Node_List worklist;
   for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
-    Node* u = fast_out(i);
-    if (u->is_SafePoint() && !u->is_CallLeaf() && (!u->is_Call() || u->as_Call()->has_debug_use(this))) {
-      SafePointNode* sfpt = u->as_SafePoint();
-      Node* in_oop = get_oop();
-      const Type* oop_type = in_oop->bottom_type();
-      assert(Opcode() == Op_ValueTypePtr || !isa_ValueType()->is_allocated(gvn), "already heap allocated value types should be linked directly");
-      int nb = make_scalar_in_safepoint(worklist, sfpt, root, gvn);
+    SafePointNode* sfpt = fast_out(i)->isa_SafePoint();
+    if (sfpt != NULL && !sfpt->is_CallLeaf() && (!sfpt->is_Call() || sfpt->as_Call()->has_debug_use(this))) {
+      int nb = 0;
+      if (is_allocated(igvn) && get_oop()->is_Con()) {
+        // Value type is allocated with a constant oop, link it directly
+        nb = sfpt->replace_edges_in_range(this, get_oop(), sfpt->jvms()->debug_start(), sfpt->jvms()->debug_end());
+        igvn->rehash_node_delayed(sfpt);
+      } else {
+        nb = make_scalar_in_safepoint(igvn, worklist, sfpt);
+      }
       --i; imax -= nb;
     }
   }
 
   // Now scalarize non-flattened fields
   for (uint i = 0; i < worklist.size(); ++i) {
     Node* vt = worklist.at(i);
-    vt->as_ValueType()->make_scalar_in_safepoints(root, gvn);
+    vt->as_ValueType()->make_scalar_in_safepoints(igvn);
   }
 }
@@ -810,13 +807,7 @@
       StoreNode* store = addp->fast_out(k)->isa_Store();
       if (store != NULL && store->outcnt() != 0) {
         // Remove the useless store
-        Node* mem = store->in(MemNode::Memory);
-        Node* val = store->in(MemNode::ValueIn);
-        val = val->is_EncodeP() ? val->in(1) : val;
-        const Type* val_type = igvn->type(val);
-        assert(val_type->is_zero_type() || (val->is_Con() && val_type->make_ptr()->is_valuetypeptr()),
-               "must be zero-type or default value store");
-        igvn->replace_in_uses(store, mem);
+        igvn->replace_in_uses(store, store->in(MemNode::Memory));
       }
     }
   }
--- a/src/hotspot/share/opto/valuetypenode.hpp Mon Jan 07 10:53:29 2019 +0100
+++ b/src/hotspot/share/opto/valuetypenode.hpp Tue Jan 08 17:09:51 2019 +0100
@@ -48,7 +48,7 @@
   // Get the klass defining the field layout of the value type
   virtual ciValueKlass* value_klass() const = 0;
 
-  int make_scalar_in_safepoint(Unique_Node_List& worklist, SafePointNode* sfpt, Node* root, PhaseGVN* gvn);
+  int make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_List& worklist, SafePointNode* sfpt);
 
   // Initialize the value type fields with the inputs or outputs of a MultiNode
   void initialize(GraphKit* kit, MultiNode* multi, ciValueKlass* vk, int base_offset, uint& base_input, bool in);
@@ -78,7 +78,7 @@
   bool field_is_flattenable(uint index) const;
 
   // Replace ValueTypeNodes in debug info at safepoints with SafePointScalarObjectNodes
-  void make_scalar_in_safepoints(Node* root, PhaseGVN* gvn);
+  void make_scalar_in_safepoints(PhaseIterGVN* igvn);
 
   // Store the value type as a flattened (headerless) representation
   void store_flattened(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder = NULL, int holder_offset = 0) const;
--- a/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestCallingConvention.java Mon Jan 07 10:53:29 2019 +0100
+++ b/test/hotspot/jtreg/compiler/valhalla/valuetypes/TestCallingConvention.java Tue Jan 08 17:09:51 2019 +0100
@@ -479,4 +479,27 @@
         MyValue2.box vt = test24();
         Asserts.assertEQ(vt, null);
     }
+
+    // Same as test24 but with control flow and inlining
+    @ForceInline
+    public MyValue2.box test26_callee(boolean b) {
+        if (b) {
+            return null;
+        } else {
+            return MyValue2.createWithFieldsInline(rI, true);
+        }
+    }
+
+    @Test
+    public MyValue2.box test26(boolean b) {
+        return test26_callee(b);
+    }
+
+    @DontCompile
+    public void test26_verifier(boolean warmup) {
+        MyValue2.box vt = test26(true);
+        Asserts.assertEQ(vt, null);
+        vt = test26(false);
+        Asserts.assertEQ(vt.hash(), MyValue2.createWithFieldsInline(rI, true).hash());
+    }
 }