src/share/vm/opto/graphKit.cpp

changeset:   1005:dca06e7f503d
parent:       979:82a980778b92
parent:      1000:7fe62bb75bf4
child:       1014:0fbdb4381b99
child:       1040:98cb887364d3

comparing 984:fe3d7c11b4b7 with 1005:dca06e7f503d

@@ -1834,14 +1834,11 @@
          "Only one we handle so far.");
   CardTableModRefBS* ct =
     (CardTableModRefBS*)(Universe::heap()->barrier_set());
   Node *b = _gvn.transform(new (C, 3) URShiftXNode( cast, _gvn.intcon(CardTableModRefBS::card_shift) ));
   // We store into a byte array, so do not bother to left-shift by zero
-  // Get base of card map
-  assert(sizeof(*ct->byte_map_base) == sizeof(jbyte),
-         "adjust this code");
-  Node *c = makecon(TypeRawPtr::make((address)ct->byte_map_base));
+  Node *c = byte_map_base_node();
   // Combine
   Node *sb_ctl = control();
   Node *sb_adr = _gvn.transform(new (C, 4) AddPNode( top()/*no base ptr*/, c, b ));
   Node *sb_val = _gvn.intcon(0);
   // Smash zero into card
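
The hunk above replaces the locally built card-map base constant with the new byte_map_base_node() helper; the remaining nodes (URShiftXNode, AddPNode, the store of zero) are the usual card-table post-barrier arithmetic. A minimal standalone sketch, not HotSpot code, with card_shift and the biased byte_map_base taken as assumed parameters purely for illustration:

    #include <cstdint>

    // Sketch: mark the card covering 'stored_into_addr' as dirty (0), mirroring
    // the IR above: b = addr >> card_shift; adr = byte_map_base + b; *adr = 0.
    inline void dirty_card(volatile uint8_t* byte_map_base,    // base of the byte card map
                           uintptr_t stored_into_addr,         // object with an updated oop field
                           int card_shift) {                    // e.g. 9 => one card per 512 heap bytes
      uintptr_t card_index = stored_into_addr >> card_shift;    // URShiftXNode above
      byte_map_base[card_index] = 0;                            // AddPNode + store: "smash zero into card"
    }
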
@@ -2943,20 +2940,14 @@
   if (!tklass) tklass = TypeKlassPtr::OBJECT;
   const TypeOopPtr* oop_type = tklass->as_instance_type();
 
   // Now generate allocation code
 
-  // With escape analysis, the entire memory state is needed to be able to
-  // eliminate the allocation. If the allocations cannot be eliminated, this
-  // will be optimized to the raw slice when the allocation is expanded.
-  Node *mem;
-  if (C->do_escape_analysis()) {
-    mem = reset_memory();
-    set_all_memory(mem);
-  } else {
-    mem = memory(Compile::AliasIdxRaw);
-  }
+  // The entire memory state is needed for slow path of the allocation
+  // since GC and deoptimization can happened.
+  Node *mem = reset_memory();
+  set_all_memory(mem); // Create new memory state
 
   AllocateNode* alloc
     = new (C, AllocateNode::ParmLimit)
          AllocateNode(C, AllocateNode::alloc_type(),
                       control(), mem, i_o(),
@@ -3089,20 +3080,14 @@
     (*return_size_val) = size;
   }
 
   // Now generate allocation code
 
-  // With escape analysis, the entire memory state is needed to be able to
-  // eliminate the allocation. If the allocations cannot be eliminated, this
-  // will be optimized to the raw slice when the allocation is expanded.
-  Node *mem;
-  if (C->do_escape_analysis()) {
-    mem = reset_memory();
-    set_all_memory(mem);
-  } else {
-    mem = memory(Compile::AliasIdxRaw);
-  }
+  // The entire memory state is needed for slow path of the allocation
+  // since GC and deoptimization can happened.
+  Node *mem = reset_memory();
+  set_all_memory(mem); // Create new memory state
 
   // Create the AllocateArrayNode and its result projections
   AllocateArrayNode* alloc
     = new (C, AllocateArrayNode::ParmLimit)
         AllocateArrayNode(C, AllocateArrayNode::alloc_type(),
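
The two allocation hunks above make the same change: instead of passing only the raw memory slice into the allocation node when escape analysis is off, the kit now always resets and passes the entire memory state, because the allocation's slow path can trigger a GC or a deoptimization that touches arbitrary slices. The toy model below (invented names, not C2 code) only illustrates that reasoning: a call with such side effects must be modelled as consuming and producing every memory slice, not just one.

    #include <map>
    #include <string>

    struct MemState {                          // stand-in for the kit's merged memory state
      std::map<std::string, int> slices;       // e.g. "raw", "oop fields", "int[]", ...
    };

    // A slow-path allocation may run a GC or deoptimize; either can read or
    // rewrite any slice, so the conservative model takes and returns the
    // whole state rather than only the raw slice.
    MemState slow_path_allocate(MemState all_memory) {
      for (auto& slice : all_memory.slices)
        slice.second += 1;                     // arbitrary effect on every slice
      return all_memory;
    }
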
@@ -3363,18 +3348,10 @@
   // Get the alias_index for raw card-mark memory
   const TypePtr* card_type = TypeRawPtr::BOTTOM;
 
   const TypeFunc *tf = OptoRuntime::g1_wb_post_Type();
 
-  // Get the address of the card table
-  CardTableModRefBS* ct =
-    (CardTableModRefBS*)(Universe::heap()->barrier_set());
-  Node *card_table = __ makecon(TypeRawPtr::make((address)ct->byte_map_base));
-  // Get base of card map
-  assert(sizeof(*ct->byte_map_base) == sizeof(jbyte), "adjust this code");
-
-
   // Offsets into the thread
   const int index_offset = in_bytes(JavaThread::dirty_card_queue_offset() +
                                     PtrQueue::byte_offset_of_index());
   const int buffer_offset = in_bytes(JavaThread::dirty_card_queue_offset() +
                                      PtrQueue::byte_offset_of_buf());
@@ -3400,11 +3377,11 @@
 
   // Divide pointer by card size
   Node* card_offset = __ URShiftX( cast, __ ConI(CardTableModRefBS::card_shift) );
 
   // Combine card table base and card offset
-  Node *card_adr = __ AddP(no_base, card_table, card_offset );
+  Node *card_adr = __ AddP(no_base, byte_map_base_node(), card_offset );
 
   // If we know the value being stored does it cross regions?
 
   if (val != NULL) {
     // Does the store cause us to cross regions?
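
The two G1 hunks likewise drop the locally built card_table constant in favour of byte_map_base_node(). The surrounding barrier still computes the card address and, when the card needs refinement, records it in the current thread's dirty card queue, which is what index_offset and buffer_offset locate. A rough sketch of that enqueue step follows, with invented names and a simplified queue layout, purely for orientation:

    #include <cstddef>

    // Simplified stand-in for the per-thread dirty card queue (PtrQueue).
    struct DirtyCardQueue {
      size_t _index;       // byte index of the next free slot, counting down to 0
      void** _buf;         // buffer of card addresses
    };

    // Fast path of the enqueue: record the card address if there is room;
    // when the buffer is full, the generated code instead calls the runtime
    // entry (g1_wb_post, whose TypeFunc is fetched above) to flush it.
    inline bool try_enqueue(DirtyCardQueue* q, void* card_adr) {
      if (q->_index == 0) return false;            // full: take the slow path
      q->_index -= sizeof(void*);
      q->_buf[q->_index / sizeof(void*)] = card_adr;
      return true;
    }
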
