src/cpu/x86/vm/templateTable_x86_64.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File 7071653 Sdiff src/cpu/x86/vm

src/cpu/x86/vm/templateTable_x86_64.cpp

Print this page




 186       break;
 187     case BarrierSet::ModRef:
 188     case BarrierSet::Other:
 189       if (val == noreg) {
 190         __ store_heap_oop_null(obj);
 191       } else {
 192         __ store_heap_oop(obj, val);
 193       }
 194       break;
 195     default      :
 196       ShouldNotReachHere();
 197 
 198   }
 199 }
 200 
 // Builds an Address operand relative to the bytecode pointer (bcp),
 // which the 64-bit interpreter keeps in r13; 'offset' is in bytes.
 201 Address TemplateTable::at_bcp(int offset) {
 // Guard: this template must have declared that it uses the bcp.
 202   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 203   return Address(r13, offset);
 204 }
 205 
 // Rewrites the bytecode at the current bcp to the quicker variant 'bytecode'
 // (value loaded into 'bc' unless load_bc_into_scratch is false, in which
 // case the caller already loaded it).  'scratch' is clobbered.  If JVMTI
 // breakpoints are possible, a _breakpoint byte at bcp must not be
 // overwritten directly; the rewrite is routed through the VM so the
 // breakpoint table keeps the original bytecode.
 206 void TemplateTable::patch_bytecode(Bytecodes::Code bytecode, Register bc,
 207                                    Register scratch,
 208                                    bool load_bc_into_scratch/*=true*/) {
 // Bytecode rewriting can be disabled globally; then this is a no-op.
 209   if (!RewriteBytecodes) {
 210     return;





















 211   }



 212   // the pair bytecodes have already done the load.
 213   if (load_bc_into_scratch) {
 214     __ movl(bc, bytecode);

 215   }
 216   Label patch_done;
 217   if (JvmtiExport::can_post_breakpoint()) {
 218     Label fast_patch;
 219     // if a breakpoint is present we can't rewrite the stream directly
 220     __ movzbl(scratch, at_bcp(0));
 221     __ cmpl(scratch, Bytecodes::_breakpoint);
 222     __ jcc(Assembler::notEqual, fast_patch);
 223     __ get_method(scratch);
 224     // Let breakpoint table handling rewrite to quicker bytecode
 225     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), scratch, r13, bc);
 // Under ASSERT the debug-check code below makes the jump distance too far
 // for a short jump, so a near jump is used there instead.
 226 #ifndef ASSERT
 227     __ jmpb(patch_done);
 228 #else
 229     __ jmp(patch_done);
 230 #endif
 231     __ bind(fast_patch);
 232   }

 // Debug check: the byte being overwritten must be either the original
 // (non-quick) java code of 'bytecode' or already the quickened 'bc'.
 233 #ifdef ASSERT
 234   Label okay;
 235   __ load_unsigned_byte(scratch, at_bcp(0));
 236   __ cmpl(scratch, (int) Bytecodes::java_code(bytecode));
 237   __ jcc(Assembler::equal, okay);
 238   __ cmpl(scratch, bc);
 239   __ jcc(Assembler::equal, okay);
 240   __ stop("patching the wrong bytecode");
 241   __ bind(okay);
 242 #endif

 243   // patch bytecode
 244   __ movb(at_bcp(0), bc);
 245   __ bind(patch_done);
 246 }
 247 
 248 
 249 // Individual instructions
 250 
 // Template for the 'nop' bytecode: tos state unchanged, no code emitted.
 251 void TemplateTable::nop() {
 252   transition(vtos, vtos);
 253   // nothing to do
 254 }
 255 
 // Template used for bytecodes that must never be dispatched; halts the
 // VM with a diagnostic message if it is ever executed.
 256 void TemplateTable::shouldnotreachhere() {
 257   transition(vtos, vtos);
 258   __ stop("shouldnotreachhere bytecode");
 259 }
 260 
 // aconst_null: push the null object reference.  Clearing rax leaves the
 // NULL oop in the tos cache register for the atos state.
 261 void TemplateTable::aconst_null() {
 262   transition(vtos, atos);
 263   __ xorl(rax, rax);
 264 }
 265 


2081 // volatile-store-volatile-load case.  This final case is placed after
2082 // volatile-stores although it could just as well go before
2083 // volatile-loads.
// Emits a memory barrier with the given ordering constraint; elided on
// uniprocessor systems, where no cross-CPU ordering is needed.
2084 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
2085                                      order_constraint) {
2086   // Helper function to insert a is-volatile test and memory barrier
2087   if (os::is_MP()) { // Not needed on single CPU
2088     __ membar(order_constraint);
2089   }
2090 }
2091 
2092 void TemplateTable::resolve_cache_and_index(int byte_no,
2093                                             Register result,
2094                                             Register Rcache,
2095                                             Register index,
2096                                             size_t index_size) {
2097   const Register temp = rbx;
2098   assert_different_registers(result, Rcache, index, temp);
2099 
2100   Label resolved;
2101   __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2102   if (byte_no == f1_oop) {
2103     // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
2104     // This kind of CP cache entry does not need to match the flags byte, because
2105     // there is a 1-1 relation between bytecode type and CP entry type.
2106     assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)

2107     __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
2108     __ testptr(result, result);
2109     __ jcc(Assembler::notEqual, resolved);
2110   } else {
2111     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2112     assert(result == noreg, "");  //else change code for setting result
2113     const int shift_count = (1 + byte_no) * BitsPerByte;
2114     __ movl(temp, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::indices_offset()));
2115     __ shrl(temp, shift_count);
2116     // have we resolved this bytecode?
2117     __ andl(temp, 0xFF);
2118     __ cmpl(temp, (int) bytecode());
2119     __ jcc(Assembler::equal, resolved);
2120   }
2121 
2122   // resolve first time through
2123   address entry;
2124   switch (bytecode()) {
2125   case Bytecodes::_getstatic:
2126   case Bytecodes::_putstatic:
2127   case Bytecodes::_getfield:
2128   case Bytecodes::_putfield:
2129     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
2130     break;
2131   case Bytecodes::_invokevirtual:
2132   case Bytecodes::_invokespecial:
2133   case Bytecodes::_invokestatic:
2134   case Bytecodes::_invokeinterface:
2135     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
2136     break;
2137   case Bytecodes::_invokedynamic:
2138     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);


2490   // [jk] not needed currently
2491   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2492   //                                              Assembler::StoreStore));
2493 
2494   Label notVolatile, Done;
2495   __ movl(rdx, flags);
2496   __ shrl(rdx, ConstantPoolCacheEntry::volatileField);
2497   __ andl(rdx, 0x1);
2498 
2499   // field address
2500   const Address field(obj, off, Address::times_1);
2501 
2502   Label notByte, notInt, notShort, notChar,
2503         notLong, notFloat, notObj, notDouble;
2504 
2505   __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2506 
2507   assert(btos == 0, "change code, btos != 0");
2508   __ andl(flags, 0x0f);
2509   __ jcc(Assembler::notZero, notByte);

2510   // btos

2511   __ pop(btos);
2512   if (!is_static) pop_and_check_object(obj);
2513   __ movb(field, rax);
2514   if (!is_static) {
2515     patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx);
2516   }
2517   __ jmp(Done);

2518 
2519   __ bind(notByte);
2520   __ cmpl(flags, atos);
2521   __ jcc(Assembler::notEqual, notObj);

2522   // atos

2523   __ pop(atos);
2524   if (!is_static) pop_and_check_object(obj);
2525 
2526   // Store into the field
2527   do_oop_store(_masm, field, rax, _bs->kind(), false);
2528 
2529   if (!is_static) {
2530     patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx);
2531   }
2532   __ jmp(Done);

2533 
2534   __ bind(notObj);
2535   __ cmpl(flags, itos);
2536   __ jcc(Assembler::notEqual, notInt);

2537   // itos

2538   __ pop(itos);
2539   if (!is_static) pop_and_check_object(obj);
2540   __ movl(field, rax);
2541   if (!is_static) {
2542     patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx);
2543   }
2544   __ jmp(Done);

2545 
2546   __ bind(notInt);
2547   __ cmpl(flags, ctos);
2548   __ jcc(Assembler::notEqual, notChar);

2549   // ctos

2550   __ pop(ctos);
2551   if (!is_static) pop_and_check_object(obj);
2552   __ movw(field, rax);
2553   if (!is_static) {
2554     patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx);
2555   }
2556   __ jmp(Done);

2557 
2558   __ bind(notChar);
2559   __ cmpl(flags, stos);
2560   __ jcc(Assembler::notEqual, notShort);

2561   // stos

2562   __ pop(stos);
2563   if (!is_static) pop_and_check_object(obj);
2564   __ movw(field, rax);
2565   if (!is_static) {
2566     patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx);
2567   }
2568   __ jmp(Done);

2569 
2570   __ bind(notShort);
2571   __ cmpl(flags, ltos);
2572   __ jcc(Assembler::notEqual, notLong);

2573   // ltos

2574   __ pop(ltos);
2575   if (!is_static) pop_and_check_object(obj);
2576   __ movq(field, rax);
2577   if (!is_static) {
2578     patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx);
2579   }
2580   __ jmp(Done);

2581 
2582   __ bind(notLong);
2583   __ cmpl(flags, ftos);
2584   __ jcc(Assembler::notEqual, notFloat);

2585   // ftos

2586   __ pop(ftos);
2587   if (!is_static) pop_and_check_object(obj);
2588   __ movflt(field, xmm0);
2589   if (!is_static) {
2590     patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx);
2591   }
2592   __ jmp(Done);

2593 
2594   __ bind(notFloat);
2595 #ifdef ASSERT
2596   __ cmpl(flags, dtos);
2597   __ jcc(Assembler::notEqual, notDouble);
2598 #endif

2599   // dtos

2600   __ pop(dtos);
2601   if (!is_static) pop_and_check_object(obj);
2602   __ movdbl(field, xmm0);
2603   if (!is_static) {
2604     patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx);

2605   }
2606 
2607 #ifdef ASSERT
2608   __ jmp(Done);
2609 
2610   __ bind(notDouble);
2611   __ stop("Bad state");
2612 #endif
2613 
2614   __ bind(Done);

2615   // Check for volatile store
2616   __ testl(rdx, rdx);
2617   __ jcc(Assembler::zero, notVolatile);
2618   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2619                                                Assembler::StoreStore));
2620 
2621   __ bind(notVolatile);
2622 }
2623 
// putfield: store into an instance field; delegates to the shared helper
// with is_static == false.  'byte_no' selects the CP cache bytecode byte.
2624 void TemplateTable::putfield(int byte_no) {
2625   putfield_or_static(byte_no, false);
2626 }
2627 
// putstatic: store into a static field; delegates to the shared helper
// with is_static == true.  'byte_no' selects the CP cache bytecode byte.
2628 void TemplateTable::putstatic(int byte_no) {
2629   putfield_or_static(byte_no, true);
2630 }
2631 
2632 void TemplateTable::jvmti_post_fast_field_mod() {
2633   if (JvmtiExport::can_post_field_modification()) {
2634     // Check to see if a field modification watch has been set before
2635     // we take the time to call into the VM.
2636     Label L2;
2637     __ mov32(c_rarg3, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2638     __ testl(c_rarg3, c_rarg3);
2639     __ jcc(Assembler::zero, L2);
2640     __ pop_ptr(rbx);                  // copy the object pointer from tos




 186       break;
 187     case BarrierSet::ModRef:
 188     case BarrierSet::Other:
 189       if (val == noreg) {
 190         __ store_heap_oop_null(obj);
 191       } else {
 192         __ store_heap_oop(obj, val);
 193       }
 194       break;
 195     default      :
 196       ShouldNotReachHere();
 197 
 198   }
 199 }
 200 
 // Builds an Address operand relative to the bytecode pointer (bcp),
 // which the 64-bit interpreter keeps in r13; 'offset' is in bytes.
 201 Address TemplateTable::at_bcp(int offset) {
 // Guard: this template must have declared that it uses the bcp.
 202   assert(_desc->uses_bcp(), "inconsistent uses_bcp information");
 203   return Address(r13, offset);
 204 }
 205 
 // Rewrites the bytecode at the current bcp to the quicker variant 'bc'
 // (loaded into bc_reg unless load_bc_into_bc_reg is false).  temp_reg is
 // clobbered.  For fast put-field bytecodes, 'byte_no' (f1_byte/f2_byte)
 // selects which CP cache bytecode byte to consult; patching is skipped
 // while that byte is still zero so resolution work keeps happening.
 206 void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
 207                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
 208                                    int byte_no) {
 209   if (!RewriteBytecodes)  return;
 210   Label L_patch_done;
 211
 212   switch (bc) {
 213   case Bytecodes::_fast_aputfield:
 214   case Bytecodes::_fast_bputfield:
 215   case Bytecodes::_fast_cputfield:
 216   case Bytecodes::_fast_dputfield:
 217   case Bytecodes::_fast_fputfield:
 218   case Bytecodes::_fast_iputfield:
 219   case Bytecodes::_fast_lputfield:
 220   case Bytecodes::_fast_sputfield:
 221     {
 222       // We skip bytecode quickening for putfield instructions when
 223       // the put_code written to the constant pool cache is zero.
 224       // This is required so that every execution of this instruction
 225       // calls out to InterpreterRuntime::resolve_get_put to do
 226       // additional, required work.
 227       assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
 228       assert(load_bc_into_bc_reg, "we use bc_reg as temp");
 // Loads the put_code from the CP cache entry into temp_reg; bc_reg is
 // used as a scratch index register here and reloaded with 'bc' below.
 229       __ get_cache_and_index_and_bytecode_at_bcp(temp_reg, bc_reg, temp_reg, byte_no, 1);
 230       __ movl(bc_reg, bc);
 231       __ cmpl(temp_reg, (int) 0);
 232       __ jcc(Assembler::zero, L_patch_done);  // don't patch
 233     }
 234     break;
 235   default:
 236     assert(byte_no == -1, "sanity");
 237     // the pair bytecodes have already done the load.
 238     if (load_bc_into_bc_reg) {
 239       __ movl(bc_reg, bc);
 240     }
 241   }
 242
 243   if (JvmtiExport::can_post_breakpoint()) {
 244     Label L_fast_patch;
 245     // if a breakpoint is present we can't rewrite the stream directly
 246     __ movzbl(temp_reg, at_bcp(0));
 247     __ cmpl(temp_reg, Bytecodes::_breakpoint);
 248     __ jcc(Assembler::notEqual, L_fast_patch);
 249     __ get_method(temp_reg);
 250     // Let breakpoint table handling rewrite to quicker bytecode
 251     __ call_VM(noreg, CAST_FROM_FN_PTR(address, InterpreterRuntime::set_original_bytecode_at), temp_reg, r13, bc_reg);
 // Under ASSERT the debug-check code below makes the jump distance too far
 // for a short jump, so a near jump is used there instead.
 252 #ifndef ASSERT
 253     __ jmpb(L_patch_done);
 254 #else
 255     __ jmp(L_patch_done);
 256 #endif
 257     __ bind(L_fast_patch);
 258   }
 259
 // Debug check: the byte being overwritten must be either the original
 // (non-quick) java code of 'bc' or already the quickened value in bc_reg.
 260 #ifdef ASSERT
 261   Label L_okay;
 262   __ load_unsigned_byte(temp_reg, at_bcp(0));
 263   __ cmpl(temp_reg, (int) Bytecodes::java_code(bc));
 264   __ jcc(Assembler::equal, L_okay);
 265   __ cmpl(temp_reg, bc_reg);
 266   __ jcc(Assembler::equal, L_okay);
 267   __ stop("patching the wrong bytecode");
 268   __ bind(L_okay);
 269 #endif
 270
 271   // patch bytecode
 272   __ movb(at_bcp(0), bc_reg);
 273   __ bind(L_patch_done);
 274 }
 275 
 276 
 277 // Individual instructions
 278 
 // Template for the 'nop' bytecode: tos state unchanged, no code emitted.
 279 void TemplateTable::nop() {
 280   transition(vtos, vtos);
 281   // nothing to do
 282 }
 283 
 // Template used for bytecodes that must never be dispatched; halts the
 // VM with a diagnostic message if it is ever executed.
 284 void TemplateTable::shouldnotreachhere() {
 285   transition(vtos, vtos);
 286   __ stop("shouldnotreachhere bytecode");
 287 }
 288 
 // aconst_null: push the null object reference.  Clearing rax leaves the
 // NULL oop in the tos cache register for the atos state.
 289 void TemplateTable::aconst_null() {
 290   transition(vtos, atos);
 291   __ xorl(rax, rax);
 292 }
 293 


2109 // volatile-store-volatile-load case.  This final case is placed after
2110 // volatile-stores although it could just as well go before
2111 // volatile-loads.
// Emits a memory barrier with the given ordering constraint; elided on
// uniprocessor systems, where no cross-CPU ordering is needed.
2112 void TemplateTable::volatile_barrier(Assembler::Membar_mask_bits
2113                                      order_constraint) {
2114   // Helper function to insert a is-volatile test and memory barrier
2115   if (os::is_MP()) { // Not needed on single CPU
2116     __ membar(order_constraint);
2117   }
2118 }
2119 
2120 void TemplateTable::resolve_cache_and_index(int byte_no,
2121                                             Register result,
2122                                             Register Rcache,
2123                                             Register index,
2124                                             size_t index_size) {
2125   const Register temp = rbx;
2126   assert_different_registers(result, Rcache, index, temp);
2127 
2128   Label resolved;

2129   if (byte_no == f1_oop) {
2130     // We are resolved if the f1 field contains a non-null object (CallSite, etc.)
2131     // This kind of CP cache entry does not need to match the flags byte, because
2132     // there is a 1-1 relation between bytecode type and CP entry type.
2133     assert(result != noreg, ""); //else do cmpptr(Address(...), (int32_t) NULL_WORD)
2134     __ get_cache_and_index_at_bcp(Rcache, index, 1, index_size);
2135     __ movptr(result, Address(Rcache, index, Address::times_ptr, constantPoolCacheOopDesc::base_offset() + ConstantPoolCacheEntry::f1_offset()));
2136     __ testptr(result, result);
2137     __ jcc(Assembler::notEqual, resolved);
2138   } else {
2139     assert(byte_no == f1_byte || byte_no == f2_byte, "byte_no out of range");
2140     assert(result == noreg, "");  //else change code for setting result
2141     __ get_cache_and_index_and_bytecode_at_bcp(Rcache, index, temp, byte_no, 1, index_size);
2142     __ cmpl(temp, (int) bytecode());  // have we resolved this bytecode?




2143     __ jcc(Assembler::equal, resolved);
2144   }
2145 
2146   // resolve first time through
2147   address entry;
2148   switch (bytecode()) {
2149   case Bytecodes::_getstatic:
2150   case Bytecodes::_putstatic:
2151   case Bytecodes::_getfield:
2152   case Bytecodes::_putfield:
2153     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_get_put);
2154     break;
2155   case Bytecodes::_invokevirtual:
2156   case Bytecodes::_invokespecial:
2157   case Bytecodes::_invokestatic:
2158   case Bytecodes::_invokeinterface:
2159     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invoke);
2160     break;
2161   case Bytecodes::_invokedynamic:
2162     entry = CAST_FROM_FN_PTR(address, InterpreterRuntime::resolve_invokedynamic);


2514   // [jk] not needed currently
2515   // volatile_barrier(Assembler::Membar_mask_bits(Assembler::LoadStore |
2516   //                                              Assembler::StoreStore));
2517 
2518   Label notVolatile, Done;
2519   __ movl(rdx, flags);
2520   __ shrl(rdx, ConstantPoolCacheEntry::volatileField);
2521   __ andl(rdx, 0x1);
2522 
2523   // field address
2524   const Address field(obj, off, Address::times_1);
2525 
2526   Label notByte, notInt, notShort, notChar,
2527         notLong, notFloat, notObj, notDouble;
2528 
2529   __ shrl(flags, ConstantPoolCacheEntry::tosBits);
2530 
2531   assert(btos == 0, "change code, btos != 0");
2532   __ andl(flags, 0x0f);
2533   __ jcc(Assembler::notZero, notByte);
2534 
2535   // btos
2536   {
2537     __ pop(btos);
2538     if (!is_static) pop_and_check_object(obj);
2539     __ movb(field, rax);
2540     if (!is_static) {
2541       patch_bytecode(Bytecodes::_fast_bputfield, bc, rbx, true, byte_no);
2542     }
2543     __ jmp(Done);
2544   }
2545 
2546   __ bind(notByte);
2547   __ cmpl(flags, atos);
2548   __ jcc(Assembler::notEqual, notObj);
2549 
2550   // atos
2551   {
2552     __ pop(atos);
2553     if (!is_static) pop_and_check_object(obj);

2554     // Store into the field
2555     do_oop_store(_masm, field, rax, _bs->kind(), false);

2556     if (!is_static) {
2557       patch_bytecode(Bytecodes::_fast_aputfield, bc, rbx, true, byte_no);
2558     }
2559     __ jmp(Done);
2560   }
2561 
2562   __ bind(notObj);
2563   __ cmpl(flags, itos);
2564   __ jcc(Assembler::notEqual, notInt);
2565 
2566   // itos
2567   {
2568     __ pop(itos);
2569     if (!is_static) pop_and_check_object(obj);
2570     __ movl(field, rax);
2571     if (!is_static) {
2572       patch_bytecode(Bytecodes::_fast_iputfield, bc, rbx, true, byte_no);
2573     }
2574     __ jmp(Done);
2575   }
2576 
2577   __ bind(notInt);
2578   __ cmpl(flags, ctos);
2579   __ jcc(Assembler::notEqual, notChar);
2580 
2581   // ctos
2582   {
2583     __ pop(ctos);
2584     if (!is_static) pop_and_check_object(obj);
2585     __ movw(field, rax);
2586     if (!is_static) {
2587       patch_bytecode(Bytecodes::_fast_cputfield, bc, rbx, true, byte_no);
2588     }
2589     __ jmp(Done);
2590   }
2591 
2592   __ bind(notChar);
2593   __ cmpl(flags, stos);
2594   __ jcc(Assembler::notEqual, notShort);
2595 
2596   // stos
2597   {
2598     __ pop(stos);
2599     if (!is_static) pop_and_check_object(obj);
2600     __ movw(field, rax);
2601     if (!is_static) {
2602       patch_bytecode(Bytecodes::_fast_sputfield, bc, rbx, true, byte_no);
2603     }
2604     __ jmp(Done);
2605   }
2606 
2607   __ bind(notShort);
2608   __ cmpl(flags, ltos);
2609   __ jcc(Assembler::notEqual, notLong);
2610 
2611   // ltos
2612   {
2613     __ pop(ltos);
2614     if (!is_static) pop_and_check_object(obj);
2615     __ movq(field, rax);
2616     if (!is_static) {
2617       patch_bytecode(Bytecodes::_fast_lputfield, bc, rbx, true, byte_no);
2618     }
2619     __ jmp(Done);
2620   }
2621 
2622   __ bind(notLong);
2623   __ cmpl(flags, ftos);
2624   __ jcc(Assembler::notEqual, notFloat);
2625 
2626   // ftos
2627   {
2628     __ pop(ftos);
2629     if (!is_static) pop_and_check_object(obj);
2630     __ movflt(field, xmm0);
2631     if (!is_static) {
2632       patch_bytecode(Bytecodes::_fast_fputfield, bc, rbx, true, byte_no);
2633     }
2634     __ jmp(Done);
2635   }
2636 
2637   __ bind(notFloat);
2638 #ifdef ASSERT
2639   __ cmpl(flags, dtos);
2640   __ jcc(Assembler::notEqual, notDouble);
2641 #endif
2642 
2643   // dtos
2644   {
2645     __ pop(dtos);
2646     if (!is_static) pop_and_check_object(obj);
2647     __ movdbl(field, xmm0);
2648     if (!is_static) {
2649       patch_bytecode(Bytecodes::_fast_dputfield, bc, rbx, true, byte_no);
2650     }
2651   }
2652 
2653 #ifdef ASSERT
2654   __ jmp(Done);
2655 
2656   __ bind(notDouble);
2657   __ stop("Bad state");
2658 #endif
2659 
2660   __ bind(Done);
2661 
2662   // Check for volatile store
2663   __ testl(rdx, rdx);
2664   __ jcc(Assembler::zero, notVolatile);
2665   volatile_barrier(Assembler::Membar_mask_bits(Assembler::StoreLoad |
2666                                                Assembler::StoreStore));

2667   __ bind(notVolatile);
2668 }
2669 
// putfield: store into an instance field; delegates to the shared helper
// with is_static == false.  'byte_no' selects the CP cache bytecode byte.
2670 void TemplateTable::putfield(int byte_no) {
2671   putfield_or_static(byte_no, false);
2672 }
2673 
// putstatic: store into a static field; delegates to the shared helper
// with is_static == true.  'byte_no' selects the CP cache bytecode byte.
2674 void TemplateTable::putstatic(int byte_no) {
2675   putfield_or_static(byte_no, true);
2676 }
2677 
2678 void TemplateTable::jvmti_post_fast_field_mod() {
2679   if (JvmtiExport::can_post_field_modification()) {
2680     // Check to see if a field modification watch has been set before
2681     // we take the time to call into the VM.
2682     Label L2;
2683     __ mov32(c_rarg3, ExternalAddress((address)JvmtiExport::get_field_modification_count_addr()));
2684     __ testl(c_rarg3, c_rarg3);
2685     __ jcc(Assembler::zero, L2);
2686     __ pop_ptr(rbx);                  // copy the object pointer from tos


src/cpu/x86/vm/templateTable_x86_64.cpp
Index Unified diffs Context diffs Sdiffs Wdiffs Patch New Old Previous File Next File