Date:      Sun, 20 Mar 2011 23:14:12 +0200
From:      George Liaskos <geo.liaskos@gmail.com>
To:        Rene Ladan <rene@freebsd.org>
Cc:        freebsd-chromium@freebsd.org
Subject:   Re: versions shifted
Message-ID:  <AANLkTintGv=jouCGRjLZOxCMUt-=pqax=Qsm5wechmsC@mail.gmail.com>
In-Reply-To: <4D7FC392.1000401@freebsd.org>
References:  <4D7E83EE.90803@freebsd.org> <AANLkTi=o2sX_dHhWwC875sngx=GUx3keoCDTxpddSKKi@mail.gmail.com> <4D7FC392.1000401@freebsd.org>


[-- Attachment #1 --]
Hi

The attached patches resolve the compilation errors and fix the V8
stack overflow error on amd64 (backported from the upstream
bleeding_edge branch).
Chromium does not render pages yet; I have made some progress in that
regard too, but let's fix the build first.
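
To illustrate the SmiCompare/Cmp split that the x64 patch introduces
(SmiCompare now asserts in debug builds that both operands are smis,
while the new Cmp overloads accept a slot that may hold a heap
pointer, e.g. the context slot checked against ARGUMENTS_ADAPTOR),
here is a minimal standalone sketch. It is not V8 code: the helper
names (TagSmi, IsSmi, SmiEqual, CmpEqualToSmi) and the marker value 4
are made up for illustration, assuming only the usual x64 smi tagging
scheme visible in the patch (value in the upper 32 bits, low tag bit 0
for smis, 1 for heap pointers).

  #include <cassert>
  #include <cstdint>
  #include <iostream>

  constexpr uint64_t kSmiTagMask = 1;  // low bit: 0 = smi, 1 = heap pointer
  constexpr int kSmiShift = 32;        // smi value lives in the upper half

  uint64_t TagSmi(int32_t value) {
    return static_cast<uint64_t>(static_cast<uint32_t>(value)) << kSmiShift;
  }

  bool IsSmi(uint64_t word) { return (word & kSmiTagMask) == 0; }

  // Models SmiCompare: both sides must already be smis (debug-only check).
  bool SmiEqual(uint64_t a, uint64_t b) {
    assert(IsSmi(a) && IsSmi(b));
    return a == b;
  }

  // Models Cmp(operand, Smi*): no smi precondition, so it is safe on a
  // frame slot that may instead hold a context (heap) pointer.
  bool CmpEqualToSmi(uint64_t word, int32_t smi_value) {
    return word == TagSmi(smi_value);
  }

  int main() {
    uint64_t frame_marker = TagSmi(4);         // stand-in frame-type smi
    uint64_t context_ptr = 0xdeadbeefULL | 1;  // fake heap pointer, tag bit 1
    std::cout << CmpEqualToSmi(frame_marker, 4) << "\n";  // prints 1
    std::cout << CmpEqualToSmi(context_ptr, 4) << "\n";   // prints 0
    // SmiEqual(context_ptr, frame_marker) would trip the assert in a
    // debug build, which is exactly why the patch switches those call
    // sites from SmiCompare to Cmp.
    return 0;
  }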

Regards

[-- Attachment #2 --]
Index: src/ia32/code-stubs-ia32.cc
===================================================================
--- src/ia32/code-stubs-ia32.cc	(revision 7153)
+++ src/ia32/code-stubs-ia32.cc	(working copy)
@@ -5509,8 +5509,8 @@
   STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
   // Handle exceptionally long strings in the runtime system.
   __ j(overflow, &string_add_runtime);
-  // Use the runtime system when adding two one character strings, as it
-  // contains optimizations for this specific case using the symbol table.
+  // Use the symbol table when adding two one character strings, as it
+  // helps later optimizations to return a symbol here.
   __ cmp(Operand(ebx), Immediate(Smi::FromInt(2)));
   __ j(not_equal, &longer_than_two);
 
@@ -5927,6 +5927,8 @@
     // If entry is undefined no string with this hash can be found.
     __ cmp(candidate, Factory::undefined_value());
     __ j(equal, not_found);
+    __ cmp(candidate, Factory::null_value());
+    __ j(equal, &next_probe[i]);
 
     // If length is not 2 the string is not a candidate.
     __ cmp(FieldOperand(candidate, String::kLengthOffset),
Index: src/ia32/full-codegen-ia32.cc
===================================================================
--- src/ia32/full-codegen-ia32.cc	(revision 7153)
+++ src/ia32/full-codegen-ia32.cc	(working copy)
@@ -3049,8 +3049,8 @@
   // Fetch the map and check if array is in fast case.
   // Check that object doesn't require security checks and
   // has no indexed interceptor.
-  __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
-  __ j(below, &slow_case);
+  __ CmpObjectType(object, JS_OBJECT_TYPE, temp);
+  __ j(not_equal, &slow_case);
   __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
             KeyedLoadIC::kSlowCaseBitFieldMask);
   __ j(not_zero, &slow_case);
Index: src/x64/assembler-x64.h
===================================================================
--- src/x64/assembler-x64.h	(revision 7153)
+++ src/x64/assembler-x64.h	(working copy)
@@ -656,7 +656,7 @@
 
   // Move sign extended immediate to memory location.
   void movq(const Operand& dst, Immediate value);
-  // New x64 instructions to load a 64-bit immediate into a register.
+  // Instructions to load a 64-bit immediate into a register.
   // All 64-bit immediates must have a relocation mode.
   void movq(Register dst, void* ptr, RelocInfo::Mode rmode);
   void movq(Register dst, int64_t value, RelocInfo::Mode rmode);
@@ -681,7 +681,7 @@
   void repmovsl();
   void repmovsq();
 
-  // New x64 instruction to load from an immediate 64-bit pointer into RAX.
+  // Instruction to load from an immediate 64-bit pointer into RAX.
   void load_rax(void* ptr, RelocInfo::Mode rmode);
   void load_rax(ExternalReference ext);
 
Index: src/x64/code-stubs-x64.cc
===================================================================
--- src/x64/code-stubs-x64.cc	(revision 7153)
+++ src/x64/code-stubs-x64.cc	(working copy)
@@ -2244,11 +2244,14 @@
   Label slow;
   __ JumpIfNotSmi(rdx, &slow);
 
-  // Check if the calling frame is an arguments adaptor frame.
+  // Check if the calling frame is an arguments adaptor frame.  We look at the
+  // context offset, and if the frame is not a regular one, then we find a
+  // Smi instead of the context.  We can't use SmiCompare here, because that
+  // only works for comparing two smis.
   Label adaptor;
   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adaptor);
 
   // Check index against formal parameters count limit passed in
@@ -2303,8 +2306,8 @@
   // Check if the calling frame is an arguments adaptor frame.
   Label adaptor_frame, try_allocate, runtime;
   __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adaptor_frame);
 
   // Get the length from the frame.
@@ -4157,8 +4160,8 @@
   // Look at the length of the result of adding the two strings.
   STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
   __ SmiAdd(rbx, rbx, rcx);
-  // Use the runtime system when adding two one character strings, as it
-  // contains optimizations for this specific case using the symbol table.
+  // Use the symbol table when adding two one character strings, as it
+  // helps later optimizations to return a symbol here.
   __ SmiCompare(rbx, Smi::FromInt(2));
   __ j(not_equal, &longer_than_two);
 
@@ -4510,15 +4513,14 @@
                     FieldOperand(symbol_table, SymbolTable::kCapacityOffset));
   __ decl(mask);
 
-  Register undefined = scratch4;
-  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
+  Register map = scratch4;
 
   // Registers
   // chars:        two character string, char 1 in byte 0 and char 2 in byte 1.
   // hash:         hash of two character string (32-bit int)
   // symbol_table: symbol table
   // mask:         capacity mask (32-bit int)
-  // undefined:    undefined value
+  // map:          -
   // scratch:      -
 
   // Perform a number of probes in the symbol table.
@@ -4533,7 +4535,7 @@
     }
     __ andl(scratch, mask);
 
-    // Load the entry from the symble table.
+    // Load the entry from the symbol table.
     Register candidate = scratch;  // Scratch register contains candidate.
     STATIC_ASSERT(SymbolTable::kEntrySize == 1);
     __ movq(candidate,
@@ -4543,9 +4545,17 @@
                          SymbolTable::kElementsStartOffset));
 
     // If entry is undefined no string with this hash can be found.
-    __ cmpq(candidate, undefined);
+    NearLabel is_string;
+    __ CmpObjectType(candidate, ODDBALL_TYPE, map);
+    __ j(not_equal, &is_string);
+
+    __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
     __ j(equal, not_found);
+    // Must be null (deleted entry).
+    __ jmp(&next_probe[i]);
 
+    __ bind(&is_string);
+
     // If length is not 2 the string is not a candidate.
     __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
                   Smi::FromInt(2));
@@ -4556,8 +4566,7 @@
     Register temp = kScratchRegister;
 
     // Check that the candidate is a non-external ascii string.
-    __ movq(temp, FieldOperand(candidate, HeapObject::kMapOffset));
-    __ movzxbl(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
+    __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
     __ JumpIfInstanceTypeIsNotSequentialAscii(
         temp, temp, &next_probe[i]);
 
Index: src/x64/codegen-x64.cc
===================================================================
--- src/x64/codegen-x64.cc	(revision 7153)
+++ src/x64/codegen-x64.cc	(working copy)
@@ -766,7 +766,7 @@
       __ AbortIfNotNumber(value.reg());
     }
     // Smi => false iff zero.
-    __ SmiCompare(value.reg(), Smi::FromInt(0));
+    __ Cmp(value.reg(), Smi::FromInt(0));
     if (value.is_smi()) {
       value.Unuse();
       dest->Split(not_zero);
@@ -794,7 +794,7 @@
     dest->false_target()->Branch(equal);
 
     // Smi => false iff zero.
-    __ SmiCompare(value.reg(), Smi::FromInt(0));
+    __ Cmp(value.reg(), Smi::FromInt(0));
     dest->false_target()->Branch(equal);
     Condition is_smi = masm_->CheckSmi(value.reg());
     dest->true_target()->Branch(is_smi);
@@ -1036,7 +1036,7 @@
                                         true, overwrite_mode);
   } else {
     // Set the flags based on the operation, type and loop nesting level.
-    // Bit operations always assume they likely operate on Smis. Still only
+    // Bit operations always assume they likely operate on smis. Still only
     // generate the inline Smi check code if this operation is part of a loop.
     // For all other operations only inline the Smi check code for likely smis
     // if the operation is part of a loop.
@@ -2108,7 +2108,7 @@
       if (cc == equal) {
         Label comparison_done;
         __ SmiCompare(FieldOperand(left_side.reg(), String::kLengthOffset),
-                Smi::FromInt(1));
+                      Smi::FromInt(1));
         __ j(not_equal, &comparison_done);
         uint8_t char_value =
             static_cast<uint8_t>(String::cast(*right_val)->Get(0));
@@ -2294,7 +2294,7 @@
       // CompareStub and the inline code both support all values of cc.
     }
     // Implement comparison against a constant Smi, inlining the case
-    // where both sides are Smis.
+    // where both sides are smis.
     left_side->ToRegister();
     Register left_reg = left_side->reg();
     Smi* constant_smi = Smi::cast(*right_side->handle());
@@ -2304,7 +2304,6 @@
         __ AbortIfNotSmi(left_reg);
       }
       // Test smi equality and comparison by signed int comparison.
-      // Both sides are smis, so we can use an Immediate.
       __ SmiCompare(left_reg, constant_smi);
       left_side->Unuse();
       right_side->Unuse();
@@ -2314,7 +2313,7 @@
       JumpTarget is_smi;
       if (cc == equal) {
         // We can do the equality comparison before the smi check.
-        __ SmiCompare(left_reg, constant_smi);
+        __ Cmp(left_reg, constant_smi);
         dest->true_target()->Branch(equal);
         Condition left_is_smi = masm_->CheckSmi(left_reg);
         dest->false_target()->Branch(left_is_smi);
@@ -2575,8 +2574,8 @@
       // adaptor frame below it.
       Label invoke, adapted;
       __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-      __ SmiCompare(Operand(rdx, StandardFrameConstants::kContextOffset),
-                    Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+      __ Cmp(Operand(rdx, StandardFrameConstants::kContextOffset),
+             Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
       __ j(equal, &adapted);
 
       // No arguments adaptor frame. Copy fixed number of arguments.
@@ -3857,7 +3856,7 @@
   __ movq(rbx, rax);
 
   // If the property has been removed while iterating, we just skip it.
-  __ SmiCompare(rbx, Smi::FromInt(0));
+  __ Cmp(rbx, Smi::FromInt(0));
   node->continue_target()->Branch(equal);
 
   end_del_check.Bind();
@@ -6198,15 +6197,15 @@
 
   // Skip the arguments adaptor frame if it exists.
   Label check_frame_marker;
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &check_frame_marker);
   __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset));
 
   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
-                Smi::FromInt(StackFrame::CONSTRUCT));
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset),
+         Smi::FromInt(StackFrame::CONSTRUCT));
   fp.Unuse();
   destination()->Split(equal);
 }
@@ -6226,8 +6225,8 @@
 
   // Check if the calling frame is an arguments adaptor frame.
   __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(fp.reg(), StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &exit);
 
   // Arguments adaptor case: Read the arguments length from the
@@ -6783,8 +6782,8 @@
   // Fetch the map and check if array is in fast case.
   // Check that object doesn't require security checks and
   // has no indexed interceptor.
-  __ CmpObjectType(object.reg(), FIRST_JS_OBJECT_TYPE, tmp1.reg());
-  deferred->Branch(below);
+  __ CmpObjectType(object.reg(), JS_ARRAY_TYPE, tmp1.reg());
+  deferred->Branch(not_equal);
   __ testb(FieldOperand(tmp1.reg(), Map::kBitFieldOffset),
            Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
   deferred->Branch(not_zero);
@@ -6826,7 +6825,7 @@
 
   Label done;
   __ InNewSpace(tmp1.reg(), tmp2.reg(), equal, &done);
-  // Possible optimization: do a check that both values are Smis
+  // Possible optimization: do a check that both values are smis
   // (or them and test against Smi mask.)
 
   __ movq(tmp2.reg(), tmp1.reg());
@@ -8516,12 +8515,6 @@
     __ CmpObjectType(receiver.reg(), JS_ARRAY_TYPE, kScratchRegister);
     deferred->Branch(not_equal);
 
-    // Check that the key is within bounds.  Both the key and the length of
-    // the JSArray are smis. Use unsigned comparison to handle negative keys.
-    __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
-                  key.reg());
-    deferred->Branch(below_equal);
-
     // Get the elements array from the receiver and check that it is not a
     // dictionary.
     __ movq(tmp.reg(),
@@ -8550,6 +8543,14 @@
             kScratchRegister);
     deferred->Branch(not_equal);
 
+    // Check that the key is within bounds.  Both the key and the length of
+    // the JSArray are smis (because the fixed array check above ensures the
+    // elements are in fast case). Use unsigned comparison to handle negative
+    // keys.
+    __ SmiCompare(FieldOperand(receiver.reg(), JSArray::kLengthOffset),
+                  key.reg());
+    deferred->Branch(below_equal);
+
     // Store the value.
     SmiIndex index =
         masm()->SmiToIndex(kScratchRegister, key.reg(), kPointerSizeLog2);
Index: src/x64/full-codegen-x64.cc
===================================================================
--- src/x64/full-codegen-x64.cc	(revision 7153)
+++ src/x64/full-codegen-x64.cc	(working copy)
@@ -549,7 +549,7 @@
   __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
   __ j(equal, if_false);
   STATIC_ASSERT(kSmiTag == 0);
-  __ SmiCompare(result_register(), Smi::FromInt(0));
+  __ Cmp(result_register(), Smi::FromInt(0));
   __ j(equal, if_false);
   Condition is_smi = masm_->CheckSmi(result_register());
   __ j(is_smi, if_true);
@@ -995,7 +995,7 @@
   __ push(rcx);  // Enumerable.
   __ push(rbx);  // Current entry.
   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
-  __ SmiCompare(rax, Smi::FromInt(0));
+  __ Cmp(rax, Smi::FromInt(0));
   __ j(equal, loop_statement.continue_target());
   __ movq(rbx, rax);
 
@@ -2502,15 +2502,15 @@
 
   // Skip the arguments adaptor frame if it exists.
   Label check_frame_marker;
-  __ SmiCompare(Operand(rax, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &check_frame_marker);
   __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
 
   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
-  __ SmiCompare(Operand(rax, StandardFrameConstants::kMarkerOffset),
-                Smi::FromInt(StackFrame::CONSTRUCT));
+  __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
+         Smi::FromInt(StackFrame::CONSTRUCT));
   PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
   Split(equal, if_true, if_false, fall_through);
 
@@ -2564,8 +2564,8 @@
 
   // Check if the calling frame is an arguments adaptor frame.
   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(rbx, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &exit);
 
   // Arguments adaptor case: Read the arguments length from the
@@ -3010,8 +3010,8 @@
   // Fetch the map and check if array is in fast case.
   // Check that object doesn't require security checks and
   // has no indexed interceptor.
-  __ CmpObjectType(object, FIRST_JS_OBJECT_TYPE, temp);
-  __ j(below, &slow_case);
+  __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
+  __ j(not_equal, &slow_case);
   __ testb(FieldOperand(temp, Map::kBitFieldOffset),
            Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
   __ j(not_zero, &slow_case);
Index: src/x64/lithium-codegen-x64.cc
===================================================================
--- src/x64/lithium-codegen-x64.cc	(revision 7153)
+++ src/x64/lithium-codegen-x64.cc	(working copy)
@@ -1225,7 +1225,7 @@
       __ j(equal, true_label);
       __ CompareRoot(reg, Heap::kFalseValueRootIndex);
       __ j(equal, false_label);
-      __ SmiCompare(reg, Smi::FromInt(0));
+      __ Cmp(reg, Smi::FromInt(0));
       __ j(equal, false_label);
       __ JumpIfSmi(reg, true_label);
 
@@ -2177,8 +2177,8 @@
   // Check for arguments adapter frame.
   NearLabel done, adapted;
   __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-  __ SmiCompare(Operand(result, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(equal, &adapted);
 
   // No arguments adaptor frame.
@@ -3611,15 +3611,15 @@
 
   // Skip the arguments adaptor frame if it exists.
   NearLabel check_frame_marker;
-  __ SmiCompare(Operand(temp, StandardFrameConstants::kContextOffset),
-                Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
+  __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
+         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
   __ j(not_equal, &check_frame_marker);
   __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
 
   // Check the marker in the calling frame.
   __ bind(&check_frame_marker);
-  __ SmiCompare(Operand(temp, StandardFrameConstants::kMarkerOffset),
-                Smi::FromInt(StackFrame::CONSTRUCT));
+  __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
+         Smi::FromInt(StackFrame::CONSTRUCT));
 }
 
 
Index: src/x64/macro-assembler-x64.cc
===================================================================
--- src/x64/macro-assembler-x64.cc	(revision 7153)
+++ src/x64/macro-assembler-x64.cc	(working copy)
@@ -124,7 +124,7 @@
   ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
 
   // First, check if a write barrier is even needed. The tests below
-  // catch stores of Smis and stores into young gen.
+  // catch stores of smis and stores into young gen.
   Label done;
   JumpIfSmi(value, &done);
 
@@ -153,7 +153,7 @@
   ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
 
   // First, check if a write barrier is even needed. The tests below
-  // catch stores of Smis and stores into young gen.
+  // catch stores of smis and stores into young gen.
   Label done;
   JumpIfSmi(value, &done);
 
@@ -837,12 +837,26 @@
 }
 
 
-void MacroAssembler::SmiCompare(Register dst, Register src) {
-  cmpq(dst, src);
+void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
+  if (FLAG_debug_code) {
+    AbortIfNotSmi(smi1);
+    AbortIfNotSmi(smi2);
+  }
+  cmpq(smi1, smi2);
 }
 
 
 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
+  if (FLAG_debug_code) {
+    AbortIfNotSmi(dst);
+  }
+  // Actually, knowing the register is a smi doesn't enable any optimizations
+  // with the current tagging scheme.
+  Cmp(dst, src);
+}
+
+
+void MacroAssembler::Cmp(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
   if (src->value() == 0) {
     testq(dst, dst);
@@ -854,20 +868,41 @@
 
 
 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
+  if (FLAG_debug_code) {
+    AbortIfNotSmi(dst);
+    AbortIfNotSmi(src);
+  }
   cmpq(dst, src);
 }
 
 
 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
+  if (FLAG_debug_code) {
+    AbortIfNotSmi(dst);
+    AbortIfNotSmi(src);
+  }
   cmpq(dst, src);
 }
 
 
 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
+  if (FLAG_debug_code) {
+    AbortIfNotSmi(dst);
+  }
   cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
 }
 
 
+void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
+  // The Operand cannot use the smi register, since we may use the scratch
+  // register to get around the lack of 64 bit immediates in the instruction
+  // set.
+  Register smi_reg = GetSmiConstant(src);
+  ASSERT(!dst.AddressUsesRegister(smi_reg));
+  cmpq(dst, smi_reg);
+}
+
+
 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
   cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
 }
@@ -1352,7 +1387,7 @@
 
 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
   if (source->IsSmi()) {
-    SmiCompare(dst, Smi::cast(*source));
+    Cmp(dst, Smi::cast(*source));
   } else {
     Move(kScratchRegister, source);
     cmpq(dst, kScratchRegister);
@@ -1362,7 +1397,7 @@
 
 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
   if (source->IsSmi()) {
-    SmiCompare(dst, Smi::cast(*source));
+    Cmp(dst, Smi::cast(*source));
   } else {
     ASSERT(source->IsHeapObject());
     movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
@@ -1753,12 +1788,17 @@
 
 
 void MacroAssembler::AbortIfNotSmi(Register object) {
-  NearLabel ok;
   Condition is_smi = CheckSmi(object);
   Assert(is_smi, "Operand is not a smi");
 }
 
 
+void MacroAssembler::AbortIfNotSmi(const Operand& object) {
+  Condition is_smi = CheckSmi(object);
+  Assert(is_smi, "Operand is not a smi");
+}
+
+
 void MacroAssembler::AbortIfNotString(Register object) {
   testb(object, Immediate(kSmiTagMask));
   Assert(not_equal, "Operand is not a string");
Index: src/x64/macro-assembler-x64.h
===================================================================
--- src/x64/macro-assembler-x64.h	(revision 7153)
+++ src/x64/macro-assembler-x64.h	(working copy)
@@ -278,8 +278,9 @@
                                            int power);
 
 
-  // Simple comparison of smis.
-  void SmiCompare(Register dst, Register src);
+  // Simple comparison of smis.  Both sides must be known smis to use these,
+  // otherwise use Cmp.
+  void SmiCompare(Register smi1, Register smi2);
   void SmiCompare(Register dst, Smi* src);
   void SmiCompare(Register dst, const Operand& src);
   void SmiCompare(const Operand& dst, Register src);
@@ -609,6 +610,8 @@
   void Move(const Operand& dst, Handle<Object> source);
   void Cmp(Register dst, Handle<Object> source);
   void Cmp(const Operand& dst, Handle<Object> source);
+  void Cmp(Register dst, Smi* src);
+  void Cmp(const Operand& dst, Smi* src);
   void Push(Handle<Object> source);
 
   // Emit code to discard a non-negative number of pointer-sized elements
@@ -702,6 +705,7 @@
 
   // Abort execution if argument is not a smi. Used in debug code.
   void AbortIfNotSmi(Register object);
+  void AbortIfNotSmi(const Operand& object);
 
   // Abort execution if argument is a string. Used in debug code.
   void AbortIfNotString(Register object);
