--- old/modules/graphics/src/test/java/com/sun/javafx/css/parser/CSSLexerTest.java	2015-09-03 15:35:19.490222500 -0700
+++ /dev/null	2015-09-03 15:35:20.000000000 -0700
@@ -1,847 +0,0 @@
-/*
- * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
- * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
- *
- * This code is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License version 2 only, as
- * published by the Free Software Foundation. Oracle designates this
- * particular file as subject to the "Classpath" exception as provided
- * by Oracle in the LICENSE file that accompanied this code.
- *
- * This code is distributed in the hope that it will be useful, but WITHOUT
- * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
- * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
- * version 2 for more details (a copy is included in the LICENSE file that
- * accompanied this code).
- *
- * You should have received a copy of the GNU General Public License version
- * 2 along with this work; if not, write to the Free Software Foundation,
- * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
- * or visit www.oracle.com if you need additional information or have any
- * questions.
- */
-
-package com.sun.javafx.css.parser;
-
-import java.io.CharArrayReader;
-import java.io.Reader;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import static org.junit.Assert.*;
-import static org.junit.Assert.assertEquals;
-
-import org.junit.Test;
-
-
-public class CSSLexerTest {
-
-    public CSSLexerTest() {
-    }
-
-    private void checkTokens(List resultTokens, Token... expectedTokens)
-            throws org.junit.ComparisonFailure {
-
-        if (expectedTokens.length != resultTokens.size()) {
-            throw new org.junit.ComparisonFailure(
-                    "lengths do not match",
-                    Arrays.toString(expectedTokens),
-                    resultTokens.toString()
-            );
-        }
-
-        for (int n = 0; n getTokens(String string) {
-
-        Reader reader = new CharArrayReader(string.toCharArray());
-        final CSSLexer lexer = CSSLexer.getInstance();
-        lexer.setReader(reader);
-
-        final List tokens = new ArrayList();
-
-        Token token = null;
-        do {
-            token = lexer.nextToken();
-            tokens.add(token);
-        } while (token.getType() != Token.EOF);
-
-        return Collections.unmodifiableList(tokens);
-    }
-
-    private void lexDigitsWithUnits(String units, int type) throws org.junit.ComparisonFailure {
-
-        checkTokens(getTokens("123"+units), new Token(type, "123"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens("123.45"+units), new Token(type, "123.45"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens(".45"+units), new Token(type, ".45"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens("-123"+units), new Token(type, "-123"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens("-.45"+units), new Token(type, "-.45"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens("+123"+units), new Token(type, "+123"+units), Token.EOF_TOKEN);
-        checkTokens(getTokens("+.45"+units), new Token(type, "+.45"+units), Token.EOF_TOKEN);
-    }
-
-    @Test
-    public void testLexValidDigits() {
-        lexDigitsWithUnits("", CSSLexer.NUMBER);
-    }
-
-    @Test
-    public void testLexValidDigitsWithCM() {
-        lexDigitsWithUnits("cm", CSSLexer.CM);
-        // case should be ignored
-        lexDigitsWithUnits("cM", CSSLexer.CM);
-    }
-    @Test
-    public void testLexValidDigitsWithDEG() {
-        lexDigitsWithUnits("deg", CSSLexer.DEG);
-        // case should be ignored
-        lexDigitsWithUnits("dEg", CSSLexer.DEG);
-    }
-    @Test
-    public void testLexValidDigitsWithEM() {
-        lexDigitsWithUnits("em", CSSLexer.EMS);
-        // case should be ignored
-        lexDigitsWithUnits("Em", CSSLexer.EMS);
-    }
-    @Test
-    public void testLexValidDigitsWithEX() {
-        lexDigitsWithUnits("ex", CSSLexer.EXS);
-        // case should be ignored
-        lexDigitsWithUnits("Ex", CSSLexer.EXS);
-    }
-    @Test
-    public void testLexValidDigitsWithGRAD() {
-        lexDigitsWithUnits("grad", CSSLexer.GRAD);
-        // case should be ignored
-        lexDigitsWithUnits("gRad", CSSLexer.GRAD);
-    }
-    @Test
-    public void testLexValidDigitsWithIN() {
-        lexDigitsWithUnits("in", CSSLexer.IN);
-        // case should be ignored
-        lexDigitsWithUnits("In", CSSLexer.IN);
-    }
-    @Test
-    public void testLexValidDigitsWithMM() {
-        lexDigitsWithUnits("mm", CSSLexer.MM);
-        // case should be ignored
-        lexDigitsWithUnits("mM", CSSLexer.MM);
-    }
-    @Test
-    public void testLexValidDigitsWithPC() {
-        lexDigitsWithUnits("pc", CSSLexer.PC);
-        // case should be ignored
-        lexDigitsWithUnits("Pc", CSSLexer.PC);
-    }
-    @Test
-    public void testLexValidDigitsWithPT() {
-        lexDigitsWithUnits("pt", CSSLexer.PT);
-        // case should be ignored
-        lexDigitsWithUnits("PT", CSSLexer.PT);
-    }
-    @Test
-    public void testLexValidDigitsWithPX() {
-        lexDigitsWithUnits("px", CSSLexer.PX);
-        // case should be ignored
-        lexDigitsWithUnits("Px", CSSLexer.PX);
-    }
-    @Test
-    public void testLexValidDigitsWithRAD() {
-        lexDigitsWithUnits("rad", CSSLexer.RAD);
-        // case should be ignored
-        lexDigitsWithUnits("RaD", CSSLexer.RAD);
-    }
-    @Test
-    public void testLexValidDigitsWithTURN() {
-        lexDigitsWithUnits("turn", CSSLexer.TURN);
-        // case should be ignored
-        lexDigitsWithUnits("TurN", CSSLexer.TURN);
-    }
-    @Test
-    public void testLexValidDigitsWithS() {
lexDigitsWithUnits("s", CSSLexer.SECONDS); - // case should be ignored - lexDigitsWithUnits("S", CSSLexer.SECONDS); - } - @Test - public void testLexValidDigitsWithMS() { - lexDigitsWithUnits("ms", CSSLexer.MS); - // case should be ignored - lexDigitsWithUnits("mS", CSSLexer.MS); - } - @Test - public void testLexValidDigitsWithPCT() { - lexDigitsWithUnits("%", CSSLexer.PERCENTAGE); - } - @Test - public void testLexValidDigitsWithBadUnits() { - lexDigitsWithUnits("xyzzy", Token.INVALID); - } - @Test - public void textLexValidDigitsValidDigits() { - checkTokens( - getTokens("foo: 10pt; bar: 20%;"), - new Token(CSSLexer.IDENT, "foo"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PT, "10pt"), - new Token(CSSLexer.SEMI, ";"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.IDENT, "bar"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PERCENTAGE, "20%"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - @Test - public void textLexInvalidDigitsValidDigits() { - checkTokens( - getTokens("foo: 10pz; bar: 20%;"), - new Token(CSSLexer.IDENT, "foo"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(Token.INVALID, "10pz"), - new Token(CSSLexer.SEMI, ";"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.IDENT, "bar"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PERCENTAGE, "20%"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - @Test - public void textLexValidDigitsBangImportant() { - checkTokens( - getTokens("foo: 10pt !important;"), - new Token(CSSLexer.IDENT, "foo"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PT, "10pt"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.IMPORTANT_SYM, "!important"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - @Test - public void textLexInvalidDigitsBangImportant() { - checkTokens( - getTokens("foo: 10pz !important;"), - new Token(CSSLexer.IDENT, "foo"), - new Token(CSSLexer.COLON, ":"), - new Token(CSSLexer.WS, " "), - new Token(Token.INVALID, "10pz"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.IMPORTANT_SYM, "!important"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - @Test - public void textLexValidDigitsInSequence() { - checkTokens( - getTokens("-1 0px 1pt .5em;"), - new Token(CSSLexer.NUMBER, "-1"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PX, "0px"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PT, "1pt"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.EMS, ".5em"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - @Test - public void textLexInvalidDigitsInSequence() { - checkTokens( - getTokens("-1 0px 1pz .5em;"), - new Token(CSSLexer.NUMBER, "-1"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.PX, "0px"), - new Token(CSSLexer.WS, " "), - new Token(Token.INVALID, "1pz"), - new Token(CSSLexer.WS, " "), - new Token(CSSLexer.EMS, ".5em"), - new Token(CSSLexer.SEMI, ";"), - Token.EOF_TOKEN - ); - } - - @Test - public void testTokenOffset() { - - String str = "a: b;"; - // [?][0] = line - // [?][1] = offset - Token[] expected = { - new Token(CSSLexer.IDENT, "a", 1, 0), - new Token(CSSLexer.COLON, ":", 1, 1), - new Token(CSSLexer.WS, " ", 1, 2), - new Token(CSSLexer.IDENT, "b", 1, 3), - new Token(CSSLexer.SEMI, ";", 1, 4), - Token.EOF_TOKEN - }; - - List tlist = getTokens(str); - checkTokens(tlist, expected); - - for(int n=0; n tlist = getTokens(str); - 
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n tlist = getTokens(str);
-        checkTokens(tlist, expected);
-
-        for(int n=0; n resultTokens, Token... expectedTokens)
+            throws org.junit.ComparisonFailure {
+
+        if (expectedTokens.length != resultTokens.size()) {
+            throw new org.junit.ComparisonFailure(
+                    "lengths do not match",
+                    Arrays.toString(expectedTokens),
+                    resultTokens.toString()
+            );
+        }
+
+        for (int n = 0; n getTokens(String string) {
+
+        Reader reader = new CharArrayReader(string.toCharArray());
+        final CssLexer lexer = new CssLexer();
+        lexer.setReader(reader);
+
+        final List tokens = new ArrayList();
+
+        Token token = null;
+        do {
+            token = lexer.nextToken();
+            tokens.add(token);
+        } while (token.getType() != Token.EOF);
+
+        return Collections.unmodifiableList(tokens);
+    }
+
+    private void lexDigitsWithUnits(String units, int type) throws org.junit.ComparisonFailure {
+
+        checkTokens(getTokens("123"+units), new Token(type, "123"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens("123.45"+units), new Token(type, "123.45"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens(".45"+units), new Token(type, ".45"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens("-123"+units), new Token(type, "-123"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens("-.45"+units), new Token(type, "-.45"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens("+123"+units), new Token(type, "+123"+units), Token.EOF_TOKEN);
+        checkTokens(getTokens("+.45"+units), new Token(type, "+.45"+units), Token.EOF_TOKEN);
+    }
+
+    @Test
+    public void testLexValidDigits() {
+        lexDigitsWithUnits("", CssLexer.NUMBER);
+    }
+
+    @Test
+    public void testLexValidDigitsWithCM() {
+        lexDigitsWithUnits("cm", CssLexer.CM);
+        // case should be ignored
+        lexDigitsWithUnits("cM", CssLexer.CM);
+    }
+    @Test
+    public void testLexValidDigitsWithDEG() {
+        lexDigitsWithUnits("deg", CssLexer.DEG);
+        // case should be ignored
+        lexDigitsWithUnits("dEg", CssLexer.DEG);
+    }
+    @Test
+    public void testLexValidDigitsWithEM() {
+        lexDigitsWithUnits("em", CssLexer.EMS);
+        // case should be ignored
+        lexDigitsWithUnits("Em", CssLexer.EMS);
+    }
+    @Test
+    public void testLexValidDigitsWithEX() {
+        lexDigitsWithUnits("ex", CssLexer.EXS);
+        // case should be ignored
+        lexDigitsWithUnits("Ex", CssLexer.EXS);
+    }
+    @Test
+    public void testLexValidDigitsWithGRAD() {
+        lexDigitsWithUnits("grad", CssLexer.GRAD);
+        // case should be ignored
+        lexDigitsWithUnits("gRad", CssLexer.GRAD);
+    }
+    @Test
+    public void testLexValidDigitsWithIN() {
+        lexDigitsWithUnits("in", CssLexer.IN);
+        // case should be ignored
+        lexDigitsWithUnits("In", CssLexer.IN);
+    }
+    @Test
+    public void testLexValidDigitsWithMM() {
+        lexDigitsWithUnits("mm", CssLexer.MM);
+        // case should be ignored
+        lexDigitsWithUnits("mM", CssLexer.MM);
+    }
+    @Test
+    public void testLexValidDigitsWithPC() {
+        lexDigitsWithUnits("pc", CssLexer.PC);
+        // case should be ignored
+        lexDigitsWithUnits("Pc", CssLexer.PC);
+    }
+    @Test
+    public void testLexValidDigitsWithPT() {
+        lexDigitsWithUnits("pt", CssLexer.PT);
+        // case should be ignored
+        lexDigitsWithUnits("PT", CssLexer.PT);
+    }
+    @Test
+    public void testLexValidDigitsWithPX() {
+        lexDigitsWithUnits("px", CssLexer.PX);
+        // case should be ignored
+        lexDigitsWithUnits("Px", CssLexer.PX);
+    }
+    @Test
+    public void testLexValidDigitsWithRAD() {
+        lexDigitsWithUnits("rad", CssLexer.RAD);
+        // case should be ignored
+        lexDigitsWithUnits("RaD", CssLexer.RAD);
+    }
+    @Test
+    public void testLexValidDigitsWithTURN() {
+        lexDigitsWithUnits("turn", CssLexer.TURN);
+        // case should be ignored
+        lexDigitsWithUnits("TurN", CssLexer.TURN);
+    }
+    @Test
+    public void testLexValidDigitsWithS() {
+        lexDigitsWithUnits("s", CssLexer.SECONDS);
+        // case should be ignored
+        lexDigitsWithUnits("S", CssLexer.SECONDS);
+    }
+    @Test
+    public void testLexValidDigitsWithMS() {
+        lexDigitsWithUnits("ms", CssLexer.MS);
+        // case should be ignored
+        lexDigitsWithUnits("mS", CssLexer.MS);
+    }
+    @Test
+    public void testLexValidDigitsWithPCT() {
+        lexDigitsWithUnits("%", CssLexer.PERCENTAGE);
+    }
+    @Test
+    public void testLexValidDigitsWithBadUnits() {
+        lexDigitsWithUnits("xyzzy", Token.INVALID);
+    }
+    @Test
+    public void textLexValidDigitsValidDigits() {
+        checkTokens(
+            getTokens("foo: 10pt; bar: 20%;"),
+            new Token(CssLexer.IDENT, "foo"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PT, "10pt"),
+            new Token(CssLexer.SEMI, ";"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.IDENT, "bar"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PERCENTAGE, "20%"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+    @Test
+    public void textLexInvalidDigitsValidDigits() {
+        checkTokens(
+            getTokens("foo: 10pz; bar: 20%;"),
+            new Token(CssLexer.IDENT, "foo"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(Token.INVALID, "10pz"),
+            new Token(CssLexer.SEMI, ";"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.IDENT, "bar"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PERCENTAGE, "20%"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+    @Test
+    public void textLexValidDigitsBangImportant() {
+        checkTokens(
+            getTokens("foo: 10pt !important;"),
+            new Token(CssLexer.IDENT, "foo"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PT, "10pt"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.IMPORTANT_SYM, "!important"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+    @Test
+    public void textLexInvalidDigitsBangImportant() {
+        checkTokens(
+            getTokens("foo: 10pz !important;"),
+            new Token(CssLexer.IDENT, "foo"),
+            new Token(CssLexer.COLON, ":"),
+            new Token(CssLexer.WS, " "),
+            new Token(Token.INVALID, "10pz"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.IMPORTANT_SYM, "!important"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+    @Test
+    public void textLexValidDigitsInSequence() {
+        checkTokens(
+            getTokens("-1 0px 1pt .5em;"),
+            new Token(CssLexer.NUMBER, "-1"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PX, "0px"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PT, "1pt"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.EMS, ".5em"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+    @Test
+    public void textLexInvalidDigitsInSequence() {
+        checkTokens(
+            getTokens("-1 0px 1pz .5em;"),
+            new Token(CssLexer.NUMBER, "-1"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.PX, "0px"),
+            new Token(CssLexer.WS, " "),
+            new Token(Token.INVALID, "1pz"),
+            new Token(CssLexer.WS, " "),
+            new Token(CssLexer.EMS, ".5em"),
+            new Token(CssLexer.SEMI, ";"),
+            Token.EOF_TOKEN
+        );
+    }
+
+    @Test
+    public void testTokenOffset() {
+
+        String str = "a: b;";
+        // [?][0] = line
+        // [?][1] = offset
+        Token[] expected = {
+            new Token(CssLexer.IDENT, "a", 1, 0),
+            new Token(CssLexer.COLON, ":", 1, 1),
+            new Token(CssLexer.WS, " ", 1, 2),
+            new Token(CssLexer.IDENT, "b", 1, 3),
+            new Token(CssLexer.SEMI, ";", 1, 4),
+            Token.EOF_TOKEN
+        };
+
+        List tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n tlist = getTokens(str);
+        checkTokens(tlist, expected);
+
+        for(int n=0; n