/*
 * Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package test.javafx.css;

import com.sun.javafx.css.parser.Token;
import com.sun.javafx.css.parser.TokenShim;

import java.io.CharArrayReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javafx.css.CssLexerShim;

import org.junit.Test;

import static org.junit.Assert.assertEquals;

/**
 * Unit tests for the CSS lexer: verifies token types, token text, and the
 * line/offset bookkeeping performed while scanning.
 */
public class CssLexerTest {

    public CssLexerTest() {
    }

    /**
     * Compares the lexed tokens against the expected tokens, verifying both
     * token type and token text. Throws ComparisonFailure directly (rather
     * than using assert methods) so that the complete expected and actual
     * token streams appear in the failure message.
     */
    private void checkTokens(List<TokenShim> resultTokens, TokenShim... expectedTokens)
            throws org.junit.ComparisonFailure {

        if (expectedTokens.length != resultTokens.size()) {
            throw new org.junit.ComparisonFailure(
                "lengths do not match",
                Arrays.toString(expectedTokens),
                resultTokens.toString()
            );
        }

        for (int n = 0; n < expectedTokens.length; n++) {

            final TokenShim result = resultTokens.get(n);
            final TokenShim expected = expectedTokens[n];

            if (expected.getType() != result.getType()) {
                throw new org.junit.ComparisonFailure(
                    "token " + n + " types do not match",
                    Arrays.toString(expectedTokens),
                    resultTokens.toString()
                );
            }

            final String expectedText = expected.getText();
            final String resultText = result.getText();

            if (expectedText == null ? resultText != null : !expectedText.equals(resultText)) {
                throw new org.junit.ComparisonFailure(
                    "token " + n + " text does not match",
                    Arrays.toString(expectedTokens),
                    resultTokens.toString()
                );
            }
        }
    }

    /**
     * Verifies that each lexed token carries the expected line and offset.
     * Call after checkTokens, which has already verified that the two
     * sequences are the same length.
     */
    private void checkLinesAndOffsets(List<TokenShim> resultTokens, TokenShim... expectedTokens) {
        for (int n = 0; n < resultTokens.size(); n++) {
            final TokenShim tok = resultTokens.get(n);
            assertEquals("bad line. tok=" + tok, expectedTokens[n].getLine(), tok.getLine());
            assertEquals("bad offset. tok=" + tok, expectedTokens[n].getOffset(), tok.getOffset());
        }
    }

    /**
     * Lexes the given string to completion and returns every token produced,
     * including the trailing EOF token.
     */
    List<TokenShim> getTokens(String string) {

        Reader reader = new CharArrayReader(string.toCharArray());
        final CssLexerShim lexer = new CssLexerShim();
        lexer.setReader(reader);

        final List<TokenShim> tokens = new ArrayList<>();

        TokenShim token = null;
        do {
            token = lexer.nextToken();
            tokens.add(token);
        } while (token.getType() != Token.EOF);

        return Collections.unmodifiableList(tokens);
    }

    /**
     * Lexes integer, decimal, and signed numeric literals with the given unit
     * suffix, asserting that each yields a single token of the given type.
     */
    private void lexDigitsWithUnits(String units, int type) throws org.junit.ComparisonFailure {

        checkTokens(getTokens("123" + units), new TokenShim(type, "123" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens("123.45" + units), new TokenShim(type, "123.45" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens(".45" + units), new TokenShim(type, ".45" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens("-123" + units), new TokenShim(type, "-123" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens("-.45" + units), new TokenShim(type, "-.45" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens("+123" + units), new TokenShim(type, "+123" + units), TokenShim.EOF_TOKEN);
        checkTokens(getTokens("+.45" + units), new TokenShim(type, "+.45" + units), TokenShim.EOF_TOKEN);
    }

    @Test
    public void testLexValidDigits() {
        lexDigitsWithUnits("", CssLexerShim.NUMBER);
    }

    @Test
    public void testLexValidDigitsWithCM() {
        lexDigitsWithUnits("cm", CssLexerShim.CM);
        // case should be ignored
        lexDigitsWithUnits("cM", CssLexerShim.CM);
    }

    @Test
    public void testLexValidDigitsWithDEG() {
        lexDigitsWithUnits("deg", CssLexerShim.DEG);
        // case should be ignored
        lexDigitsWithUnits("dEg", CssLexerShim.DEG);
    }

    @Test
    public void testLexValidDigitsWithEM() {
        lexDigitsWithUnits("em", CssLexerShim.EMS);
        // case should be ignored
        lexDigitsWithUnits("Em", CssLexerShim.EMS);
    }

    @Test
    public void testLexValidDigitsWithEX() {
        lexDigitsWithUnits("ex", CssLexerShim.EXS);
        // case should be ignored
        lexDigitsWithUnits("Ex", CssLexerShim.EXS);
    }

    @Test
    public void testLexValidDigitsWithGRAD() {
        lexDigitsWithUnits("grad", CssLexerShim.GRAD);
        // case should be ignored
        lexDigitsWithUnits("gRad", CssLexerShim.GRAD);
    }

    @Test
    public void testLexValidDigitsWithIN() {
        lexDigitsWithUnits("in", CssLexerShim.IN);
        // case should be ignored
        lexDigitsWithUnits("In", CssLexerShim.IN);
    }

    @Test
    public void testLexValidDigitsWithMM() {
        lexDigitsWithUnits("mm", CssLexerShim.MM);
        // case should be ignored
        lexDigitsWithUnits("mM", CssLexerShim.MM);
    }

    @Test
    public void testLexValidDigitsWithPC() {
        lexDigitsWithUnits("pc", CssLexerShim.PC);
        // case should be ignored
        lexDigitsWithUnits("Pc", CssLexerShim.PC);
    }

    @Test
    public void testLexValidDigitsWithPT() {
        lexDigitsWithUnits("pt", CssLexerShim.PT);
        // case should be ignored
        lexDigitsWithUnits("PT", CssLexerShim.PT);
    }

    @Test
    public void testLexValidDigitsWithPX() {
        lexDigitsWithUnits("px", CssLexerShim.PX);
        // case should be ignored
        lexDigitsWithUnits("Px", CssLexerShim.PX);
    }

    @Test
    public void testLexValidDigitsWithRAD() {
        lexDigitsWithUnits("rad", CssLexerShim.RAD);
        // case should be ignored
        lexDigitsWithUnits("RaD", CssLexerShim.RAD);
    }

    @Test
    public void testLexValidDigitsWithTURN() {
        lexDigitsWithUnits("turn", CssLexerShim.TURN);
        // case should be ignored
        lexDigitsWithUnits("TurN", CssLexerShim.TURN);
    }

    @Test
    public void testLexValidDigitsWithS() {
        lexDigitsWithUnits("s", CssLexerShim.SECONDS);
        // case should be ignored
        lexDigitsWithUnits("S", CssLexerShim.SECONDS);
    }

    @Test
    public void testLexValidDigitsWithMS() {
        lexDigitsWithUnits("ms", CssLexerShim.MS);
        // case should be ignored
        lexDigitsWithUnits("mS", CssLexerShim.MS);
    }

    @Test
    public void testLexValidDigitsWithPCT() {
        lexDigitsWithUnits("%", CssLexerShim.PERCENTAGE);
    }

    @Test
    public void testLexValidDigitsWithBadUnits() {
        lexDigitsWithUnits("xyzzy", Token.INVALID);
    }

    @Test
    public void textLexValidDigitsValidDigits() {
        checkTokens(
            getTokens("foo: 10pt; bar: 20%;"),
            new TokenShim(CssLexerShim.IDENT, "foo"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PT, "10pt"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.IDENT, "bar"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PERCENTAGE, "20%"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void textLexInvalidDigitsValidDigits() {
        checkTokens(
            getTokens("foo: 10pz; bar: 20%;"),
            new TokenShim(CssLexerShim.IDENT, "foo"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(Token.INVALID, "10pz"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.IDENT, "bar"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PERCENTAGE, "20%"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void textLexValidDigitsBangImportant() {
        checkTokens(
            getTokens("foo: 10pt !important;"),
            new TokenShim(CssLexerShim.IDENT, "foo"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PT, "10pt"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.IMPORTANT_SYM, "!important"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void textLexInvalidDigitsBangImportant() {
        checkTokens(
            getTokens("foo: 10pz !important;"),
            new TokenShim(CssLexerShim.IDENT, "foo"),
            new TokenShim(CssLexerShim.COLON, ":"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(Token.INVALID, "10pz"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.IMPORTANT_SYM, "!important"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void textLexValidDigitsInSequence() {
        checkTokens(
            getTokens("-1 0px 1pt .5em;"),
            new TokenShim(CssLexerShim.NUMBER, "-1"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PX, "0px"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PT, "1pt"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.EMS, ".5em"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void textLexInvalidDigitsInSequence() {
        checkTokens(
            getTokens("-1 0px 1pz .5em;"),
            new TokenShim(CssLexerShim.NUMBER, "-1"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.PX, "0px"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(Token.INVALID, "1pz"),
            new TokenShim(CssLexerShim.WS, " "),
            new TokenShim(CssLexerShim.EMS, ".5em"),
            new TokenShim(CssLexerShim.SEMI, ";"),
            TokenShim.EOF_TOKEN
        );
    }

    @Test
    public void testTokenOffset() {

        String str = "a: b;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 4),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenLineAndOffsetWithCR() {

        String str = "a: b;\rc: d;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 4),
            new TokenShim(CssLexerShim.NL, "\\r", 1, 5),
            new TokenShim(CssLexerShim.IDENT, "c", 2, 0),
            new TokenShim(CssLexerShim.COLON, ":", 2, 1),
            new TokenShim(CssLexerShim.WS, " ", 2, 2),
            new TokenShim(CssLexerShim.IDENT, "d", 2, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 2, 4),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenLineAndOffsetWithLF() {

        String str = "a: b;\nc: d;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 4),
            new TokenShim(CssLexerShim.NL, "\\n", 1, 5),
            new TokenShim(CssLexerShim.IDENT, "c", 2, 0),
            new TokenShim(CssLexerShim.COLON, ":", 2, 1),
            new TokenShim(CssLexerShim.WS, " ", 2, 2),
            new TokenShim(CssLexerShim.IDENT, "d", 2, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 2, 4),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenLineAndOffsetWithCRLF() {
        //             012345 01234
        String str = "a: b;\r\nc: d;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 4),
            new TokenShim(CssLexerShim.NL, "\\r\\n", 1, 5),
            new TokenShim(CssLexerShim.IDENT, "c", 2, 0),
            new TokenShim(CssLexerShim.COLON, ":", 2, 1),
            new TokenShim(CssLexerShim.WS, " ", 2, 2),
            new TokenShim(CssLexerShim.IDENT, "d", 2, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 2, 4),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithEmbeddedComment() {
        //             0123456789012345
        String str = "a: /*comment*/b;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 14),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 15),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenLineAndOffsetWithLeadingComment() {
        //             012345678901 01234
        String str = "/*comment*/\na: b;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.NL, "\\n", 1, 11),
            new TokenShim(CssLexerShim.IDENT, "a", 2, 0),
            new TokenShim(CssLexerShim.COLON, ":", 2, 1),
            new TokenShim(CssLexerShim.WS, " ", 2, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 2, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 2, 4),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithFunction() {
        //             01234567890
        String str = "a: b(arg);";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.LPAREN, "(", 1, 4),
            new TokenShim(CssLexerShim.IDENT, "arg", 1, 5),
            new TokenShim(CssLexerShim.RPAREN, ")", 1, 8),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 9),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithHash() {
        //             01234567890
        String str = "a: #012345;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.HASH, "#012345", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 10),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithDigits() {
        //             01234567890
        String str = "a: 123.45;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.NUMBER, "123.45", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 9),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithBangImportant() {
        //             0123456789012345
        String str = "a: b !important;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.WS, " ", 1, 4),
            new TokenShim(CssLexerShim.IMPORTANT_SYM, "!important", 1, 5),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 15),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithSkip() {
        //             0123456789012345
        String str = "a: b !imporzant;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(CssLexerShim.IDENT, "b", 1, 3),
            new TokenShim(CssLexerShim.WS, " ", 1, 4),
            new TokenShim(Token.SKIP, "!imporz", 1, 5),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 15),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenOffsetWithInvalid() {
        //             0123456789012345
        String str = "a: 1pz;";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.IDENT, "a", 1, 0),
            new TokenShim(CssLexerShim.COLON, ":", 1, 1),
            new TokenShim(CssLexerShim.WS, " ", 1, 2),
            new TokenShim(Token.INVALID, "1pz", 1, 3),
            new TokenShim(CssLexerShim.SEMI, ";", 1, 6),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testTokenLineAndOffsetMoreFully() {
        //             1            2                3         4
        //             012345678901 0123456789012345 012345678 0
        String str = "/*comment*/\n*.foo#bar:baz {\n\ta: 1em;\n}";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.NL, "\\n", 1, 11),
            new TokenShim(CssLexerShim.STAR, "*", 2, 0),
            new TokenShim(CssLexerShim.DOT, ".", 2, 1),
            new TokenShim(CssLexerShim.IDENT, "foo", 2, 2),
            new TokenShim(CssLexerShim.HASH, "#bar", 2, 5),
            new TokenShim(CssLexerShim.COLON, ":", 2, 9),
            new TokenShim(CssLexerShim.IDENT, "baz", 2, 10),
            new TokenShim(CssLexerShim.WS, " ", 2, 13),
            new TokenShim(CssLexerShim.LBRACE, "{", 2, 14),
            new TokenShim(CssLexerShim.NL, "\\n", 2, 15),
            new TokenShim(CssLexerShim.WS, "\t", 3, 0),
            new TokenShim(CssLexerShim.IDENT, "a", 3, 1),
            new TokenShim(CssLexerShim.COLON, ":", 3, 2),
            new TokenShim(CssLexerShim.WS, " ", 3, 3),
            new TokenShim(CssLexerShim.EMS, "1em", 3, 4),
            new TokenShim(CssLexerShim.SEMI, ";", 3, 7),
            new TokenShim(CssLexerShim.NL, "\\n", 3, 8),
            new TokenShim(CssLexerShim.RBRACE, "}", 4, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanUrl() {

        String str = "url(http://foo.bar.com/fonts/serif/fubar.ttf)";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/serif/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanUrlWithWhiteSpace() {

        // leading and trailing whitespace inside url(...) is not part of the URL
        String str = "url( http://foo.bar.com/fonts/serif/fubar.ttf\t)";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/serif/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanQuotedUrlWithWhiteSpace() {

        String str = "url( 'http://foo.bar.com/fonts/serif/fubar.ttf'\t)";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/serif/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanQuotedUrl() {

        // the quotes are stripped from the URL token text
        String str = "url(\"http://foo.bar.com/fonts/serif/fubar.ttf\")";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/serif/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanUrlWithEscapes() {

        // a backslash-escaped space is unescaped in the URL token text
        String str = "url(http://foo.bar.com/fonts/true\\ type/fubar.ttf)";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/true type/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanQuotedUrlWithEscapes() {

        String str = "url(\"http://foo.bar.com/fonts/true\\ type/fubar.ttf\")";
        TokenShim[] expected = {
            new TokenShim(CssLexerShim.URL, "http://foo.bar.com/fonts/true type/fubar.ttf", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanUrlWithSyntaxError() {

        // an unescaped quote inside an unquoted URL is invalid
        String str = "url(http://foo.bar.com/fonts/true'type/fubar.ttf)";
        TokenShim[] expected = {
            new TokenShim(Token.INVALID, "http://foo.bar.com/fonts/true", 1, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

    @Test
    public void testScanQuotedUrlWithSyntaxError() {

        // an unescaped newline inside a quoted URL is invalid; the CR also
        // advances the line counter, hence line 2 for the INVALID token
        String str = "url('http://foo.bar.com/fonts/true\rtype/fubar.ttf')";
        TokenShim[] expected = {
            new TokenShim(Token.INVALID, "http://foo.bar.com/fonts/true", 2, 0),
            TokenShim.EOF_TOKEN
        };

        List<TokenShim> tlist = getTokens(str);
        checkTokens(tlist, expected);
        checkLinesAndOffsets(tlist, expected);
    }

}