Mirror of https://github.com/hak5/nano-tetra-modules.git, synced 2025-10-29 16:58:09 +00:00.

Commit: Add modules to repository
18 PortalAuth/includes/scripts/libs/tinycss/tests/__init__.py (Executable file)
@@ -0,0 +1,18 @@
# coding: utf8
"""
    Test suite for tinycss
    ----------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals


def assert_errors(errors, expected_errors):
    """Check substrings of error messages rather than complete messages."""
    assert len(errors) == len(expected_errors)
    for error, expected in zip(errors, expected_errors):
        assert expected in str(error)
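For illustration, a minimal sketch of how this helper is used (the values here are hypothetical; the real call sites appear in the test modules below):

    # Each expected entry only needs to be a substring of str(error),
    # which keeps the tests robust to small changes in message wording.
    errors = [ValueError('unknown at-rule in stylesheet context: @lipsum')]
    assert_errors(errors, ['unknown at-rule'])  # passes: substring match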
137 PortalAuth/includes/scripts/libs/tinycss/tests/speed.py (Executable file)
@@ -0,0 +1,137 @@
# coding: utf8
"""
    Speed tests
    -----------

    Note: this file is not named test_*.py as it is not part of the
    test suite run by pytest.

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals, division

import sys
import os.path
import contextlib
import timeit
import functools

from cssutils import parseString

from .. import tokenizer
from ..css21 import CSS21Parser
from ..parsing import remove_whitespace


CSS_REPEAT = 4
TIMEIT_REPEAT = 3
TIMEIT_NUMBER = 20


def load_css():
    filename = os.path.join(os.path.dirname(__file__),
                            '..', '..', 'docs', '_static', 'custom.css')
    with open(filename, 'rb') as fd:
        return b'\n'.join([fd.read()] * CSS_REPEAT)


# Pre-load so that I/O is not measured
CSS = load_css()


@contextlib.contextmanager
def install_tokenizer(name):
    original = tokenizer.tokenize_flat
    try:
        tokenizer.tokenize_flat = getattr(tokenizer, name)
        yield
    finally:
        tokenizer.tokenize_flat = original


def parse(tokenizer_name):
    with install_tokenizer(tokenizer_name):
        stylesheet = CSS21Parser().parse_stylesheet_bytes(CSS)
    result = []
    for rule in stylesheet.rules:
        selector = rule.selector.as_css()
        declarations = [
            (declaration.name, len(list(remove_whitespace(declaration.value))))
            for declaration in rule.declarations]
        result.append((selector, declarations))
    return result

parse_cython = functools.partial(parse, 'cython_tokenize_flat')
parse_python = functools.partial(parse, 'python_tokenize_flat')


def parse_cssutils():
    stylesheet = parseString(CSS)
    result = []
    for rule in stylesheet.cssRules:
        selector = rule.selectorText
        declarations = [
            (declaration.name, len(list(declaration.propertyValue)))
            for declaration in rule.style.getProperties(all=True)]
        result.append((selector, declarations))
    return result


def check_consistency():
    result = parse_python()
    #import pprint
    #pprint.pprint(result)
    assert len(result) > 0
    if tokenizer.cython_tokenize_flat:
        assert parse_cython() == result
    assert parse_cssutils() == result
    version = '.'.join(map(str, sys.version_info[:3]))
    print('Python {}, consistency OK.'.format(version))


def warm_up():
    is_pypy = hasattr(sys, 'pypy_translation_info')
    if is_pypy:
        print('Warming up for PyPy...')
        for i in range(80):
            for i in range(10):
                parse_python()
                parse_cssutils()
            sys.stdout.write('.')
            sys.stdout.flush()
        sys.stdout.write('\n')


def time(function):
    seconds = timeit.Timer(function).repeat(TIMEIT_REPEAT, TIMEIT_NUMBER)
    milliseconds = int(min(seconds) * 1000)
    return milliseconds


def run():
    if tokenizer.cython_tokenize_flat:
        data_set = [
            ('tinycss + speedups ', parse_cython),
        ]
    else:
        print('Speedups are NOT available.')
        data_set = []
    data_set += [
        ('tinycss WITHOUT speedups', parse_python),
        ('cssutils ', parse_cssutils),
    ]
    label, function = data_set.pop(0)
    ref = time(function)
    print('{} {} ms'.format(label, ref))
    for label, function in data_set:
        result = time(function)
        print('{} {} ms {:.2f}x'.format(label, result, result / ref))


if __name__ == '__main__':
    check_consistency()
    warm_up()
    run()
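A side note on the timing idiom above, as a self-contained sketch: timeit.Timer.repeat(r, n) returns one total (in seconds) per repetition of n runs, and taking the minimum is the conventional way to discount interference from other system activity:

    import timeit

    # The fastest of the repeats is the least disturbed by the rest of
    # the system, so it best approximates the code's intrinsic cost.
    fastest_ms = int(min(
        timeit.Timer(lambda: sum(range(1000))).repeat(3, 20)) * 1000)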
47 PortalAuth/includes/scripts/libs/tinycss/tests/test_api.py (Executable file)
@@ -0,0 +1,47 @@
# coding: utf8
"""
    Tests for the public API
    ------------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals
import itertools

from pytest import raises

from tinycss import make_parser
from tinycss.page3 import CSSPage3Parser


def test_make_parser():
    class MyParser(object):
        def __init__(self, some_config):
            self.some_config = some_config

    parsers = [
        make_parser(),
        make_parser('page3'),
        make_parser(CSSPage3Parser),
        make_parser(MyParser, some_config=42),
        make_parser(CSSPage3Parser, MyParser, some_config=42),
        make_parser(MyParser, 'page3', some_config=42),
    ]

    for parser, exp in zip(parsers, [False, True, True, False, True, True]):
        assert isinstance(parser, CSSPage3Parser) == exp

    for parser, exp in zip(parsers, [False, False, False, True, True, True]):
        assert isinstance(parser, MyParser) == exp

    for parser in parsers[3:]:
        assert parser.some_config == 42

    # Extra or missing named parameters
    raises(TypeError, make_parser, some_config=4)
    raises(TypeError, make_parser, 'page3', some_config=4)
    raises(TypeError, make_parser, MyParser)
    raises(TypeError, make_parser, MyParser, some_config=4, other_config=7)
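The observable contract exercised above, restated as a minimal sketch (assuming the vendored package is importable as tinycss):

    from tinycss import make_parser
    from tinycss.page3 import CSSPage3Parser

    # The 'page3' shorthand mixes CSSPage3Parser into the returned
    # parser, so it understands @page selectors and margin rules.
    parser = make_parser('page3')
    assert isinstance(parser, CSSPage3Parser)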
203 PortalAuth/includes/scripts/libs/tinycss/tests/test_color3.py (Executable file)
@@ -0,0 +1,203 @@
# coding: utf8
"""
    Tests for the CSS 3 color parser
    --------------------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals

import pytest

from tinycss.color3 import parse_color_string, hsl_to_rgb


@pytest.mark.parametrize(('css_source', 'expected_result'), [
    ('', None),
    (' /* hey */\n', None),
    ('4', None),
    ('top', None),
    ('/**/transparent', (0, 0, 0, 0)),
    ('transparent', (0, 0, 0, 0)),
    (' transparent\n', (0, 0, 0, 0)),
    ('TransParent', (0, 0, 0, 0)),
    ('currentColor', 'currentColor'),
    ('CURRENTcolor', 'currentColor'),
    ('current_Color', None),

    ('black', (0, 0, 0, 1)),
    ('white', (1, 1, 1, 1)),
    ('fuchsia', (1, 0, 1, 1)),
    ('cyan', (0, 1, 1, 1)),
    ('CyAn', (0, 1, 1, 1)),
    ('darkkhaki', (189 / 255., 183 / 255., 107 / 255., 1)),

    ('#', None),
    ('#f', None),
    ('#ff', None),
    ('#fff', (1, 1, 1, 1)),
    ('#ffg', None),
    ('#ffff', None),
    ('#fffff', None),
    ('#ffffff', (1, 1, 1, 1)),
    ('#fffffg', None),
    ('#fffffff', None),
    ('#ffffffff', None),
    ('#fffffffff', None),

    ('#cba987', (203 / 255., 169 / 255., 135 / 255., 1)),
    ('#CbA987', (203 / 255., 169 / 255., 135 / 255., 1)),
    ('#1122aA', (17 / 255., 34 / 255., 170 / 255., 1)),
    ('#12a', (17 / 255., 34 / 255., 170 / 255., 1)),

    ('rgb(203, 169, 135)', (203 / 255., 169 / 255., 135 / 255., 1)),
    ('RGB(255, 255, 255)', (1, 1, 1, 1)),
    ('rgB(0, 0, 0)', (0, 0, 0, 1)),
    ('rgB(0, 51, 255)', (0, .2, 1, 1)),
    ('rgb(0,51,255)', (0, .2, 1, 1)),
    ('rgb(0\t, 51 ,255)', (0, .2, 1, 1)),
    ('rgb(/* R */0, /* G */51, /* B */255)', (0, .2, 1, 1)),
    ('rgb(-51, 306, 0)', (-.2, 1.2, 0, 1)),  # out of 0..1 is allowed

    ('rgb(42%, 3%, 50%)', (.42, .03, .5, 1)),
    ('RGB(100%, 100%, 100%)', (1, 1, 1, 1)),
    ('rgB(0%, 0%, 0%)', (0, 0, 0, 1)),
    ('rgB(10%, 20%, 30%)', (.1, .2, .3, 1)),
    ('rgb(10%,20%,30%)', (.1, .2, .3, 1)),
    ('rgb(10%\t, 20% ,30%)', (.1, .2, .3, 1)),
    ('rgb(/* R */10%, /* G */20%, /* B */30%)', (.1, .2, .3, 1)),
    ('rgb(-12%, 110%, 1400%)', (-.12, 1.1, 14, 1)),  # out of 0..1 is allowed

    ('rgb(10%, 50%, 0)', None),
    ('rgb(255, 50%, 0%)', None),
    ('rgb(0, 0 0)', None),
    ('rgb(0, 0, 0deg)', None),
    ('rgb(0, 0, light)', None),
    ('rgb()', None),
    ('rgb(0)', None),
    ('rgb(0, 0)', None),
    ('rgb(0, 0, 0, 0)', None),
    ('rgb(0%)', None),
    ('rgb(0%, 0%)', None),
    ('rgb(0%, 0%, 0%, 0%)', None),
    ('rgb(0%, 0%, 0%, 0)', None),

    ('rgba(0, 0, 0, 0)', (0, 0, 0, 0)),
    ('rgba(203, 169, 135, 0.3)', (203 / 255., 169 / 255., 135 / 255., 0.3)),
    ('RGBA(255, 255, 255, 0)', (1, 1, 1, 0)),
    ('rgBA(0, 51, 255, 1)', (0, 0.2, 1, 1)),
    ('rgba(0, 51, 255, 1.1)', (0, 0.2, 1, 1)),
    ('rgba(0, 51, 255, 37)', (0, 0.2, 1, 1)),
    ('rgba(0, 51, 255, 0.42)', (0, 0.2, 1, 0.42)),
    ('rgba(0, 51, 255, 0)', (0, 0.2, 1, 0)),
    ('rgba(0, 51, 255, -0.1)', (0, 0.2, 1, 0)),
    ('rgba(0, 51, 255, -139)', (0, 0.2, 1, 0)),

    ('rgba(42%, 3%, 50%, 0.3)', (.42, .03, .5, 0.3)),
    ('RGBA(100%, 100%, 100%, 0)', (1, 1, 1, 0)),
    ('rgBA(0%, 20%, 100%, 1)', (0, 0.2, 1, 1)),
    ('rgba(0%, 20%, 100%, 1.1)', (0, 0.2, 1, 1)),
    ('rgba(0%, 20%, 100%, 37)', (0, 0.2, 1, 1)),
    ('rgba(0%, 20%, 100%, 0.42)', (0, 0.2, 1, 0.42)),
    ('rgba(0%, 20%, 100%, 0)', (0, 0.2, 1, 0)),
    ('rgba(0%, 20%, 100%, -0.1)', (0, 0.2, 1, 0)),
    ('rgba(0%, 20%, 100%, -139)', (0, 0.2, 1, 0)),

    ('rgba(255, 255, 255, 0%)', None),
    ('rgba(10%, 50%, 0, 1)', None),
    ('rgba(255, 50%, 0%, 1)', None),
    ('rgba(0, 0, 0 0)', None),
    ('rgba(0, 0, 0, 0deg)', None),
    ('rgba(0, 0, 0, light)', None),
    ('rgba()', None),
    ('rgba(0)', None),
    ('rgba(0, 0, 0)', None),
    ('rgba(0, 0, 0, 0, 0)', None),
    ('rgba(0%)', None),
    ('rgba(0%, 0%)', None),
    ('rgba(0%, 0%, 0%)', None),
    ('rgba(0%, 0%, 0%, 0%)', None),
    ('rgba(0%, 0%, 0%, 0%, 0%)', None),

    ('HSL(0, 0%, 0%)', (0, 0, 0, 1)),
    ('hsL(0, 100%, 50%)', (1, 0, 0, 1)),
    ('hsl(60, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
    ('hsl(780, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
    ('hsl(-300, 100%, 37.5%)', (0.75, 0.75, 0, 1)),
    ('hsl(300, 50%, 50%)', (0.75, 0.25, 0.75, 1)),

    ('hsl(10, 50%, 0)', None),
    ('hsl(50%, 50%, 0%)', None),
    ('hsl(0, 0% 0%)', None),
    ('hsl(30deg, 100%, 100%)', None),
    ('hsl(0, 0%, light)', None),
    ('hsl()', None),
    ('hsl(0)', None),
    ('hsl(0, 0%)', None),
    ('hsl(0, 0%, 0%, 0%)', None),

    ('HSLA(-300, 100%, 37.5%, 1)', (0.75, 0.75, 0, 1)),
    ('hsLA(-300, 100%, 37.5%, 12)', (0.75, 0.75, 0, 1)),
    ('hsla(-300, 100%, 37.5%, 0.2)', (0.75, 0.75, 0, .2)),
    ('hsla(-300, 100%, 37.5%, 0)', (0.75, 0.75, 0, 0)),
    ('hsla(-300, 100%, 37.5%, -3)', (0.75, 0.75, 0, 0)),

    ('hsla(10, 50%, 0, 1)', None),
    ('hsla(50%, 50%, 0%, 1)', None),
    ('hsla(0, 0% 0%, 1)', None),
    ('hsla(30deg, 100%, 100%, 1)', None),
    ('hsla(0, 0%, light, 1)', None),
    ('hsla()', None),
    ('hsla(0)', None),
    ('hsla(0, 0%)', None),
    ('hsla(0, 0%, 0%, 50%)', None),
    ('hsla(0, 0%, 0%, 1, 0%)', None),

    ('cmyk(0, 0, 0, 0)', None),
])
def test_color(css_source, expected_result):
    result = parse_color_string(css_source)
    if isinstance(result, tuple):
        for got, expected in zip(result, expected_result):
            # Compensate for floating point errors:
            assert abs(got - expected) < 1e-10
        for i, attr in enumerate(['red', 'green', 'blue', 'alpha']):
            assert getattr(result, attr) == result[i]
    else:
        assert result == expected_result


@pytest.mark.parametrize(('hsl', 'expected_rgb'), [
    # http://en.wikipedia.org/wiki/HSL_and_HSV#Examples
    ((0,     0,    100 ), (1,     1,     1    )),
    ((127,   0,    100 ), (1,     1,     1    )),
    ((0,     0,    50  ), (0.5,   0.5,   0.5  )),
    ((127,   0,    50  ), (0.5,   0.5,   0.5  )),
    ((0,     0,    0   ), (0,     0,     0    )),
    ((127,   0,    0   ), (0,     0,     0    )),
    ((0,     100,  50  ), (1,     0,     0    )),
    ((60,    100,  37.5), (0.75,  0.75,  0    )),
    ((780,   100,  37.5), (0.75,  0.75,  0    )),
    ((-300,  100,  37.5), (0.75,  0.75,  0    )),
    ((120,   100,  25  ), (0,     0.5,   0    )),
    ((180,   100,  75  ), (0.5,   1,     1    )),
    ((240,   100,  75  ), (0.5,   0.5,   1    )),
    ((300,   50,   50  ), (0.75,  0.25,  0.75 )),
    ((61.8,  63.8, 39.3), (0.628, 0.643, 0.142)),
    ((251.1, 83.2, 51.1), (0.255, 0.104, 0.918)),
    ((134.9, 70.7, 39.6), (0.116, 0.675, 0.255)),
    ((49.5,  89.3, 49.7), (0.941, 0.785, 0.053)),
    ((283.7, 77.5, 54.2), (0.704, 0.187, 0.897)),
    ((14.3,  81.7, 62.4), (0.931, 0.463, 0.316)),
    ((56.9,  99.1, 76.5), (0.998, 0.974, 0.532)),
    ((162.4, 77.9, 44.7), (0.099, 0.795, 0.591)),
    ((248.3, 60.1, 37.3), (0.211, 0.149, 0.597)),
    ((240.5, 29,   60.7), (0.495, 0.493, 0.721)),
])
def test_hsl(hsl, expected_rgb):
    for got, expected in zip(hsl_to_rgb(*hsl), expected_rgb):
        # Compensate for floating point errors and Wikipedia's rounding:
        assert abs(got - expected) < 0.001
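The expected values above follow the css3-color HSL algorithm. A sketch of that conversion for reference (not tinycss's actual implementation, though any correct one must agree with it; hue in degrees, saturation and lightness in percent, as in the tests):

    def hsl_to_rgb_sketch(hue, saturation, lightness):
        h = (hue / 360.0) % 1.0          # normalize, wrapping 780 or -300
        s = saturation / 100.0
        l = lightness / 100.0
        m2 = l * (s + 1) if l <= 0.5 else l + s - l * s
        m1 = l * 2 - m2

        def hue_to_rgb(h):
            h = h % 1.0
            if h * 6 < 1:
                return m1 + (m2 - m1) * h * 6
            if h * 2 < 1:
                return m2
            if h * 3 < 2:
                return m1 + (m2 - m1) * (2 / 3.0 - h) * 6
            return m1

        return hue_to_rgb(h + 1 / 3.0), hue_to_rgb(h), hue_to_rgb(h - 1 / 3.0)

    # hsl_to_rgb_sketch(60, 100, 37.5) == (0.75, 0.75, 0.0), matching the table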
352 PortalAuth/includes/scripts/libs/tinycss/tests/test_css21.py (Executable file)
@@ -0,0 +1,352 @@
# coding: utf8
"""
    Tests for the CSS 2.1 parser
    ----------------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals
import io
import os
import tempfile

import pytest

from tinycss.css21 import CSS21Parser

from .test_tokenizer import jsonify
from . import assert_errors


def parse_bytes(css_bytes, kwargs):
    return CSS21Parser().parse_stylesheet_bytes(css_bytes, **kwargs)


def parse_bytesio_file(css_bytes, kwargs):
    css_file = io.BytesIO(css_bytes)
    return CSS21Parser().parse_stylesheet_file(css_file, **kwargs)


def parse_filename(css_bytes, kwargs):
    css_file = tempfile.NamedTemporaryFile(delete=False)
    try:
        css_file.write(css_bytes)
        # Windows can not open the filename a second time while
        # it is still open for writing.
        css_file.close()
        return CSS21Parser().parse_stylesheet_file(css_file.name, **kwargs)
    finally:
        os.remove(css_file.name)


@pytest.mark.parametrize(('css_bytes', 'kwargs', 'expected_result', 'parse'), [
    params + (parse,)
    for parse in [parse_bytes, parse_bytesio_file, parse_filename]
    for params in [
        ('@import "é";'.encode('utf8'), {}, 'é'),
        ('@import "é";'.encode('utf16'), {}, 'é'),  # with a BOM
        ('@import "é";'.encode('latin1'), {}, 'é'),
        ('@import "£";'.encode('Shift-JIS'), {}, '\x81\x92'),  # latin1 mojibake
        ('@charset "Shift-JIS";@import "£";'.encode('Shift-JIS'), {}, '£'),
        (' @charset "Shift-JIS";@import "£";'.encode('Shift-JIS'), {},
         '\x81\x92'),
        ('@import "£";'.encode('Shift-JIS'),
         {'document_encoding': 'Shift-JIS'}, '£'),
        ('@import "£";'.encode('Shift-JIS'),
         {'document_encoding': 'utf8'}, '\x81\x92'),
        ('@charset "utf8"; @import "£";'.encode('utf8'),
         {'document_encoding': 'latin1'}, '£'),
        # Mojibake yay!
        (' @charset "utf8"; @import "é";'.encode('utf8'),
         {'document_encoding': 'latin1'}, 'Ã©'),
        ('@import "é";'.encode('utf8'), {'document_encoding': 'latin1'}, 'Ã©'),
    ]
])
def test_bytes(css_bytes, kwargs, expected_result, parse):
    stylesheet = parse(css_bytes, kwargs)
    assert stylesheet.rules[0].at_keyword == '@import'
    assert stylesheet.rules[0].uri == expected_result


@pytest.mark.parametrize(('css_source', 'expected_rules', 'expected_errors'), [
    (' /* hey */\n', 0, []),
    ('foo {}', 1, []),
    ('foo{} @lipsum{} bar{}', 2,
     ['unknown at-rule in stylesheet context: @lipsum']),
    ('@charset "ascii"; foo {}', 1, []),
    (' @charset "ascii"; foo {}', 1, ['mis-placed or malformed @charset rule']),
    ('@charset ascii; foo {}', 1, ['mis-placed or malformed @charset rule']),
    ('foo {} @charset "ascii";', 1, ['mis-placed or malformed @charset rule']),
])
def test_at_rules(css_source, expected_rules, expected_errors):
    # Pass 'encoding' to allow @charset
    stylesheet = CSS21Parser().parse_stylesheet(css_source, encoding='utf8')
    assert_errors(stylesheet.errors, expected_errors)
    result = len(stylesheet.rules)
    assert result == expected_rules


@pytest.mark.parametrize(('css_source', 'expected_rules', 'expected_errors'), [
    (' /* hey */\n', [], []),

    ('foo{} /* hey */\n@bar;@baz{}',
     [('foo', []), ('@bar', [], None), ('@baz', [], [])], []),

    ('@import "foo.css"/**/;', [
        ('@import', [('STRING', 'foo.css')], None)], []),

    ('@import "foo.css"/**/', [
        ('@import', [('STRING', 'foo.css')], None)], []),

    ('@import "foo.css', [
        ('@import', [('STRING', 'foo.css')], None)], []),

    ('{}', [], ['empty selector']),

    ('a{b:4}', [('a', [('b', [('INTEGER', 4)])])], []),

    ('@page {\t b: 4; @margin}', [('@page', [], [
        ('S', '\t '), ('IDENT', 'b'), (':', ':'), ('S', ' '), ('INTEGER', 4),
        (';', ';'), ('S', ' '), ('ATKEYWORD', '@margin'),
    ])], []),

    ('foo', [], ['no declaration block found']),

    ('foo @page {} bar {}', [('bar', [])],
     ['unexpected ATKEYWORD token in selector']),

    ('foo { content: "unclosed string;\n color:red; ; margin/**/\n: 2cm; }',
     [('foo', [('margin', [('DIMENSION', 2)])])],
     ['unexpected BAD_STRING token in property value']),

    ('foo { 4px; bar: 12% }',
     [('foo', [('bar', [('PERCENTAGE', 12)])])],
     ['expected a property name, got DIMENSION']),

    ('foo { bar! 3cm auto ; baz: 7px }',
     [('foo', [('baz', [('DIMENSION', 7)])])],
     ["expected ':', got DELIM"]),

    ('foo { bar ; baz: {("}"/* comment */) {0@fizz}} }',
     [('foo', [('baz', [('{', [
         ('(', [('STRING', '}')]), ('S', ' '),
         ('{', [('INTEGER', 0), ('ATKEYWORD', '@fizz')])
     ])])])],
     ["expected ':'"]),

    ('foo { bar: ; baz: not(z) }',
     [('foo', [('baz', [('FUNCTION', 'not', [('IDENT', 'z')])])])],
     ['expected a property value']),

    ('foo { bar: (]) ; baz: U+20 }',
     [('foo', [('baz', [('UNICODE-RANGE', 'U+20')])])],
     ['unmatched ] token in (']),
])
def test_core_parser(css_source, expected_rules, expected_errors):
    class CoreParser(CSS21Parser):
        """A parser that always accepts unparsed at-rules."""
        def parse_at_rule(self, rule, stylesheet_rules, errors, context):
            return rule

    stylesheet = CoreParser().parse_stylesheet(css_source)
    assert_errors(stylesheet.errors, expected_errors)
    result = [
        (rule.at_keyword, list(jsonify(rule.head)),
         list(jsonify(rule.body))
         if rule.body is not None else None)
        if rule.at_keyword else
        (rule.selector.as_css(), [
            (decl.name, list(jsonify(decl.value)))
            for decl in rule.declarations])
        for rule in stylesheet.rules
    ]
    assert result == expected_rules


@pytest.mark.parametrize(('css_source', 'expected_declarations',
                          'expected_errors'), [
    (' /* hey */\n', [], []),

    ('b:4', [('b', [('INTEGER', 4)])], []),

    ('{b:4}', [], ['expected a property name, got {']),

    ('b:4} c:3', [], ['unmatched } token in property value']),

    (' 4px; bar: 12% ',
     [('bar', [('PERCENTAGE', 12)])],
     ['expected a property name, got DIMENSION']),

    ('bar! 3cm auto ; baz: 7px',
     [('baz', [('DIMENSION', 7)])],
     ["expected ':', got DELIM"]),

    ('foo; bar ; baz: {("}"/* comment */) {0@fizz}}',
     [('baz', [('{', [
         ('(', [('STRING', '}')]), ('S', ' '),
         ('{', [('INTEGER', 0), ('ATKEYWORD', '@fizz')])
     ])])],
     ["expected ':'", "expected ':'"]),

    ('bar: ; baz: not(z)',
     [('baz', [('FUNCTION', 'not', [('IDENT', 'z')])])],
     ['expected a property value']),

    ('bar: (]) ; baz: U+20',
     [('baz', [('UNICODE-RANGE', 'U+20')])],
     ['unmatched ] token in (']),
])
def test_parse_style_attr(css_source, expected_declarations, expected_errors):
    declarations, errors = CSS21Parser().parse_style_attr(css_source)
    assert_errors(errors, expected_errors)
    result = [(decl.name, list(jsonify(decl.value)))
              for decl in declarations]
    assert result == expected_declarations


@pytest.mark.parametrize(('css_source', 'expected_declarations',
                          'expected_errors'), [
    (' /* hey */\n', [], []),

    ('a:1; b:2',
     [('a', [('INTEGER', 1)], None), ('b', [('INTEGER', 2)], None)], []),

    ('a:1 important; b: important',
     [('a', [('INTEGER', 1), ('S', ' '), ('IDENT', 'important')], None),
      ('b', [('IDENT', 'important')], None)],
     []),

    ('a:1 !important; b:2',
     [('a', [('INTEGER', 1)], 'important'), ('b', [('INTEGER', 2)], None)],
     []),

    ('a:1!\t Im\\50 O\\RTant; b:2',
     [('a', [('INTEGER', 1)], 'important'), ('b', [('INTEGER', 2)], None)],
     []),

    ('a: !important; b:2',
     [('b', [('INTEGER', 2)], None)],
     ['expected a value before !important']),

])
def test_important(css_source, expected_declarations, expected_errors):
    declarations, errors = CSS21Parser().parse_style_attr(css_source)
    assert_errors(errors, expected_errors)
    result = [(decl.name, list(jsonify(decl.value)), decl.priority)
              for decl in declarations]
    assert result == expected_declarations


@pytest.mark.parametrize(('css_source', 'expected_rules', 'expected_errors'), [
    (' /* hey */\n', [], []),
    ('@import "foo.css";', [('foo.css', ['all'])], []),
    ('@import url(foo.css);', [('foo.css', ['all'])], []),
    ('@import "foo.css" screen, print;',
     [('foo.css', ['screen', 'print'])], []),
    ('@charset "ascii"; @import "foo.css"; @import "bar.css";',
     [('foo.css', ['all']), ('bar.css', ['all'])], []),
    ('foo {} @import "foo.css";',
     [], ['@import rule not allowed after a ruleset']),
    ('@page {} @import "foo.css";',
     [], ['@import rule not allowed after an @page rule']),
    ('@import ;',
     [], ['expected URI or STRING for @import rule']),
    ('@import foo.css;',
     [], ['expected URI or STRING for @import rule, got IDENT']),
    ('@import "foo.css" {}',
     [], ["expected ';', got a block"]),
])
def test_at_import(css_source, expected_rules, expected_errors):
    # Pass 'encoding' to allow @charset
    stylesheet = CSS21Parser().parse_stylesheet(css_source, encoding='utf8')
    assert_errors(stylesheet.errors, expected_errors)

    result = [
        (rule.uri, rule.media)
        for rule in stylesheet.rules
        if rule.at_keyword == '@import'
    ]
    assert result == expected_rules


@pytest.mark.parametrize(('css', 'expected_result', 'expected_errors'), [
    ('@page {}', (None, (0, 0), []), []),
    ('@page:first {}', ('first', (1, 0), []), []),
    ('@page :left{}', ('left', (0, 1), []), []),
    ('@page\t\n:right {}', ('right', (0, 1), []), []),
    ('@page :last {}', None, ['invalid @page selector']),
    ('@page : right {}', None, ['invalid @page selector']),
    ('@page table:left {}', None, ['invalid @page selector']),

    ('@page;', None, ['invalid @page rule: missing block']),
    ('@page { a:1; ; b: 2 }',
     (None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
     []),
    ('@page { a:1; c: ; b: 2 }',
     (None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
     ['expected a property value']),
    ('@page { a:1; @top-left {} b: 2 }',
     (None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
     ['unknown at-rule in @page context: @top-left']),
    ('@page { a:1; @top-left {}; b: 2 }',
     (None, (0, 0), [('a', [('INTEGER', 1)]), ('b', [('INTEGER', 2)])]),
     ['unknown at-rule in @page context: @top-left']),
])
def test_at_page(css, expected_result, expected_errors):
    stylesheet = CSS21Parser().parse_stylesheet(css)
    assert_errors(stylesheet.errors, expected_errors)

    if expected_result is None:
        assert not stylesheet.rules
    else:
        assert len(stylesheet.rules) == 1
        rule = stylesheet.rules[0]
        assert rule.at_keyword == '@page'
        assert rule.at_rules == []  # in CSS 2.1
        result = (
            rule.selector,
            rule.specificity,
            [(decl.name, list(jsonify(decl.value)))
             for decl in rule.declarations],
        )
        assert result == expected_result


@pytest.mark.parametrize(('css_source', 'expected_rules', 'expected_errors'), [
    (' /* hey */\n', [], []),
    ('@media all {}', [(['all'], [])], []),
    ('@media screen, print {}', [(['screen', 'print'], [])], []),
    ('@media all;', [], ['invalid @media rule: missing block']),
    ('@media {}', [], ['expected media types for @media']),
    ('@media 4 {}', [], ['expected a media type, got INTEGER']),
    ('@media , screen {}', [], ['expected a media type']),
    ('@media screen, {}', [], ['expected a media type']),
    ('@media screen print {}', [],
     ['expected a media type, got IDENT, IDENT']),

    ('@media all { @page { a: 1 } @media; @import; foo { a: 1 } }',
     [(['all'], [('foo', [('a', [('INTEGER', 1)])])])],
     ['@page rule not allowed in @media',
      '@media rule not allowed in @media',
      '@import rule not allowed in @media']),

])
def test_at_media(css_source, expected_rules, expected_errors):
    stylesheet = CSS21Parser().parse_stylesheet(css_source)
    assert_errors(stylesheet.errors, expected_errors)

    for rule in stylesheet.rules:
        assert rule.at_keyword == '@media'
    result = [
        (rule.media, [
            (sub_rule.selector.as_css(), [
                (decl.name, list(jsonify(decl.value)))
                for decl in sub_rule.declarations])
            for sub_rule in rule.rules
        ])
        for rule in stylesheet.rules
    ]
    assert result == expected_rules
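For orientation, the parser API these tests drive, as a minimal sketch (assuming tinycss is importable):

    from tinycss.css21 import CSS21Parser

    sheet = CSS21Parser().parse_stylesheet('foo{color:red}')
    rule = sheet.rules[0]
    # Rulesets expose a selector plus parsed declarations; recoverable
    # syntax errors are collected on sheet.errors instead of raising.
    assert rule.selector.as_css() == 'foo'
    assert [decl.name for decl in rule.declarations] == ['color']
    assert sheet.errors == []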
80 PortalAuth/includes/scripts/libs/tinycss/tests/test_decoding.py (Executable file)
@@ -0,0 +1,80 @@
# coding: utf8
"""
    Tests for decoding bytes to Unicode
    -----------------------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals

import pytest

from tinycss.decoding import decode


def params(css, encoding, use_bom=False, expect_error=False, **kwargs):
    """Nicer syntax to make a tuple."""
    return css, encoding, use_bom, expect_error, kwargs


@pytest.mark.parametrize(('css', 'encoding', 'use_bom', 'expect_error',
                          'kwargs'), [
    params('', 'utf8'),  # default to utf8
    params('𐂃', 'utf8'),
    params('é', 'latin1'),  # utf8 fails, fall back on latin1
    params('£', 'ShiftJIS', expect_error=True),
    params('£', 'ShiftJIS', protocol_encoding='Shift-JIS'),
    params('£', 'ShiftJIS', linking_encoding='Shift-JIS'),
    params('£', 'ShiftJIS', document_encoding='Shift-JIS'),
    params('£', 'ShiftJIS', protocol_encoding='utf8',
           document_encoding='ShiftJIS'),
    params('@charset "utf8"; £', 'ShiftJIS', expect_error=True),
    params('@charset "utf£8"; £', 'ShiftJIS', expect_error=True),
    params('@charset "unknown-encoding"; £', 'ShiftJIS', expect_error=True),
    params('@charset "utf8"; £', 'ShiftJIS', document_encoding='ShiftJIS'),
    params('£', 'ShiftJIS', linking_encoding='utf8',
           document_encoding='ShiftJIS'),
    params('@charset "utf-32"; 𐂃', 'utf-32-be'),
    params('@charset "Shift-JIS"; £', 'ShiftJIS'),
    params('@charset "ISO-8859-8"; £', 'ShiftJIS', expect_error=True),
    params('𐂃', 'utf-16-le', expect_error=True),  # no BOM
    params('𐂃', 'utf-16-le', use_bom=True),
    params('𐂃', 'utf-32-be', expect_error=True),
    params('𐂃', 'utf-32-be', use_bom=True),
    params('𐂃', 'utf-32-be', document_encoding='utf-32-be'),
    params('𐂃', 'utf-32-be', linking_encoding='utf-32-be'),
    params('@charset "utf-32-le"; 𐂃', 'utf-32-be',
           use_bom=True, expect_error=True),
    # protocol_encoding takes precedence over @charset
    params('@charset "ISO-8859-8"; £', 'ShiftJIS',
           protocol_encoding='Shift-JIS'),
    params('@charset "unknown-encoding"; £', 'ShiftJIS',
           protocol_encoding='Shift-JIS'),
    params('@charset "Shift-JIS"; £', 'ShiftJIS',
           protocol_encoding='utf8'),
    # @charset takes precedence over document_encoding
    params('@charset "Shift-JIS"; £', 'ShiftJIS',
           document_encoding='ISO-8859-8'),
    # @charset takes precedence over linking_encoding
    params('@charset "Shift-JIS"; £', 'ShiftJIS',
           linking_encoding='ISO-8859-8'),
    # linking_encoding takes precedence over document_encoding
    params('£', 'ShiftJIS',
           linking_encoding='Shift-JIS', document_encoding='ISO-8859-8'),
])
def test_decode(css, encoding, use_bom, expect_error, kwargs):
    # Workaround PyPy and CPython 3.0 bug: https://bugs.pypy.org/issue1094
    css = css.encode('utf16').decode('utf16')
    if use_bom:
        source = '\ufeff' + css
    else:
        source = css
    css_bytes = source.encode(encoding)
    result, result_encoding = decode(css_bytes, **kwargs)
    if expect_error:
        assert result != css, 'Unexpected unicode success'
    else:
        assert result == css, 'Unexpected unicode error'
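The precedence these cases pin down, summarized with a small sketch (assuming tinycss is importable):

    from tinycss.decoding import decode

    # Order exercised above, highest priority first:
    #   protocol_encoding > @charset in the stylesheet > linking_encoding
    #   > document_encoding, with UTF-8 as the default and latin1 as the
    #   last resort (per the 'é'/latin1 case above).
    css_bytes = '@charset "Shift-JIS"; a{}'.encode('shift-jis')
    text, used_encoding = decode(css_bytes, document_encoding='ISO-8859-8')
    # The @charset rule wins over document_encoding here.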
120 PortalAuth/includes/scripts/libs/tinycss/tests/test_page3.py (Executable file)
@@ -0,0 +1,120 @@
# coding: utf8
"""
    Tests for the Paged Media 3 parser
    ----------------------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals

import pytest

from tinycss.css21 import CSS21Parser
from tinycss.page3 import CSSPage3Parser
from .test_tokenizer import jsonify
from . import assert_errors


@pytest.mark.parametrize(('css', 'expected_selector',
                          'expected_specificity', 'expected_errors'), [
    ('@page {}', (None, None), (0, 0, 0), []),

    ('@page :first {}', (None, 'first'), (0, 1, 0), []),
    ('@page:left{}', (None, 'left'), (0, 0, 1), []),
    ('@page :right {}', (None, 'right'), (0, 0, 1), []),
    ('@page :blank{}', (None, 'blank'), (0, 1, 0), []),
    ('@page :last {}', None, None, ['invalid @page selector']),
    ('@page : first {}', None, None, ['invalid @page selector']),

    ('@page foo:first {}', ('foo', 'first'), (1, 1, 0), []),
    ('@page bar :left {}', ('bar', 'left'), (1, 0, 1), []),
    (r'@page \26:right {}', ('&', 'right'), (1, 0, 1), []),

    ('@page foo {}', ('foo', None), (1, 0, 0), []),
    (r'@page \26 {}', ('&', None), (1, 0, 0), []),

    ('@page foo fist {}', None, None, ['invalid @page selector']),
    ('@page foo, bar {}', None, None, ['invalid @page selector']),
    ('@page foo&first {}', None, None, ['invalid @page selector']),
])
def test_selectors(css, expected_selector, expected_specificity,
                   expected_errors):
    stylesheet = CSSPage3Parser().parse_stylesheet(css)
    assert_errors(stylesheet.errors, expected_errors)

    if stylesheet.rules:
        assert len(stylesheet.rules) == 1
        rule = stylesheet.rules[0]
        assert rule.at_keyword == '@page'
        selector = rule.selector
        assert rule.specificity == expected_specificity
    else:
        selector = None
    assert selector == expected_selector


@pytest.mark.parametrize(('css', 'expected_declarations',
                          'expected_rules', 'expected_errors'), [
    ('@page {}', [], [], []),
    ('@page { foo: 4; bar: z }',
     [('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])], [], []),
    ('''@page { foo: 4;
        @top-center { content: "Awesome Title" }
        @bottom-left { content: counter(page) }
        bar: z
    }''',
     [('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])],
     [('@top-center', [('content', [('STRING', 'Awesome Title')])]),
      ('@bottom-left', [('content', [
          ('FUNCTION', 'counter', [('IDENT', 'page')])])])],
     []),
    ('''@page { foo: 4;
        @bottom-top { content: counter(page) }
        bar: z
    }''',
     [('foo', [('INTEGER', 4)]), ('bar', [('IDENT', 'z')])],
     [],
     ['unknown at-rule in @page context: @bottom-top']),

    ('@page{} @top-right{}', [], [], [
        '@top-right rule not allowed in stylesheet']),
    ('@page{ @top-right 4 {} }', [], [], [
        'unexpected INTEGER token in @top-right rule header']),
    # Not many error-recovery tests here; that should be covered in test_css21
])
def test_content(css, expected_declarations, expected_rules, expected_errors):
    stylesheet = CSSPage3Parser().parse_stylesheet(css)
    assert_errors(stylesheet.errors, expected_errors)

    def declarations(rule):
        return [(decl.name, list(jsonify(decl.value)))
                for decl in rule.declarations]

    assert len(stylesheet.rules) == 1
    rule = stylesheet.rules[0]
    assert rule.at_keyword == '@page'
    assert declarations(rule) == expected_declarations
    rules = [(margin_rule.at_keyword, declarations(margin_rule))
             for margin_rule in rule.at_rules]
    assert rules == expected_rules


def test_in_at_media():
    css = '@media print { @page { size: A4 } }'

    stylesheet = CSS21Parser().parse_stylesheet(css)
    assert_errors(stylesheet.errors, ['@page rule not allowed in @media'])
    at_media_rule, = stylesheet.rules
    assert at_media_rule.at_keyword == '@media'
    assert at_media_rule.rules == []

    stylesheet = CSSPage3Parser().parse_stylesheet(css)
    assert stylesheet.errors == []
    at_media_rule, = stylesheet.rules
    at_page_rule, = at_media_rule.rules
    assert at_media_rule.at_keyword == '@media'
    assert at_page_rule.at_keyword == '@page'
    assert len(at_page_rule.declarations) == 1
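A compact sketch of what the Paged Media 3 parser adds over CSS 2.1, as exercised above (assuming tinycss is importable):

    from tinycss.page3 import CSSPage3Parser

    sheet = CSSPage3Parser().parse_stylesheet(
        '@page chapter:first { @top-center { content: "Title" } }')
    rule = sheet.rules[0]
    assert rule.selector == ('chapter', 'first')   # (name, pseudo-class)
    assert rule.specificity == (1, 1, 0)
    # Margin boxes are kept as nested at-rules on the @page rule:
    assert rule.at_rules[0].at_keyword == '@top-center'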
310 PortalAuth/includes/scripts/libs/tinycss/tests/test_tokenizer.py (Executable file)
@@ -0,0 +1,310 @@
# coding: utf8
"""
    Tests for the tokenizer
    -----------------------

    :copyright: (c) 2012 by Simon Sapin.
    :license: BSD, see LICENSE for more details.
"""


from __future__ import unicode_literals

import sys
import os

import pytest

from tinycss.tokenizer import (
    python_tokenize_flat, cython_tokenize_flat, regroup)


def test_speedups():
    if os.environ.get('TINYCSS_SKIP_SPEEDUPS_TESTS'):  # pragma: no cover
        return
    assert cython_tokenize_flat is not None, (
        'Cython speedups are not installed, related tests will '
        'be skipped. Set the TINYCSS_SKIP_SPEEDUPS_TESTS environment '
        'variable if this is expected (eg. on PyPy).')


@pytest.mark.parametrize(('tokenize', 'css_source', 'expected_tokens'), [
    (tokenize,) + test_data
    for tokenize in (python_tokenize_flat, cython_tokenize_flat)
    for test_data in [
        ('', []),
        ('red -->',
         [('IDENT', 'red'), ('S', ' '), ('CDC', '-->')]),
        # Longest match rule: no CDC
        ('red-->',
         [('IDENT', 'red--'), ('DELIM', '>')]),

        (r'''p[example="\
foo(int x) {\
    this.x = x;\
}\
"]''', [
            ('IDENT', 'p'),
            ('[', '['),
            ('IDENT', 'example'),
            ('DELIM', '='),
            ('STRING', 'foo(int x) {    this.x = x;}'),
            (']', ']')]),

        #### Numbers are parsed
        ('42 .5 -4pX 1.25em 30%',
         [('INTEGER', 42), ('S', ' '),
          ('NUMBER', .5), ('S', ' '),
          # units are normalized to lower-case:
          ('DIMENSION', -4, 'px'), ('S', ' '),
          ('DIMENSION', 1.25, 'em'), ('S', ' '),
          ('PERCENTAGE', 30, '%')]),

        #### URLs are extracted
        ('url(foo.png)', [('URI', 'foo.png')]),
        ('url("foo.png")', [('URI', 'foo.png')]),

        #### Escaping

        (r'/* Comment with a \ backslash */',
         [('COMMENT', r'/* Comment with a \ backslash */')]),  # Unchanged

        # backslash followed by a newline in a string: ignored
        ('"Lorem\\\nIpsum"', [('STRING', 'LoremIpsum')]),

        # backslash followed by a newline outside a string: stands for itself
        ('Lorem\\\nIpsum', [
            ('IDENT', 'Lorem'), ('DELIM', '\\'),
            ('S', '\n'), ('IDENT', 'Ipsum')]),

        # Cancel the meaning of special characters
        (r'"Lore\m Ipsum"', [('STRING', 'Lorem Ipsum')]),  # or not special
        (r'"Lorem \49psum"', [('STRING', 'Lorem Ipsum')]),
        (r'"Lorem \49 psum"', [('STRING', 'Lorem Ipsum')]),
        (r'"Lorem\"Ipsum"', [('STRING', 'Lorem"Ipsum')]),
        (r'"Lorem\\Ipsum"', [('STRING', r'Lorem\Ipsum')]),
        (r'"Lorem\5c Ipsum"', [('STRING', r'Lorem\Ipsum')]),
        (r'Lorem\+Ipsum', [('IDENT', 'Lorem+Ipsum')]),
        (r'Lorem+Ipsum', [('IDENT', 'Lorem'), ('DELIM', '+'), ('IDENT', 'Ipsum')]),
        (r'url(foo\).png)', [('URI', 'foo).png')]),

        # Unicode and backslash escaping
        ('\\26 B', [('IDENT', '&B')]),
        ('\\&B', [('IDENT', '&B')]),
        ('@\\26\tB', [('ATKEYWORD', '@&B')]),
        ('@\\&B', [('ATKEYWORD', '@&B')]),
        ('#\\26\nB', [('HASH', '#&B')]),
        ('#\\&B', [('HASH', '#&B')]),
        ('\\26\r\nB(', [('FUNCTION', '&B(')]),
        ('\\&B(', [('FUNCTION', '&B(')]),
        (r'12.5\000026B', [('DIMENSION', 12.5, '&b')]),
        (r'12.5\0000263B', [('DIMENSION', 12.5, '&3b')]),  # max 6 digits
        (r'12.5\&B', [('DIMENSION', 12.5, '&b')]),
        (r'"\26 B"', [('STRING', '&B')]),
        (r"'\000026B'", [('STRING', '&B')]),
        (r'"\&B"', [('STRING', '&B')]),
        (r'url("\26 B")', [('URI', '&B')]),
        (r'url(\26 B)', [('URI', '&B')]),
        (r'url("\&B")', [('URI', '&B')]),
        (r'url(\&B)', [('URI', '&B')]),
        (r'Lorem\110000Ipsum', [('IDENT', 'Lorem\uFFFDIpsum')]),

        #### Bad strings

        # String ends at EOF without closing: no error, parsed
        ('"Lorem\\26Ipsum', [('STRING', 'Lorem&Ipsum')]),
        # Unescaped newline: ends the string, error, unparsed
        ('"Lorem\\26Ipsum\n', [
            ('BAD_STRING', r'"Lorem\26Ipsum'), ('S', '\n')]),
        # Tokenization restarts after the newline, so the second " starts
        # a new string (which ends at EOF without errors, as above.)
        ('"Lorem\\26Ipsum\ndolor" sit', [
            ('BAD_STRING', r'"Lorem\26Ipsum'), ('S', '\n'),
            ('IDENT', 'dolor'), ('STRING', ' sit')]),

    ]])
def test_tokens(tokenize, css_source, expected_tokens):
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    sources = [css_source]
    if sys.version_info[0] < 3:
        # On Python 2.x, ASCII-only bytestrings can be used
        # where Unicode is expected.
        sources.append(css_source.encode('ascii'))
    for css_source in sources:
        tokens = tokenize(css_source, ignore_comments=False)
        result = [
            (token.type, token.value) + (
                () if token.unit is None else (token.unit,))
            for token in tokens
        ]
        assert result == expected_tokens


@pytest.mark.parametrize('tokenize', [
    python_tokenize_flat, cython_tokenize_flat])
def test_positions(tokenize):
    """Test the reported line/column position of each token."""
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    css = '/* Lorem\nipsum */\fa {\n    color: red;\tcontent: "dolor\\\fsit" }'
    tokens = tokenize(css, ignore_comments=False)
    result = [(token.type, token.line, token.column) for token in tokens]
    assert result == [
        ('COMMENT', 1, 1), ('S', 2, 9),
        ('IDENT', 3, 1), ('S', 3, 2), ('{', 3, 3),
        ('S', 3, 4), ('IDENT', 4, 5), (':', 4, 10),
        ('S', 4, 11), ('IDENT', 4, 12), (';', 4, 15), ('S', 4, 16),
        ('IDENT', 4, 17), (':', 4, 24), ('S', 4, 25), ('STRING', 4, 26),
        ('S', 5, 5), ('}', 5, 6)]


@pytest.mark.parametrize(('tokenize', 'css_source', 'expected_tokens'), [
    (tokenize,) + test_data
    for tokenize in (python_tokenize_flat, cython_tokenize_flat)
    for test_data in [
        ('', []),
        (r'Lorem\26 "i\psum"4px', [
            ('IDENT', 'Lorem&'), ('STRING', 'ipsum'), ('DIMENSION', 4)]),

        ('not([[lorem]]{ipsum (42)})', [
            ('FUNCTION', 'not', [
                ('[', [
                    ('[', [
                        ('IDENT', 'lorem'),
                    ]),
                ]),
                ('{', [
                    ('IDENT', 'ipsum'),
                    ('S', ' '),
                    ('(', [
                        ('INTEGER', 42),
                    ])
                ])
            ])]),

        # Close everything at EOF, no error
        ('a[b{"d', [
            ('IDENT', 'a'),
            ('[', [
                ('IDENT', 'b'),
                ('{', [
                    ('STRING', 'd'),
                ]),
            ]),
        ]),

        # Any remaining ), ] or } token is a nesting error
        ('a[b{d]e}', [
            ('IDENT', 'a'),
            ('[', [
                ('IDENT', 'b'),
                ('{', [
                    ('IDENT', 'd'),
                    (']', ']'),  # The error is visible here
                    ('IDENT', 'e'),
                ]),
            ]),
        ]),
        # ref:
        ('a[b{d}e]', [
            ('IDENT', 'a'),
            ('[', [
                ('IDENT', 'b'),
                ('{', [
                    ('IDENT', 'd'),
                ]),
                ('IDENT', 'e'),
            ]),
        ]),
    ]])
def test_token_grouping(tokenize, css_source, expected_tokens):
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    tokens = regroup(tokenize(css_source, ignore_comments=False))
    result = list(jsonify(tokens))
    assert result == expected_tokens


def jsonify(tokens):
    """Turn tokens into "JSON-compatible" data structures."""
    for token in tokens:
        if token.type == 'FUNCTION':
            yield (token.type, token.function_name,
                   list(jsonify(token.content)))
        elif token.is_container:
            yield token.type, list(jsonify(token.content))
        else:
            yield token.type, token.value


@pytest.mark.parametrize(('tokenize', 'ignore_comments', 'expected_tokens'), [
    (tokenize,) + test_data
    for tokenize in (python_tokenize_flat, cython_tokenize_flat)
    for test_data in [
        (False, [
            ('COMMENT', '/* lorem */'),
            ('S', ' '),
            ('IDENT', 'ipsum'),
            ('[', [
                ('IDENT', 'dolor'),
                ('COMMENT', '/* sit */'),
            ]),
            ('BAD_COMMENT', '/* amet')
        ]),
        (True, [
            ('S', ' '),
            ('IDENT', 'ipsum'),
            ('[', [
                ('IDENT', 'dolor'),
            ]),
        ]),
    ]])
def test_comments(tokenize, ignore_comments, expected_tokens):
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    css_source = '/* lorem */ ipsum[dolor/* sit */]/* amet'
    tokens = regroup(tokenize(css_source, ignore_comments))
    result = list(jsonify(tokens))
    assert result == expected_tokens


@pytest.mark.parametrize(('tokenize', 'css_source'), [
    (tokenize, test_data)
    for tokenize in (python_tokenize_flat, cython_tokenize_flat)
    for test_data in [
        r'''p[example="\
foo(int x) {\
    this.x = x;\
}\
"]''',
        '"Lorem\\26Ipsum\ndolor" sit',
        '/* Lorem\nipsum */\fa {\n    color: red;\tcontent: "dolor\\\fsit" }',
        'not([[lorem]]{ipsum (42)})',
        'a[b{d]e}',
        'a[b{"d',
    ]])
def test_token_serialize_css(tokenize, css_source):
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    for _regroup in [regroup, lambda x: x]:
        tokens = _regroup(tokenize(css_source, ignore_comments=False))
        result = ''.join(token.as_css() for token in tokens)
        assert result == css_source


@pytest.mark.parametrize(('tokenize', 'css_source'), [
    (tokenize, test_data)
    for tokenize in (python_tokenize_flat, cython_tokenize_flat)
    for test_data in [
        '(8, foo, [z])', '[8, foo, (z)]', '{8, foo, [z]}', 'func(8, foo, [z])'
    ]
])
def test_token_api(tokenize, css_source):
    if tokenize is None:  # pragma: no cover
        pytest.skip('Speedups not available')
    tokens = list(regroup(tokenize(css_source)))
    assert len(tokens) == 1
    token = tokens[0]
    expected_len = 7  # 2 spaces, 2 commas, 3 others.
    assert len(token.content) == expected_len
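Finally, a small sketch of the regroup() and jsonify() pipeline used throughout (jsonify is defined in this module; tinycss is assumed importable):

    from tinycss.tokenizer import python_tokenize_flat, regroup

    flat = python_tokenize_flat('a[b]', ignore_comments=False)
    grouped = regroup(flat)
    # Container tokens ('[', '{', '(', FUNCTION) nest their content;
    # the closing delimiter is consumed by the container itself.
    assert list(jsonify(grouped)) == [('IDENT', 'a'), ('[', [('IDENT', 'b')])]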