aboutsummaryrefslogtreecommitdiffstats
path: root/components/script/dom/bindings/codegen
diff options
context:
space:
mode:
Diffstat (limited to 'components/script/dom/bindings/codegen')
-rw-r--r--components/script/dom/bindings/codegen/parser/README1
-rw-r--r--components/script/dom/bindings/codegen/parser/UPSTREAM1
-rw-r--r--components/script/dom/bindings/codegen/parser/WebIDL.py9107
-rw-r--r--components/script/dom/bindings/codegen/parser/abstract.patch10
-rw-r--r--components/script/dom/bindings/codegen/parser/callback-location.patch20
-rw-r--r--components/script/dom/bindings/codegen/parser/debug.patch10
-rw-r--r--components/script/dom/bindings/codegen/parser/ext-attribute-no-value-error.patch11
-rw-r--r--components/script/dom/bindings/codegen/parser/inline.patch10
-rw-r--r--components/script/dom/bindings/codegen/parser/readable-stream.patch162
-rw-r--r--components/script/dom/bindings/codegen/parser/runtests.py79
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_any_null.py16
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py16
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py22
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py95
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_attr.py199
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py77
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py570
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py14
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_builtins.py59
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_bytestring.py125
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_callback.py42
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py84
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py106
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_cereactions.py157
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py128
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_const.py96
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_constructor.py594
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py72
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py47
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py20
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_dictionary.py875
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py425
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_double_null.py16
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py64
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py17
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py54
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_enum.py107
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py16
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_error_colno.py24
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py38
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py383
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py131
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_float_types.py145
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py18
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py129
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py49
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py21
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py61
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_interface.py459
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py17
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py68
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py912
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py534
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py84
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_method.py430
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_namespace.py232
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_newobject.py76
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py141
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_observableArray.py288
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py35
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_overload.py74
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_promise.py177
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py107
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_putForwards.py119
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_record.py61
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_replaceable.py84
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_sanity.py7
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py499
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py256
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_special_methods.py117
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py54
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_stringifier.py196
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_toJSON.py309
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py80
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_typedef.py94
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py19
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_undefined.py246
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py71
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py311
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_union.py198
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_union_any.py16
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py60
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_usvstring.py40
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py13
-rw-r--r--components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py74
-rw-r--r--components/script/dom/bindings/codegen/parser/union-typedef.patch22
-rwxr-xr-xcomponents/script/dom/bindings/codegen/parser/update.sh13
-rw-r--r--components/script/dom/bindings/codegen/ply/ANNOUNCE40
-rw-r--r--components/script/dom/bindings/codegen/ply/CHANGES1394
-rw-r--r--components/script/dom/bindings/codegen/ply/MANIFEST.in8
-rw-r--r--components/script/dom/bindings/codegen/ply/PKG-INFO22
-rw-r--r--components/script/dom/bindings/codegen/ply/README.md273
-rw-r--r--components/script/dom/bindings/codegen/ply/TODO16
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/README79
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/basic.py65
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py61
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py73
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py496
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py474
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas14
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/func.bas5
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas22
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas13
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas4
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas17
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas12
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas13
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas4
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas20
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas18
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas5
-rw-r--r--components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas4
-rw-r--r--components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py777
-rw-r--r--components/script/dom/bindings/codegen/ply/example/GardenSnake/README5
-rw-r--r--components/script/dom/bindings/codegen/ply/example/README10
-rw-r--r--components/script/dom/bindings/codegen/ply/example/ansic/README2
-rw-r--r--components/script/dom/bindings/codegen/ply/example/ansic/clex.py168
-rw-r--r--components/script/dom/bindings/codegen/ply/example/ansic/cparse.py1048
-rw-r--r--components/script/dom/bindings/codegen/ply/example/calc/calc.py123
-rw-r--r--components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py129
-rw-r--r--components/script/dom/bindings/codegen/ply/example/calceof/calc.py132
-rwxr-xr-xcomponents/script/dom/bindings/codegen/ply/example/classcalc/calc.py165
-rwxr-xr-xcomponents/script/dom/bindings/codegen/ply/example/cleanup.sh2
-rw-r--r--components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py132
-rw-r--r--components/script/dom/bindings/codegen/ply/example/hedit/hedit.py48
-rwxr-xr-xcomponents/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py167
-rw-r--r--components/script/dom/bindings/codegen/ply/example/optcalc/README9
-rw-r--r--components/script/dom/bindings/codegen/ply/example/optcalc/calc.py134
-rw-r--r--components/script/dom/bindings/codegen/ply/example/unicalc/calc.py133
-rw-r--r--components/script/dom/bindings/codegen/ply/example/yply/README41
-rw-r--r--components/script/dom/bindings/codegen/ply/example/yply/ylex.py119
-rw-r--r--components/script/dom/bindings/codegen/ply/example/yply/yparse.py244
-rwxr-xr-xcomponents/script/dom/bindings/codegen/ply/example/yply/yply.py51
-rw-r--r--components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO22
-rw-r--r--components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt172
-rw-r--r--components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt1
-rw-r--r--components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt1
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/__init__.py5
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/cpp.py918
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/ctokens.py133
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/lex.py1100
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/yacc.py3494
-rw-r--r--components/script/dom/bindings/codegen/ply/ply/ygen.py74
-rw-r--r--components/script/dom/bindings/codegen/ply/setup.cfg11
-rw-r--r--components/script/dom/bindings/codegen/ply/setup.py31
-rw-r--r--components/script/dom/bindings/codegen/run.py10
146 files changed, 7 insertions, 33872 deletions
diff --git a/components/script/dom/bindings/codegen/parser/README b/components/script/dom/bindings/codegen/parser/README
deleted file mode 100644
index 94b64b88459..00000000000
--- a/components/script/dom/bindings/codegen/parser/README
+++ /dev/null
@@ -1 +0,0 @@
-A WebIDL parser written in Python to be used in Mozilla. \ No newline at end of file
diff --git a/components/script/dom/bindings/codegen/parser/UPSTREAM b/components/script/dom/bindings/codegen/parser/UPSTREAM
deleted file mode 100644
index 7ac5899379e..00000000000
--- a/components/script/dom/bindings/codegen/parser/UPSTREAM
+++ /dev/null
@@ -1 +0,0 @@
-http://dev.w3.org/cvsweb/~checkout~/2006/webapi/WebIDL/Overview.html?rev=1.409;content-type=text%2Fhtml%3b+charset=utf-8 \ No newline at end of file
diff --git a/components/script/dom/bindings/codegen/parser/WebIDL.py b/components/script/dom/bindings/codegen/parser/WebIDL.py
deleted file mode 100644
index 2366e3f7027..00000000000
--- a/components/script/dom/bindings/codegen/parser/WebIDL.py
+++ /dev/null
@@ -1,9107 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-""" A WebIDL parser. """
-
-import copy
-import math
-import os
-import re
-import string
-import traceback
-from collections import OrderedDict, defaultdict
-from itertools import chain
-
-from ply import lex, yacc
-
-# Machinery
-
-
-def parseInt(literal):
- string = literal
- sign = 0
- base = 0
-
- if string[0] == "-":
- sign = -1
- string = string[1:]
- else:
- sign = 1
-
- if string[0] == "0" and len(string) > 1:
- if string[1] == "x" or string[1] == "X":
- base = 16
- string = string[2:]
- else:
- base = 8
- string = string[1:]
- else:
- base = 10
-
- value = int(string, base)
- return value * sign
-
-
-def enum(*names, **kw):
- class Foo(object):
- attrs = OrderedDict()
-
- def __init__(self, names):
- for v, k in enumerate(names):
- self.attrs[k] = v
-
- def __getattr__(self, attr):
- if attr in self.attrs:
- return self.attrs[attr]
- raise AttributeError
-
- def __setattr__(self, name, value): # this makes it read-only
- raise NotImplementedError
-
- if "base" not in kw:
- return Foo(names)
- return Foo(chain(kw["base"].attrs.keys(), names))
-
-
-class WebIDLError(Exception):
- def __init__(self, message, locations, warning=False):
- self.message = message
- self.locations = [str(loc) for loc in locations]
- self.warning = warning
-
- def __str__(self):
- return "%s: %s%s%s" % (
- self.warning and "warning" or "error",
- self.message,
- ", " if len(self.locations) != 0 else "",
- "\n".join(self.locations),
- )
-
-
-class Location(object):
- def __init__(self, lexer, lineno, lexpos, filename):
- self._line = None
- self._lineno = lineno
- self._lexpos = lexpos
- self._lexdata = lexer.lexdata
- self._file = filename if filename else "<unknown>"
-
- def __eq__(self, other):
- return self._lexpos == other._lexpos and self._file == other._file
-
- def filename(self):
- return self._file
-
- def resolve(self):
- if self._line:
- return
-
- startofline = self._lexdata.rfind("\n", 0, self._lexpos) + 1
- endofline = self._lexdata.find("\n", self._lexpos, self._lexpos + 80)
- if endofline != -1:
- self._line = self._lexdata[startofline:endofline]
- else:
- self._line = self._lexdata[startofline:]
- self._colno = self._lexpos - startofline
-
- # Our line number seems to point to the start of self._lexdata
- self._lineno += self._lexdata.count("\n", 0, startofline)
-
- def get(self):
- self.resolve()
- return "%s line %s:%s" % (self._file, self._lineno, self._colno)
-
- def _pointerline(self):
- return " " * self._colno + "^"
-
- def __str__(self):
- self.resolve()
- return "%s line %s:%s\n%s\n%s" % (
- self._file,
- self._lineno,
- self._colno,
- self._line,
- self._pointerline(),
- )
-
-
-class BuiltinLocation(object):
- def __init__(self, text):
- self.msg = text + "\n"
-
- def __eq__(self, other):
- return isinstance(other, BuiltinLocation) and self.msg == other.msg
-
- def filename(self):
- return "<builtin>"
-
- def resolve(self):
- pass
-
- def get(self):
- return self.msg
-
- def __str__(self):
- return self.get()
-
-
-# Data Model
-
-
-class IDLObject(object):
- def __init__(self, location):
- self.location = location
- self.userData = dict()
-
- def filename(self):
- return self.location.filename()
-
- def isInterface(self):
- return False
-
- def isNamespace(self):
- return False
-
- def isInterfaceMixin(self):
- return False
-
- def isEnum(self):
- return False
-
- def isCallback(self):
- return False
-
- def isType(self):
- return False
-
- def isDictionary(self):
- return False
-
- def isUnion(self):
- return False
-
- def isTypedef(self):
- return False
-
- def getUserData(self, key, default):
- return self.userData.get(key, default)
-
- def setUserData(self, key, value):
- self.userData[key] = value
-
- def addExtendedAttributes(self, attrs):
- assert False # Override me!
-
- def handleExtendedAttribute(self, attr):
- assert False # Override me!
-
- def _getDependentObjects(self):
- assert False # Override me!
-
- def getDeps(self, visited=None):
- """Return a set of files that this object depends on. If any of
- these files are changed the parser needs to be rerun to regenerate
- a new IDLObject.
-
- The visited argument is a set of all the objects already visited.
- We must test to see if we are in it, and if so, do nothing. This
- prevents infinite recursion."""
-
- # NB: We can't use visited=set() above because the default value is
- # evaluated when the def statement is evaluated, not when the function
- # is executed, so there would be one set for all invocations.
- if visited is None:
- visited = set()
-
- if self in visited:
- return set()
-
- visited.add(self)
-
- deps = set()
- if self.filename() != "<builtin>":
- deps.add(self.filename())
-
- for d in self._getDependentObjects():
- deps.update(d.getDeps(visited))
-
- return deps
-
-
-class IDLScope(IDLObject):
- def __init__(self, location, parentScope, identifier):
- IDLObject.__init__(self, location)
-
- self.parentScope = parentScope
- if identifier:
- assert isinstance(identifier, IDLIdentifier)
- self._name = identifier
- else:
- self._name = None
-
- self._dict = {}
- self.globalNames = set()
- # A mapping from global name to the set of global interfaces
- # that have that global name.
- self.globalNameMapping = defaultdict(set)
-
- def __str__(self):
- return self.QName()
-
- def QName(self):
- # It's possible for us to be called before __init__ has been called, for
- # the IDLObjectWithScope case. In that case, self._name won't be set yet.
- if hasattr(self, "_name"):
- name = self._name
- else:
- name = None
- if name:
- return name.QName() + "::"
- return "::"
-
- def ensureUnique(self, identifier, object):
- """
- Ensure that there is at most one 'identifier' in scope ('self').
- Note that object can be None. This occurs if we end up here for an
- interface type we haven't seen yet.
- """
- assert isinstance(identifier, IDLUnresolvedIdentifier)
- assert not object or isinstance(object, IDLObjectWithIdentifier)
- assert not object or object.identifier == identifier
-
- if identifier.name in self._dict:
- if not object:
- return
-
- # ensureUnique twice with the same object is not allowed
- assert id(object) != id(self._dict[identifier.name])
-
- replacement = self.resolveIdentifierConflict(
- self, identifier, self._dict[identifier.name], object
- )
- self._dict[identifier.name] = replacement
- return
-
- assert object
-
- self._dict[identifier.name] = object
-
- def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
- if (
- isinstance(originalObject, IDLExternalInterface)
- and isinstance(newObject, IDLExternalInterface)
- and originalObject.identifier.name == newObject.identifier.name
- ):
- return originalObject
-
- if isinstance(originalObject, IDLExternalInterface) or isinstance(
- newObject, IDLExternalInterface
- ):
- raise WebIDLError(
- "Name collision between "
- "interface declarations for identifier '%s' at '%s' and '%s'"
- % (identifier.name, originalObject.location, newObject.location),
- [],
- )
-
- if isinstance(originalObject, IDLDictionary) or isinstance(
- newObject, IDLDictionary
- ):
- raise WebIDLError(
- "Name collision between dictionary declarations for "
- "identifier '%s'.\n%s\n%s"
- % (identifier.name, originalObject.location, newObject.location),
- [],
- )
-
- # We do the merging of overloads here as opposed to in IDLInterface
- # because we need to merge overloads of LegacyFactoryFunctions and we need to
- # detect conflicts in those across interfaces. See also the comment in
- # IDLInterface.addExtendedAttributes for "LegacyFactoryFunction".
- if isinstance(originalObject, IDLMethod) and isinstance(newObject, IDLMethod):
- return originalObject.addOverload(newObject)
-
- # Default to throwing, derived classes can override.
- conflictdesc = "\n\t%s at %s\n\t%s at %s" % (
- originalObject,
- originalObject.location,
- newObject,
- newObject.location,
- )
-
- raise WebIDLError(
- "Multiple unresolvable definitions of identifier '%s' in scope '%s'%s"
- % (identifier.name, str(self), conflictdesc),
- [],
- )
-
- def _lookupIdentifier(self, identifier):
- return self._dict[identifier.name]
-
- def lookupIdentifier(self, identifier):
- assert isinstance(identifier, IDLIdentifier)
- assert identifier.scope == self
- return self._lookupIdentifier(identifier)
-
- def addIfaceGlobalNames(self, interfaceName, globalNames):
- """Record the global names (from |globalNames|) that can be used in
- [Exposed] to expose things in a global named |interfaceName|"""
- self.globalNames.update(globalNames)
- for name in globalNames:
- self.globalNameMapping[name].add(interfaceName)
-
-
-class IDLIdentifier(IDLObject):
- def __init__(self, location, scope, name):
- IDLObject.__init__(self, location)
-
- self.name = name
- assert isinstance(scope, IDLScope)
- self.scope = scope
-
- def __str__(self):
- return self.QName()
-
- def QName(self):
- return self.scope.QName() + self.name
-
- def __hash__(self):
- return self.QName().__hash__()
-
- def __eq__(self, other):
- return self.QName() == other.QName()
-
- def object(self):
- return self.scope.lookupIdentifier(self)
-
-
-class IDLUnresolvedIdentifier(IDLObject):
- def __init__(
- self, location, name, allowDoubleUnderscore=False, allowForbidden=False
- ):
- IDLObject.__init__(self, location)
-
- assert len(name) > 0
-
- if name == "__noSuchMethod__":
- raise WebIDLError("__noSuchMethod__ is deprecated", [location])
-
- if name[:2] == "__" and not allowDoubleUnderscore:
- raise WebIDLError("Identifiers beginning with __ are reserved", [location])
- if name[0] == "_" and not allowDoubleUnderscore:
- name = name[1:]
- if name in ["constructor", "toString"] and not allowForbidden:
- raise WebIDLError(
- "Cannot use reserved identifier '%s'" % (name), [location]
- )
-
- self.name = name
-
- def __str__(self):
- return self.QName()
-
- def QName(self):
- return "<unresolved scope>::" + self.name
-
- def resolve(self, scope, object):
- assert isinstance(scope, IDLScope)
- assert not object or isinstance(object, IDLObjectWithIdentifier)
- assert not object or object.identifier == self
-
- scope.ensureUnique(self, object)
-
- identifier = IDLIdentifier(self.location, scope, self.name)
- if object:
- object.identifier = identifier
- return identifier
-
- def finish(self):
- assert False # Should replace with a resolved identifier first.
-
-
-class IDLObjectWithIdentifier(IDLObject):
- def __init__(self, location, parentScope, identifier):
- IDLObject.__init__(self, location)
-
- assert isinstance(identifier, IDLUnresolvedIdentifier)
-
- self.identifier = identifier
-
- if parentScope:
- self.resolve(parentScope)
-
- def resolve(self, parentScope):
- assert isinstance(parentScope, IDLScope)
- assert isinstance(self.identifier, IDLUnresolvedIdentifier)
- self.identifier.resolve(parentScope, self)
-
-
-class IDLObjectWithScope(IDLObjectWithIdentifier, IDLScope):
- def __init__(self, location, parentScope, identifier):
- assert isinstance(identifier, IDLUnresolvedIdentifier)
-
- IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
- IDLScope.__init__(self, location, parentScope, self.identifier)
-
-
-class IDLIdentifierPlaceholder(IDLObjectWithIdentifier):
- def __init__(self, location, identifier):
- assert isinstance(identifier, IDLUnresolvedIdentifier)
- IDLObjectWithIdentifier.__init__(self, location, None, identifier)
-
- def finish(self, scope):
- try:
- scope._lookupIdentifier(self.identifier)
- except:
- raise WebIDLError(
- "Unresolved type '%s'." % self.identifier, [self.location]
- )
-
- obj = self.identifier.resolve(scope, None)
- return scope.lookupIdentifier(obj)
-
-
-class IDLExposureMixins:
- def __init__(self, location):
- # _exposureGlobalNames are the global names listed in our [Exposed]
- # extended attribute. exposureSet is the exposure set as defined in the
- # Web IDL spec: it contains interface names.
- self._exposureGlobalNames = set()
- self.exposureSet = set()
- self._location = location
- self._globalScope = None
-
- def finish(self, scope):
- assert scope.parentScope is None
- self._globalScope = scope
-
- if "*" in self._exposureGlobalNames:
- self._exposureGlobalNames = scope.globalNames
- else:
- # Verify that our [Exposed] value, if any, makes sense.
- for globalName in self._exposureGlobalNames:
- if globalName not in scope.globalNames:
- raise WebIDLError(
- "Unknown [Exposed] value %s" % globalName, [self._location]
- )
-
- # Verify that we are exposed _somwhere_ if we have some place to be
- # exposed. We don't want to assert that we're definitely exposed
- # because a lot of our parser tests have small-enough IDL snippets that
- # they don't include any globals, and we don't really want to go through
- # and add global interfaces and [Exposed] annotations to all those
- # tests.
- if len(scope.globalNames) != 0:
- if len(self._exposureGlobalNames) == 0 and not self.isPseudoInterface():
- raise WebIDLError(
- (
- "'%s' is not exposed anywhere even though we have "
- "globals to be exposed to"
- )
- % self,
- [self.location],
- )
-
- globalNameSetToExposureSet(scope, self._exposureGlobalNames, self.exposureSet)
-
- def isExposedInWindow(self):
- return "Window" in self.exposureSet
-
- def isExposedInAnyWorker(self):
- return len(self.getWorkerExposureSet()) > 0
-
- def isExposedInWorkerDebugger(self):
- return len(self.getWorkerDebuggerExposureSet()) > 0
-
- def isExposedInAnyWorklet(self):
- return len(self.getWorkletExposureSet()) > 0
-
- def isExposedInSomeButNotAllWorkers(self):
- """
- Returns true if the Exposed extended attribute for this interface
- exposes it in some worker globals but not others. The return value does
- not depend on whether the interface is exposed in Window or System
- globals.
- """
- if not self.isExposedInAnyWorker():
- return False
- workerScopes = self.parentScope.globalNameMapping["Worker"]
- return len(workerScopes.difference(self.exposureSet)) > 0
-
- def isExposedInShadowRealms(self):
- return "ShadowRealmGlobalScope" in self.exposureSet
-
- def getWorkerExposureSet(self):
- workerScopes = self._globalScope.globalNameMapping["Worker"]
- return workerScopes.intersection(self.exposureSet)
-
- def getWorkletExposureSet(self):
- workletScopes = self._globalScope.globalNameMapping["Worklet"]
- return workletScopes.intersection(self.exposureSet)
-
- def getWorkerDebuggerExposureSet(self):
- workerDebuggerScopes = self._globalScope.globalNameMapping["WorkerDebugger"]
- return workerDebuggerScopes.intersection(self.exposureSet)
-
-
-class IDLExternalInterface(IDLObjectWithIdentifier):
- def __init__(self, location, parentScope, identifier):
- assert isinstance(identifier, IDLUnresolvedIdentifier)
- assert isinstance(parentScope, IDLScope)
- self.parent = None
- IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)
- IDLObjectWithIdentifier.resolve(self, parentScope)
-
- def finish(self, scope):
- pass
-
- def validate(self):
- pass
-
- def isIteratorInterface(self):
- return False
-
- def isAsyncIteratorInterface(self):
- return False
-
- def isExternal(self):
- return True
-
- def isInterface(self):
- return True
-
- def addExtendedAttributes(self, attrs):
- if len(attrs) != 0:
- raise WebIDLError(
- "There are no extended attributes that are "
- "allowed on external interfaces",
- [attrs[0].location, self.location],
- )
-
- def resolve(self, parentScope):
- pass
-
- def getJSImplementation(self):
- return None
-
- def isJSImplemented(self):
- return False
-
- def hasProbablyShortLivingWrapper(self):
- return False
-
- def _getDependentObjects(self):
- return set()
-
-
-class IDLPartialDictionary(IDLObject):
- def __init__(self, location, name, members, nonPartialDictionary):
- assert isinstance(name, IDLUnresolvedIdentifier)
-
- IDLObject.__init__(self, location)
- self.identifier = name
- self.members = members
- self._nonPartialDictionary = nonPartialDictionary
- self._finished = False
- nonPartialDictionary.addPartialDictionary(self)
-
- def addExtendedAttributes(self, attrs):
- pass
-
- def finish(self, scope):
- if self._finished:
- return
- self._finished = True
-
- # Need to make sure our non-partial dictionary gets
- # finished so it can report cases when we only have partial
- # dictionaries.
- self._nonPartialDictionary.finish(scope)
-
- def validate(self):
- pass
-
-
-class IDLPartialInterfaceOrNamespace(IDLObject):
- def __init__(self, location, name, members, nonPartialInterfaceOrNamespace):
- assert isinstance(name, IDLUnresolvedIdentifier)
-
- IDLObject.__init__(self, location)
- self.identifier = name
- self.members = members
- # propagatedExtendedAttrs are the ones that should get
- # propagated to our non-partial interface.
- self.propagatedExtendedAttrs = []
- self._haveSecureContextExtendedAttribute = False
- self._nonPartialInterfaceOrNamespace = nonPartialInterfaceOrNamespace
- self._finished = False
- nonPartialInterfaceOrNamespace.addPartial(self)
-
- def addExtendedAttributes(self, attrs):
- for attr in attrs:
- identifier = attr.identifier()
-
- if identifier == "LegacyFactoryFunction":
- self.propagatedExtendedAttrs.append(attr)
- elif identifier == "SecureContext":
- self._haveSecureContextExtendedAttribute = True
- # This gets propagated to all our members.
- for member in self.members:
- if member.getExtendedAttribute("SecureContext"):
- raise WebIDLError(
- "[SecureContext] specified on both a "
- "partial interface member and on the "
- "partial interface itself",
- [member.location, attr.location],
- )
- member.addExtendedAttributes([attr])
- elif identifier == "Exposed":
- # This just gets propagated to all our members.
- for member in self.members:
- if len(member._exposureGlobalNames) != 0:
- raise WebIDLError(
- "[Exposed] specified on both a "
- "partial interface member and on the "
- "partial interface itself",
- [member.location, attr.location],
- )
- member.addExtendedAttributes([attr])
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on partial "
- "interface" % identifier,
- [attr.location],
- )
-
def finish(self, scope):
    """Finish this partial definition (idempotent).

    If the non-partial definition is [SecureContext] and this partial did
    not spell it out itself, propagate [SecureContext] to our members,
    then make sure the non-partial definition gets finished too.
    """
    if self._finished:
        return
    self._finished = True

    target = self._nonPartialInterfaceOrNamespace
    inheritsSecureContext = (
        not self._haveSecureContextExtendedAttribute
        and target.getExtendedAttribute("SecureContext")
    )
    if inheritsSecureContext:
        for member in self.members:
            if member.getExtendedAttribute("SecureContext"):
                raise WebIDLError(
                    "[SecureContext] specified on both a "
                    "partial interface member and on the "
                    "non-partial interface",
                    [
                        member.location,
                        target.location,
                    ],
                )
            member.addExtendedAttributes(
                [IDLExtendedAttribute(target.location, ("SecureContext",))]
            )

    # Finishing the non-partial definition lets it report problems that
    # only show up when nothing but partials exist.
    target.finish(scope)
-
def validate(self):
    """Partial definitions have nothing of their own to validate; the
    merged members are validated on the non-partial definition."""
-
-
def convertExposedAttrToGlobalNameSet(exposedAttr, targetSet):
    """Fill *targetSet* with the global names named by an [Exposed] attribute.

    Handles both the single-name form ([Exposed=Window]) and the list form
    ([Exposed=(Window, Worker)]).  *targetSet* must start out empty.
    """
    assert len(targetSet) == 0
    if not exposedAttr.hasValue():
        # [Exposed=(A, B, ...)] — a parenthesized list of names.
        assert exposedAttr.hasArgs()
        targetSet.update(exposedAttr.args())
    else:
        # [Exposed=Name] — exactly one name.
        targetSet.add(exposedAttr.value())
-
-
def globalNameSetToExposureSet(globalScope, nameSet, exposureSet):
    """Expand the global names in *nameSet* into concrete interfaces.

    Each name is looked up in globalScope.globalNameMapping and the mapped
    interfaces are accumulated into *exposureSet* (mutated in place).
    """
    mapping = globalScope.globalNameMapping
    for globalName in nameSet:
        exposureSet |= mapping[globalName]
-
-
class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMixins):
    """Shared base for interfaces, interface mixins and namespaces.

    Tracks the member list, registered partial definitions, and the
    extended attributes seen so far; subclasses drive the finish()/
    validate() passes and call finishMembers() once partials are merged.
    """

    def __init__(self, location, parentScope, name):
        assert isinstance(parentScope, IDLScope)
        assert isinstance(name, IDLUnresolvedIdentifier)

        # Guards against running finish() twice (set by subclasses).
        self._finished = False
        self.members = []
        # Partial definitions registered via addPartial(); merged in finish().
        self._partials = []
        self._extendedAttrDict = {}
        # Becomes True once the non-partial declaration has been seen.
        self._isKnownNonPartial = False

        IDLObjectWithScope.__init__(self, location, parentScope, name)
        IDLExposureMixins.__init__(self, location)

    def finish(self, scope):
        # A partial-only definition is an error: exactly one non-partial
        # declaration must exist somewhere.
        if not self._isKnownNonPartial:
            raise WebIDLError(
                "%s does not have a non-partial declaration" % str(self),
                [self.location],
            )

        IDLExposureMixins.finish(self, scope)

        # Now go ahead and merge in our partials.
        for partial in self._partials:
            partial.finish(scope)
            self.addExtendedAttributes(partial.propagatedExtendedAttrs)
            self.members.extend(partial.members)

    def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject):
        assert isinstance(scope, IDLScope)
        assert isinstance(originalObject, IDLInterfaceMember)
        assert isinstance(newObject, IDLInterfaceMember)

        retval = IDLScope.resolveIdentifierConflict(
            self, scope, identifier, originalObject, newObject
        )

        # Might be a ctor, which isn't in self.members
        if newObject in self.members:
            self.members.remove(newObject)
        return retval

    def typeName(self):
        # Human-readable kind name, used in diagnostics.
        if self.isInterface():
            return "interface"
        if self.isNamespace():
            return "namespace"
        assert self.isInterfaceMixin()
        return "interface mixin"

    def getExtendedAttribute(self, name):
        # Returns the recorded value for extended attribute `name`, or None.
        return self._extendedAttrDict.get(name, None)

    def setNonPartial(self, location, members):
        # Record the (single) non-partial declaration and its members.
        if self._isKnownNonPartial:
            raise WebIDLError(
                "Two non-partial definitions for the " "same %s" % self.typeName(),
                [location, self.location],
            )
        self._isKnownNonPartial = True
        # Now make it look like we were parsed at this new location, since
        # that's the place where the interface is "really" defined
        self.location = location
        # Put the new members at the beginning
        self.members = members + self.members

    def addPartial(self, partial):
        assert self.identifier.name == partial.identifier.name
        self._partials.append(partial)

    def getPartials(self):
        # Don't let people mutate our guts.
        return list(self._partials)

    def finishMembers(self, scope):
        # Assuming we've merged in our partials, set the _exposureGlobalNames on
        # any members that don't have it set yet. Note that any partial
        # interfaces that had [Exposed] set have already set up
        # _exposureGlobalNames on all the members coming from them, so this is
        # just implementing the "members default to interface or interface mixin
        # that defined them" and "partial interfaces or interface mixins default
        # to interface or interface mixin they're a partial for" rules from the
        # spec.
        for m in self.members:
            # If m, or the partial m came from, had [Exposed]
            # specified, it already has a nonempty exposure global names set.
            if len(m._exposureGlobalNames) == 0:
                m._exposureGlobalNames.update(self._exposureGlobalNames)
            if m.isAttr() and m.stringifier:
                m.expand(self.members)

        # resolve() will modify self.members, so we need to iterate
        # over a copy of the member list here.
        for member in list(self.members):
            member.resolve(self)

        for member in self.members:
            member.finish(scope)

        # Now that we've finished our members, which has updated their exposure
        # sets, make sure they aren't exposed in places where we are not.
        for member in self.members:
            if not member.exposureSet.issubset(self.exposureSet):
                raise WebIDLError(
                    "Interface or interface mixin member has "
                    "larger exposure set than its container",
                    [member.location, self.location],
                )

    def isExternal(self):
        return False
-
-
class IDLInterfaceMixin(IDLInterfaceOrInterfaceMixinOrNamespace):
    """A Web IDL "interface mixin" definition.

    Mixins hold members that are copied into every interface that
    `includes` them; they have no parent, no constructors, and may not
    contain static or special members (enforced in validate()).
    """

    def __init__(self, location, parentScope, name, members, isKnownNonPartial):
        # Global names of the interfaces that include this mixin; used to
        # give the mixin an implicit exposure set when it has no [Exposed].
        self.actualExposureGlobalNames = set()

        assert isKnownNonPartial or len(members) == 0
        IDLInterfaceOrInterfaceMixinOrNamespace.__init__(
            self, location, parentScope, name
        )

        if isKnownNonPartial:
            self.setNonPartial(location, members)

    def __str__(self):
        return "Interface mixin '%s'" % self.identifier.name

    def isInterfaceMixin(self):
        return True

    def finish(self, scope):
        if self._finished:
            return
        self._finished = True

        # Expose to the globals of interfaces that includes this mixin if this
        # mixin has no explicit [Exposed] so that its members can be exposed
        # based on the base interface exposure set.
        #
        # Make sure this is done before IDLExposureMixins.finish call, since
        # that converts our set of exposure global names to an actual exposure
        # set.
        hasImplicitExposure = len(self._exposureGlobalNames) == 0
        if hasImplicitExposure:
            self._exposureGlobalNames.update(self.actualExposureGlobalNames)

        IDLInterfaceOrInterfaceMixinOrNamespace.finish(self, scope)

        self.finishMembers(scope)

    def validate(self):
        # Mixins may not contain inherited attributes, static members, or
        # special operations; reject each of those here.
        for member in self.members:
            if member.isAttr():
                if member.inherit:
                    raise WebIDLError(
                        "Interface mixin member cannot include "
                        "an inherited attribute",
                        [member.location, self.location],
                    )
                if member.isStatic():
                    raise WebIDLError(
                        "Interface mixin member cannot include " "a static member",
                        [member.location, self.location],
                    )

            if member.isMethod():
                if member.isStatic():
                    raise WebIDLError(
                        "Interface mixin member cannot include " "a static operation",
                        [member.location, self.location],
                    )
                if (
                    member.isGetter()
                    or member.isSetter()
                    or member.isDeleter()
                    or member.isLegacycaller()
                ):
                    raise WebIDLError(
                        "Interface mixin member cannot include a " "special operation",
                        [member.location, self.location],
                    )

    def addExtendedAttributes(self, attrs):
        # Only [SecureContext] (propagated to members) and [Exposed] are
        # meaningful on a mixin.
        for attr in attrs:
            identifier = attr.identifier()

            if identifier == "SecureContext":
                if not attr.noArguments():
                    raise WebIDLError(
                        "[%s] must take no arguments" % identifier, [attr.location]
                    )
                # This gets propagated to all our members.
                for member in self.members:
                    if member.getExtendedAttribute("SecureContext"):
                        # Fixed message: the two literals previously
                        # concatenated to "...and onthe interface mixin...".
                        raise WebIDLError(
                            "[SecureContext] specified on both "
                            "an interface mixin member and on "
                            "the interface mixin itself",
                            [member.location, attr.location],
                        )
                    member.addExtendedAttributes([attr])
            elif identifier == "Exposed":
                convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
            else:
                raise WebIDLError(
                    "Unknown extended attribute %s on interface" % identifier,
                    [attr.location],
                )

            attrlist = attr.listValue()
            self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True

    def _getDependentObjects(self):
        return set(self.members)
-
-
-class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace):
def __init__(self, location, parentScope, name, parent, members, isKnownNonPartial):
    # Only the non-partial declaration may supply a parent or members.
    assert isKnownNonPartial or not parent
    assert isKnownNonPartial or len(members) == 0

    # Resolved parent interface; set from the placeholder in finish().
    self.parent = None
    self._callback = False
    self.maplikeOrSetlikeOrIterable = None
    # legacyFactoryFunctions needs deterministic ordering because bindings
    # code outputs the constructs in the order that it enumerates them.
    self.legacyFactoryFunctions = list()
    self.legacyWindowAliases = []
    self.includedMixins = set()
    # self.interfacesBasedOnSelf is the set of interfaces that inherit from
    # self, including self itself.
    # Used for distinguishability checking.
    self.interfacesBasedOnSelf = set([self])
    self._hasChildInterfaces = False
    self._isOnGlobalProtoChain = False
    # Pseudo interfaces aren't exposed anywhere, and so shouldn't issue warnings
    self._isPseudo = False

    # Tracking of the number of reserved slots we need for our
    # members and those of ancestor interfaces.
    self.totalMembersInSlots = 0
    # Tracking of the number of our own members we have in slots
    self._ownMembersInSlots = 0
    # If this is an iterator interface, we need to know what iterable
    # interface we're iterating for in order to get its nativeType.
    self.iterableInterface = None
    self.asyncIterableInterface = None
    # True if we have cross-origin members.
    self.hasCrossOriginMembers = False
    # True if some descendant (including ourselves) has cross-origin members
    self.hasDescendantWithCrossOriginMembers = False

    IDLInterfaceOrInterfaceMixinOrNamespace.__init__(
        self, location, parentScope, name
    )

    if isKnownNonPartial:
        self.setNonPartial(location, parent, members)
-
def ctor(self):
    """Return this interface's 'constructor' operation, or None.

    Looks up the (normally forbidden) "constructor" identifier in our
    scope; a failed lookup simply means no constructor was declared.
    """
    identifier = IDLUnresolvedIdentifier(
        self.location, "constructor", allowForbidden=True
    )
    try:
        return self._lookupIdentifier(identifier)
    except Exception:
        # Narrowed from a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit.  A missing scope entry raises an
        # ordinary Exception subclass, so this still means "no ctor".
        return None
-
def isIterable(self):
    """True-ish when this interface has an iterable<> declaration."""
    decl = self.maplikeOrSetlikeOrIterable
    return decl and decl.isIterable()
-
def isAsyncIterable(self):
    """True-ish when this interface has an async iterable<> declaration."""
    decl = self.maplikeOrSetlikeOrIterable
    return decl and decl.isAsyncIterable()
-
def isIteratorInterface(self):
    """True when this is the synthetic iterator interface of an iterable."""
    return self.iterableInterface is not None
-
def isAsyncIteratorInterface(self):
    """True when this is the synthetic async iterator interface."""
    return self.asyncIterableInterface is not None
-
def getClassName(self):
    """Name of the generated binding class: the interface's identifier."""
    return self.identifier.name
-
def finish(self, scope):
    """Resolve and check this interface/namespace after parsing (idempotent).

    Expands maplike/setlike/iterable members, resolves the parent
    placeholder and checks inheritance constraints, finishes mixins and
    members, merges mixin members in, propagates unforgeable members from
    the parent, assigns reserved slots, and enforces the one-of-each rule
    for special operations.  Raises WebIDLError on any violation.
    """
    if self._finished:
        return

    self._finished = True

    IDLInterfaceOrInterfaceMixinOrNamespace.finish(self, scope)

    if len(self.legacyWindowAliases) > 0:
        if not self.hasInterfaceObject():
            raise WebIDLError(
                "Interface %s unexpectedly has [LegacyWindowAlias] "
                "and [LegacyNoInterfaceObject] together" % self.identifier.name,
                [self.location],
            )
        if not self.isExposedInWindow():
            raise WebIDLError(
                "Interface %s has [LegacyWindowAlias] "
                "but not exposed in Window" % self.identifier.name,
                [self.location],
            )

    # Generate maplike/setlike interface members. Since generated members
    # need to be treated like regular interface members, do this before
    # things like exposure setting.
    for member in self.members:
        if member.isMaplikeOrSetlikeOrIterable():
            if self.isJSImplemented():
                raise WebIDLError(
                    "%s declaration used on "
                    "interface that is implemented in JS"
                    % (member.maplikeOrSetlikeOrIterableType),
                    [member.location],
                )
            if member.valueType.isObservableArray() or (
                member.hasKeyType() and member.keyType.isObservableArray()
            ):
                raise WebIDLError(
                    "%s declaration uses ObservableArray as value or key type"
                    % (member.maplikeOrSetlikeOrIterableType),
                    [member.location],
                )
            # Check that we only have one interface declaration (currently
            # there can only be one maplike/setlike declaration per
            # interface)
            if self.maplikeOrSetlikeOrIterable:
                raise WebIDLError(
                    "%s declaration used on "
                    "interface that already has %s "
                    "declaration"
                    % (
                        member.maplikeOrSetlikeOrIterableType,
                        self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType,
                    ),
                    [self.maplikeOrSetlikeOrIterable.location, member.location],
                )
            self.maplikeOrSetlikeOrIterable = member
            # If we've got a maplike or setlike declaration, we'll be building all of
            # our required methods in Codegen. Generate members now.
            self.maplikeOrSetlikeOrIterable.expand(self.members)

    assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder)
    parent = self.parent.finish(scope) if self.parent else None
    if parent and isinstance(parent, IDLExternalInterface):
        raise WebIDLError(
            "%s inherits from %s which does not have "
            "a definition" % (self.identifier.name, self.parent.identifier.name),
            [self.location],
        )
    if parent and not isinstance(parent, IDLInterface):
        raise WebIDLError(
            "%s inherits from %s which is not an interface "
            % (self.identifier.name, self.parent.identifier.name),
            [self.location, parent.location],
        )

    self.parent = parent

    assert iter(self.members)

    if self.isNamespace():
        assert not self.parent
        for m in self.members:
            if m.isAttr() or m.isMethod():
                if m.isStatic():
                    raise WebIDLError(
                        "Don't mark things explicitly static " "in namespaces",
                        [self.location, m.location],
                    )
                # Just mark all our methods/attributes as static. The other
                # option is to duplicate the relevant InterfaceMembers
                # production bits but modified to produce static stuff to
                # start with, but that sounds annoying.
                m.forceStatic()

    if self.parent:
        self.parent.finish(scope)
        self.parent._hasChildInterfaces = True

        self.totalMembersInSlots = self.parent.totalMembersInSlots

        # Interfaces with [Global] must not have anything inherit from them
        if self.parent.getExtendedAttribute("Global"):
            # Note: This is not a self.parent.isOnGlobalProtoChain() check
            # because ancestors of a [Global] interface can have other
            # descendants.
            raise WebIDLError(
                "[Global] interface has another interface " "inheriting from it",
                [self.location, self.parent.location],
            )

        # Make sure that we're not exposed in places where our parent is not
        if not self.exposureSet.issubset(self.parent.exposureSet):
            raise WebIDLError(
                "Interface %s is exposed in globals where its "
                "parent interface %s is not exposed."
                % (self.identifier.name, self.parent.identifier.name),
                [self.location, self.parent.location],
            )

        # Callbacks must not inherit from non-callbacks.
        # XXXbz Can non-callbacks inherit from callbacks?  Spec issue pending.
        if self.isCallback():
            if not self.parent.isCallback():
                raise WebIDLError(
                    "Callback interface %s inheriting from "
                    "non-callback interface %s"
                    % (self.identifier.name, self.parent.identifier.name),
                    [self.location, self.parent.location],
                )
        elif self.parent.isCallback():
            raise WebIDLError(
                "Non-callback interface %s inheriting from "
                "callback interface %s"
                % (self.identifier.name, self.parent.identifier.name),
                [self.location, self.parent.location],
            )

        # Interfaces which have interface objects can't inherit
        # from [LegacyNoInterfaceObject] interfaces.
        if self.parent.getExtendedAttribute(
            "LegacyNoInterfaceObject"
        ) and not self.getExtendedAttribute("LegacyNoInterfaceObject"):
            raise WebIDLError(
                "Interface %s does not have "
                "[LegacyNoInterfaceObject] but inherits from "
                "interface %s which does"
                % (self.identifier.name, self.parent.identifier.name),
                [self.location, self.parent.location],
            )

        # Interfaces that are not [SecureContext] can't inherit
        # from [SecureContext] interfaces.
        if self.parent.getExtendedAttribute(
            "SecureContext"
        ) and not self.getExtendedAttribute("SecureContext"):
            raise WebIDLError(
                "Interface %s does not have "
                "[SecureContext] but inherits from "
                "interface %s which does"
                % (self.identifier.name, self.parent.identifier.name),
                [self.location, self.parent.location],
            )

    for mixin in self.includedMixins:
        mixin.finish(scope)

    cycleInGraph = self.findInterfaceLoopPoint(self)
    if cycleInGraph:
        raise WebIDLError(
            "Interface %s has itself as ancestor" % self.identifier.name,
            [self.location, cycleInGraph.location],
        )

    self.finishMembers(scope)

    ctor = self.ctor()
    if ctor is not None:
        if not self.hasInterfaceObject():
            raise WebIDLError(
                "Can't have both a constructor and [LegacyNoInterfaceObject]",
                [self.location, ctor.location],
            )

        if self.globalNames:
            raise WebIDLError(
                "Can't have both a constructor and [Global]",
                [self.location, ctor.location],
            )

        assert ctor._exposureGlobalNames == self._exposureGlobalNames
        ctor._exposureGlobalNames.update(self._exposureGlobalNames)
        # Remove the constructor operation from our member list so
        # it doesn't get in the way later.
        self.members.remove(ctor)

    for ctor in self.legacyFactoryFunctions:
        if self.globalNames:
            raise WebIDLError(
                "Can't have both a legacy factory function and [Global]",
                [self.location, ctor.location],
            )
        assert len(ctor._exposureGlobalNames) == 0
        ctor._exposureGlobalNames.update(self._exposureGlobalNames)
        ctor.finish(scope)

    # Make a copy of our member list, so things that implement us
    # can get those without all the stuff we implement ourselves
    # admixed.
    self.originalMembers = list(self.members)

    for mixin in sorted(self.includedMixins, key=lambda x: x.identifier.name):
        for mixinMember in mixin.members:
            for member in self.members:
                if mixinMember.identifier.name == member.identifier.name:
                    raise WebIDLError(
                        "Multiple definitions of %s on %s coming from 'includes' statements"
                        % (member.identifier.name, self),
                        [mixinMember.location, member.location],
                    )
        self.members.extend(mixin.members)

    for ancestor in self.getInheritedInterfaces():
        ancestor.interfacesBasedOnSelf.add(self)
        if (
            ancestor.maplikeOrSetlikeOrIterable is not None
            and self.maplikeOrSetlikeOrIterable is not None
        ):
            raise WebIDLError(
                "Cannot have maplike/setlike on %s that "
                "inherits %s, which is already "
                "maplike/setlike"
                % (self.identifier.name, ancestor.identifier.name),
                [
                    self.maplikeOrSetlikeOrIterable.location,
                    ancestor.maplikeOrSetlikeOrIterable.location,
                ],
            )

    # Deal with interfaces marked [LegacyUnforgeable], now that we have our full
    # member list, except unforgeables pulled in from parents. We want to
    # do this before we set "originatingInterface" on our unforgeable
    # members.
    if self.getExtendedAttribute("LegacyUnforgeable"):
        # Check that the interface already has all the things the
        # spec would otherwise require us to synthesize and is
        # missing the ones we plan to synthesize.
        if not any(m.isMethod() and m.isStringifier() for m in self.members):
            raise WebIDLError(
                "LegacyUnforgeable interface %s does not have a "
                "stringifier" % self.identifier.name,
                [self.location],
            )

        for m in self.members:
            if m.identifier.name == "toJSON":
                raise WebIDLError(
                    "LegacyUnforgeable interface %s has a "
                    "toJSON so we won't be able to add "
                    "one ourselves" % self.identifier.name,
                    [self.location, m.location],
                )

            if m.identifier.name == "valueOf" and not m.isStatic():
                raise WebIDLError(
                    "LegacyUnforgeable interface %s has a valueOf "
                    "member so we won't be able to add one "
                    "ourselves" % self.identifier.name,
                    [self.location, m.location],
                )

    for member in self.members:
        if (
            (member.isAttr() or member.isMethod())
            and member.isLegacyUnforgeable()
            and not hasattr(member, "originatingInterface")
        ):
            member.originatingInterface = self

    for member in self.members:
        if (
            member.isMethod() and member.getExtendedAttribute("CrossOriginCallable")
        ) or (
            member.isAttr()
            and (
                member.getExtendedAttribute("CrossOriginReadable")
                or member.getExtendedAttribute("CrossOriginWritable")
            )
        ):
            self.hasCrossOriginMembers = True
            break

    if self.hasCrossOriginMembers:
        parent = self
        while parent:
            parent.hasDescendantWithCrossOriginMembers = True
            parent = parent.parent

    # Compute slot indices for our members before we pull in unforgeable
    # members from our parent. Also, maplike/setlike declarations get a
    # slot to hold their backing object.
    for member in self.members:
        if (
            member.isAttr()
            and (
                member.getExtendedAttribute("StoreInSlot")
                or member.getExtendedAttribute("Cached")
                or member.type.isObservableArray()
            )
        ) or member.isMaplikeOrSetlike():
            if self.isJSImplemented() and not member.isMaplikeOrSetlike():
                raise WebIDLError(
                    "Interface %s is JS-implemented and we "
                    "don't support [Cached] or [StoreInSlot] or ObservableArray "
                    "on JS-implemented interfaces" % self.identifier.name,
                    [self.location, member.location],
                )
            if member.slotIndices is None:
                member.slotIndices = dict()
            member.slotIndices[self.identifier.name] = self.totalMembersInSlots
            self.totalMembersInSlots += 1
            if member.getExtendedAttribute("StoreInSlot"):
                self._ownMembersInSlots += 1

    if self.parent:
        # Make sure we don't shadow any of the [LegacyUnforgeable] attributes on our
        # ancestor interfaces. We don't have to worry about mixins here, because
        # those have already been imported into the relevant .members lists. And
        # we don't have to worry about anything other than our parent, because it
        # has already imported its ancestors' unforgeable attributes into its
        # member list.
        for unforgeableMember in (
            member
            for member in self.parent.members
            if (member.isAttr() or member.isMethod())
            and member.isLegacyUnforgeable()
        ):
            shadows = [
                m
                for m in self.members
                if (m.isAttr() or m.isMethod())
                and not m.isStatic()
                and m.identifier.name == unforgeableMember.identifier.name
            ]
            if len(shadows) != 0:
                locs = [unforgeableMember.location] + [s.location for s in shadows]
                # NOTE(review): "ancestor" below is left over from the
                # getInheritedInterfaces loop above; the message probably
                # means self.parent — confirm before relying on it.
                raise WebIDLError(
                    "Interface %s shadows [LegacyUnforgeable] "
                    "members of %s"
                    % (self.identifier.name, ancestor.identifier.name),
                    locs,
                )
            # And now just stick it in our members, since we won't be
            # inheriting this down the proto chain. If we really cared we
            # could try to do something where we set up the unforgeable
            # attributes/methods of ancestor interfaces, with their
            # corresponding getters, on our interface, but that gets pretty
            # complicated and seems unnecessary.
            self.members.append(unforgeableMember)

    # At this point, we have all of our members. If the current interface
    # uses maplike/setlike, check for collisions anywhere in the current
    # interface or higher in the inheritance chain.
    if self.maplikeOrSetlikeOrIterable:
        testInterface = self
        isAncestor = False
        while testInterface:
            self.maplikeOrSetlikeOrIterable.checkCollisions(
                testInterface.members, isAncestor
            )
            isAncestor = True
            testInterface = testInterface.parent

    # Ensure that there's at most one of each {named,indexed}
    # {getter,setter,deleter}, at most one stringifier,
    # and at most one legacycaller. Note that this last is not
    # quite per spec, but in practice no one overloads
    # legacycallers. Also note that in practice we disallow
    # indexed deleters, but it simplifies some other code to
    # treat deleter analogously to getter/setter by
    # prefixing it with "named".
    specialMembersSeen = {}
    for member in self.members:
        if not member.isMethod():
            continue

        if member.isGetter():
            memberType = "getters"
        elif member.isSetter():
            memberType = "setters"
        elif member.isDeleter():
            memberType = "deleters"
        elif member.isStringifier():
            memberType = "stringifiers"
        elif member.isLegacycaller():
            memberType = "legacycallers"
        else:
            continue

        if memberType != "stringifiers" and memberType != "legacycallers":
            if member.isNamed():
                memberType = "named " + memberType
            else:
                assert member.isIndexed()
                memberType = "indexed " + memberType

        if memberType in specialMembersSeen:
            raise WebIDLError(
                "Multiple " + memberType + " on %s" % (self),
                [
                    self.location,
                    specialMembersSeen[memberType].location,
                    member.location,
                ],
            )

        specialMembersSeen[memberType] = member

    if self.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
        # Check that we have a named getter.
        if "named getters" not in specialMembersSeen:
            raise WebIDLError(
                "Interface with [LegacyUnenumerableNamedProperties] does "
                "not have a named getter",
                [self.location],
            )
        ancestor = self.parent
        while ancestor:
            if ancestor.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
                raise WebIDLError(
                    "Interface with [LegacyUnenumerableNamedProperties] "
                    "inherits from another interface with "
                    "[LegacyUnenumerableNamedProperties]",
                    [self.location, ancestor.location],
                )
            ancestor = ancestor.parent

    if self._isOnGlobalProtoChain:
        # Make sure we have no named setters or deleters
        for memberType in ["setter", "deleter"]:
            memberId = "named " + memberType + "s"
            if memberId in specialMembersSeen:
                raise WebIDLError(
                    "Interface with [Global] has a named %s" % memberType,
                    [self.location, specialMembersSeen[memberId].location],
                )
        # Make sure we're not [LegacyOverrideBuiltIns]
        if self.getExtendedAttribute("LegacyOverrideBuiltIns"):
            raise WebIDLError(
                "Interface with [Global] also has " "[LegacyOverrideBuiltIns]",
                [self.location],
            )
        # Mark all of our ancestors as being on the global's proto chain too
        parent = self.parent
        while parent:
            # Must not inherit from an interface with [LegacyOverrideBuiltIns]
            if parent.getExtendedAttribute("LegacyOverrideBuiltIns"):
                raise WebIDLError(
                    "Interface with [Global] inherits from "
                    "interface with [LegacyOverrideBuiltIns]",
                    [self.location, parent.location],
                )
            parent._isOnGlobalProtoChain = True
            parent = parent.parent
-
def validate(self):
    """Run post-finish consistency checks on this interface.

    Checks unforgeable-ancestor rules, constructor validity, [Replaceable]
    and [PutForwards] restrictions (including forwarding cycles),
    [Alias]/[BindingAlias] name conflicts, conditional exposure, and
    iterable/indexed-getter pairing.  Raises WebIDLError on violations.
    """

    def checkDuplicateNames(member, name, attributeName):
        # Reject an alias that collides with any member name or with
        # another [Alias]/[BindingAlias] definition on a different member.
        for m in self.members:
            if m.identifier.name == name:
                raise WebIDLError(
                    "[%s=%s] has same name as interface member"
                    % (attributeName, name),
                    [member.location, m.location],
                )
            if m.isMethod() and m != member and name in m.aliases:
                raise WebIDLError(
                    "conflicting [%s=%s] definitions" % (attributeName, name),
                    [member.location, m.location],
                )
            if m.isAttr() and m != member and name in m.bindingAliases:
                raise WebIDLError(
                    "conflicting [%s=%s] definitions" % (attributeName, name),
                    [member.location, m.location],
                )

    # We also don't support inheriting from unforgeable interfaces.
    if self.getExtendedAttribute("LegacyUnforgeable") and self.hasChildInterfaces():
        locations = [self.location] + list(
            i.location for i in self.interfacesBasedOnSelf if i.parent == self
        )
        raise WebIDLError(
            "%s is an unforgeable ancestor interface" % self.identifier.name,
            locations,
        )

    ctor = self.ctor()
    if ctor is not None:
        ctor.validate()
    for namedCtor in self.legacyFactoryFunctions:
        namedCtor.validate()

    indexedGetter = None
    hasLengthAttribute = False
    for member in self.members:
        member.validate()

        if self.isCallback() and member.getExtendedAttribute("Replaceable"):
            raise WebIDLError(
                "[Replaceable] used on an attribute on "
                "interface %s which is a callback interface" % self.identifier.name,
                [self.location, member.location],
            )

        # Check that PutForwards refers to another attribute and that no
        # cycles exist in forwarded assignments. Also check for a
        # integer-typed "length" attribute.
        if member.isAttr():
            if member.identifier.name == "length" and member.type.isInteger():
                hasLengthAttribute = True

            iface = self
            attr = member
            putForwards = attr.getExtendedAttribute("PutForwards")
            if putForwards and self.isCallback():
                raise WebIDLError(
                    "[PutForwards] used on an attribute "
                    "on interface %s which is a callback "
                    "interface" % self.identifier.name,
                    [self.location, member.location],
                )

            # Walk the chain of forwarded attributes; revisiting the
            # starting member means the assignments form a cycle.
            while putForwards is not None:
                forwardIface = attr.type.unroll().inner
                # NOTE(review): "fowardAttr" (sic) is a long-standing local
                # variable misspelling of "forwardAttr"; kept as-is here.
                fowardAttr = None

                for forwardedMember in forwardIface.members:
                    if (
                        not forwardedMember.isAttr()
                        or forwardedMember.identifier.name != putForwards[0]
                    ):
                        continue
                    if forwardedMember == member:
                        raise WebIDLError(
                            "Cycle detected in forwarded "
                            "assignments for attribute %s on "
                            "%s" % (member.identifier.name, self),
                            [member.location],
                        )
                    fowardAttr = forwardedMember
                    break

                if fowardAttr is None:
                    raise WebIDLError(
                        "Attribute %s on %s forwards to "
                        "missing attribute %s"
                        % (attr.identifier.name, iface, putForwards),
                        [attr.location],
                    )

                iface = forwardIface
                attr = fowardAttr
                putForwards = attr.getExtendedAttribute("PutForwards")

        # Check that the name of an [Alias] doesn't conflict with an
        # interface member and whether we support indexed properties.
        if member.isMethod():
            if member.isGetter() and member.isIndexed():
                indexedGetter = member

            for alias in member.aliases:
                if self.isOnGlobalProtoChain():
                    raise WebIDLError(
                        "[Alias] must not be used on a "
                        "[Global] interface operation",
                        [member.location],
                    )
                if (
                    member.getExtendedAttribute("Exposed")
                    or member.getExtendedAttribute("ChromeOnly")
                    or member.getExtendedAttribute("Pref")
                    or member.getExtendedAttribute("Func")
                    or member.getExtendedAttribute("Trial")
                    or member.getExtendedAttribute("SecureContext")
                ):
                    raise WebIDLError(
                        "[Alias] must not be used on a "
                        "conditionally exposed operation",
                        [member.location],
                    )
                if member.isStatic():
                    raise WebIDLError(
                        "[Alias] must not be used on a " "static operation",
                        [member.location],
                    )
                if member.isIdentifierLess():
                    raise WebIDLError(
                        "[Alias] must not be used on an "
                        "identifierless operation",
                        [member.location],
                    )
                if member.isLegacyUnforgeable():
                    raise WebIDLError(
                        "[Alias] must not be used on an "
                        "[LegacyUnforgeable] operation",
                        [member.location],
                    )

                checkDuplicateNames(member, alias, "Alias")

        # Check that the name of a [BindingAlias] doesn't conflict with an
        # interface member.
        if member.isAttr():
            for bindingAlias in member.bindingAliases:
                checkDuplicateNames(member, bindingAlias, "BindingAlias")

    # Conditional exposure makes no sense for interfaces with no
    # interface object.
    # And SecureContext makes sense for interfaces with no interface object,
    # since it is also propagated to interface members.
    if (
        self.isExposedConditionally(exclusions=["SecureContext"])
        and not self.hasInterfaceObject()
    ):
        raise WebIDLError(
            "Interface with no interface object is " "exposed conditionally",
            [self.location],
        )

    # Value iterators are only allowed on interfaces with indexed getters,
    # and pair iterators are only allowed on interfaces without indexed
    # getters.
    if self.isIterable():
        iterableDecl = self.maplikeOrSetlikeOrIterable
        if iterableDecl.isValueIterator():
            if not indexedGetter:
                raise WebIDLError(
                    "Interface with value iterator does not "
                    "support indexed properties",
                    [self.location, iterableDecl.location],
                )

            if iterableDecl.valueType != indexedGetter.signatures()[0][0]:
                raise WebIDLError(
                    "Iterable type does not match indexed " "getter type",
                    [iterableDecl.location, indexedGetter.location],
                )

            if not hasLengthAttribute:
                raise WebIDLError(
                    "Interface with value iterator does not "
                    'have an integer-typed "length" attribute',
                    [self.location, iterableDecl.location],
                )
        else:
            assert iterableDecl.isPairIterator()
            if indexedGetter:
                raise WebIDLError(
                    "Interface with pair iterator supports " "indexed properties",
                    [self.location, iterableDecl.location, indexedGetter.location],
                )

    if indexedGetter and not hasLengthAttribute:
        raise WebIDLError(
            "Interface with an indexed getter does not have "
            'an integer-typed "length" attribute',
            [self.location, indexedGetter.location],
        )
-
def setCallback(self, value):
    """Mark (or unmark) this interface as a callback interface."""
    self._callback = value
-
def isCallback(self):
    """Whether this interface was declared as a callback interface."""
    return self._callback
-
def isSingleOperationInterface(self):
    """True for callback interfaces usable as a single-operation callback.

    Such interfaces have no parent, no attributes, and exactly one
    distinct regular-operation identifier.
    """
    assert self.isCallback() or self.isJSImplemented()

    # JS-implemented things should never need the this-handling
    # weirdness of single-operation interfaces.
    if self.isJSImplemented():
        return False
    # Must not inherit from another interface.
    if self.parent:
        return False
    # Must not have attributes of any kind.
    if any(m.isAttr() for m in self.members):
        return False
    # There must be at least one regular operation, and all regular
    # operations must share one identifier.
    operationNames = {
        m.identifier.name
        for m in self.members
        if m.isMethod() and not m.isStatic()
    }
    return len(operationNames) == 1
-
- def inheritanceDepth(self):
- depth = 0
- parent = self.parent
- while parent:
- depth = depth + 1
- parent = parent.parent
- return depth
-
- def hasConstants(self):
- return any(m.isConst() for m in self.members)
-
- def hasInterfaceObject(self):
- if self.isCallback():
- return self.hasConstants()
- return not hasattr(self, "_noInterfaceObject") and not self.isPseudoInterface()
-
- def hasInterfacePrototypeObject(self):
- return (
- not self.isCallback()
- and not self.isNamespace()
- and self.getUserData("hasConcreteDescendant", False)
- and not self.isPseudoInterface()
- )
-
- def addIncludedMixin(self, includedMixin):
- assert isinstance(includedMixin, IDLInterfaceMixin)
- self.includedMixins.add(includedMixin)
-
- def getInheritedInterfaces(self):
- """
- Returns a list of the interfaces this interface inherits from
- (not including this interface itself). The list is in order
- from most derived to least derived.
- """
- assert self._finished
- if not self.parent:
- return []
- parentInterfaces = self.parent.getInheritedInterfaces()
- parentInterfaces.insert(0, self.parent)
- return parentInterfaces
-
- def findInterfaceLoopPoint(self, otherInterface):
- """
- Finds an interface amongst our ancestors that inherits from otherInterface.
- If there is no such interface, returns None.
- """
- if self.parent:
- if self.parent == otherInterface:
- return self
- loopPoint = self.parent.findInterfaceLoopPoint(otherInterface)
- if loopPoint:
- return loopPoint
- return None
-
- def setNonPartial(self, location, parent, members):
- assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
- IDLInterfaceOrInterfaceMixinOrNamespace.setNonPartial(self, location, members)
- assert not self.parent
- self.parent = parent
-
- def getJSImplementation(self):
- classId = self.getExtendedAttribute("JSImplementation")
- if not classId:
- return classId
- assert isinstance(classId, list)
- assert len(classId) == 1
- return classId[0]
-
- def isJSImplemented(self):
- return bool(self.getJSImplementation())
-
- def hasProbablyShortLivingWrapper(self):
- current = self
- while current:
- if current.getExtendedAttribute("ProbablyShortLivingWrapper"):
- return True
- current = current.parent
- return False
-
- def hasChildInterfaces(self):
- return self._hasChildInterfaces
-
- def isOnGlobalProtoChain(self):
- return self._isOnGlobalProtoChain
-
- def isPseudoInterface(self):
- return self._isPseudo
-
- def _getDependentObjects(self):
- deps = set(self.members)
- deps.update(self.includedMixins)
- if self.parent:
- deps.add(self.parent)
- return deps
-
- def hasMembersInSlots(self):
- return self._ownMembersInSlots != 0
-
- conditionExtendedAttributes = [
- "Pref",
- "ChromeOnly",
- "Func",
- "Trial",
- "SecureContext",
- ]
-
- def isExposedConditionally(self, exclusions=[]):
- return any(
- ((not a in exclusions) and self.getExtendedAttribute(a))
- for a in self.conditionExtendedAttributes
- )
-
-
-class IDLInterface(IDLInterfaceOrNamespace):
- def __init__(
- self,
- location,
- parentScope,
- name,
- parent,
- members,
- isKnownNonPartial,
- classNameOverride=None,
- ):
- IDLInterfaceOrNamespace.__init__(
- self, location, parentScope, name, parent, members, isKnownNonPartial
- )
- self.classNameOverride = classNameOverride
-
- def __str__(self):
- return "Interface '%s'" % self.identifier.name
-
- def isInterface(self):
- return True
-
- def getClassName(self):
- if self.classNameOverride:
- return self.classNameOverride
- return IDLInterfaceOrNamespace.getClassName(self)
-
- def addExtendedAttributes(self, attrs):
- for attr in attrs:
- identifier = attr.identifier()
-
- # Special cased attrs
- if identifier == "TreatNonCallableAsNull":
- raise WebIDLError(
- "TreatNonCallableAsNull cannot be specified on interfaces",
- [attr.location, self.location],
- )
- if identifier == "LegacyTreatNonObjectAsNull":
- raise WebIDLError(
- "LegacyTreatNonObjectAsNull cannot be specified on interfaces",
- [attr.location, self.location],
- )
- elif identifier == "LegacyNoInterfaceObject":
- if not attr.noArguments():
- raise WebIDLError(
- "[LegacyNoInterfaceObject] must take no arguments",
- [attr.location],
- )
-
- self._noInterfaceObject = True
- elif identifier == "LegacyFactoryFunction":
- if not attr.hasValue():
- raise WebIDLError(
- "LegacyFactoryFunction must either take an identifier or take a named argument list",
- [attr.location],
- )
-
- args = attr.args() if attr.hasArgs() else []
-
- retType = IDLWrapperType(self.location, self)
-
- method = IDLConstructor(attr.location, args, attr.value())
- method.reallyInit(self)
-
- # Named constructors are always assumed to be able to
- # throw (since there's no way to indicate otherwise).
- method.addExtendedAttributes(
- [IDLExtendedAttribute(self.location, ("Throws",))]
- )
-
- # We need to detect conflicts for LegacyFactoryFunctions across
- # interfaces. We first call resolve on the parentScope,
- # which will merge all LegacyFactoryFunctions with the same
- # identifier accross interfaces as overloads.
- method.resolve(self.parentScope)
-
- # Then we look up the identifier on the parentScope. If the
- # result is the same as the method we're adding then it
- # hasn't been added as an overload and it's the first time
- # we've encountered a LegacyFactoryFunction with that identifier.
- # If the result is not the same as the method we're adding
- # then it has been added as an overload and we need to check
- # whether the result is actually one of our existing
- # LegacyFactoryFunctions.
- newMethod = self.parentScope.lookupIdentifier(method.identifier)
- if newMethod == method:
- self.legacyFactoryFunctions.append(method)
- elif newMethod not in self.legacyFactoryFunctions:
- raise WebIDLError(
- "LegacyFactoryFunction conflicts with a "
- "LegacyFactoryFunction of a different interface",
- [method.location, newMethod.location],
- )
- elif identifier == "ExceptionClass":
- if not attr.noArguments():
- raise WebIDLError(
- "[ExceptionClass] must take no arguments", [attr.location]
- )
- if self.parent:
- raise WebIDLError(
- "[ExceptionClass] must not be specified on "
- "an interface with inherited interfaces",
- [attr.location, self.location],
- )
- elif identifier == "Global":
- if attr.hasValue():
- self.globalNames = [attr.value()]
- elif attr.hasArgs():
- self.globalNames = attr.args()
- else:
- self.globalNames = [self.identifier.name]
- self.parentScope.addIfaceGlobalNames(
- self.identifier.name, self.globalNames
- )
- self._isOnGlobalProtoChain = True
- elif identifier == "LegacyWindowAlias":
- if attr.hasValue():
- self.legacyWindowAliases = [attr.value()]
- elif attr.hasArgs():
- self.legacyWindowAliases = attr.args()
- else:
- raise WebIDLError(
- "[%s] must either take an identifier "
- "or take an identifier list" % identifier,
- [attr.location],
- )
- for alias in self.legacyWindowAliases:
- unresolved = IDLUnresolvedIdentifier(attr.location, alias)
- IDLObjectWithIdentifier(attr.location, self.parentScope, unresolved)
- elif identifier == "SecureContext":
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must take no arguments" % identifier, [attr.location]
- )
- # This gets propagated to all our members.
- for member in self.members:
- if member.getExtendedAttribute("SecureContext"):
- raise WebIDLError(
- "[SecureContext] specified on both "
- "an interface member and on the "
- "interface itself",
- [member.location, attr.location],
- )
- member.addExtendedAttributes([attr])
- elif (
- identifier == "NeedResolve"
- or identifier == "LegacyOverrideBuiltIns"
- or identifier == "ChromeOnly"
- or identifier == "LegacyUnforgeable"
- or identifier == "LegacyEventInit"
- or identifier == "ProbablyShortLivingWrapper"
- or identifier == "LegacyUnenumerableNamedProperties"
- or identifier == "RunConstructorInCallerCompartment"
- or identifier == "WantsEventListenerHooks"
- or identifier == "Serializable"
- or identifier == "Abstract"
- or identifier == "Inline"
- ):
- # Known extended attributes that do not take values
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must take no arguments" % identifier, [attr.location]
- )
- elif identifier == "Exposed":
- convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
- elif (
- identifier == "Pref"
- or identifier == "JSImplementation"
- or identifier == "HeaderFile"
- or identifier == "Func"
- or identifier == "Trial"
- or identifier == "Deprecated"
- ):
- # Known extended attributes that take a string value
- if not attr.hasValue():
- raise WebIDLError(
- "[%s] must have a value" % identifier, [attr.location]
- )
- elif identifier == "InstrumentedProps":
- # Known extended attributes that take a list
- if not attr.hasArgs():
- raise WebIDLError(
- "[%s] must have arguments" % identifier, [attr.location]
- )
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on interface" % identifier,
- [attr.location],
- )
-
- attrlist = attr.listValue()
- self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
-
- def validate(self):
- IDLInterfaceOrNamespace.validate(self)
- if self.parent and self.isSerializable() and not self.parent.isSerializable():
- raise WebIDLError(
- "Serializable interface inherits from non-serializable "
- "interface. Per spec, that means the object should not be "
- "serializable, so chances are someone made a mistake here "
- "somewhere.",
- [self.location, self.parent.location],
- )
-
- def isSerializable(self):
- return self.getExtendedAttribute("Serializable")
-
- def setNonPartial(self, location, parent, members):
- # Before we do anything else, finish initializing any constructors that
- # might be in "members", so we don't have partially-initialized objects
- # hanging around. We couldn't do it before now because we needed to have
- # to have the IDLInterface on hand to properly set the return type.
- for member in members:
- if isinstance(member, IDLConstructor):
- member.reallyInit(self)
-
- IDLInterfaceOrNamespace.setNonPartial(self, location, parent, members)
-
-
-class IDLNamespace(IDLInterfaceOrNamespace):
- def __init__(self, location, parentScope, name, members, isKnownNonPartial):
- IDLInterfaceOrNamespace.__init__(
- self, location, parentScope, name, None, members, isKnownNonPartial
- )
-
- def __str__(self):
- return "Namespace '%s'" % self.identifier.name
-
- def isNamespace(self):
- return True
-
- def addExtendedAttributes(self, attrs):
- # The set of things namespaces support is small enough it's simpler
- # to factor out into a separate method than it is to sprinkle
- # isNamespace() checks all through
- # IDLInterfaceOrNamespace.addExtendedAttributes.
- for attr in attrs:
- identifier = attr.identifier()
-
- if identifier == "Exposed":
- convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
- elif identifier == "ClassString":
- # Takes a string value to override the default "Object" if
- # desired.
- if not attr.hasValue():
- raise WebIDLError(
- "[%s] must have a value" % identifier, [attr.location]
- )
- elif identifier == "ProtoObjectHack" or identifier == "ChromeOnly":
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must not have arguments" % identifier, [attr.location]
- )
- elif (
- identifier == "Pref"
- or identifier == "HeaderFile"
- or identifier == "Func"
- or identifier == "Trial"
- ):
- # Known extended attributes that take a string value
- if not attr.hasValue():
- raise WebIDLError(
- "[%s] must have a value" % identifier, [attr.location]
- )
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on namespace" % identifier,
- [attr.location],
- )
-
- attrlist = attr.listValue()
- self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
-
- def isSerializable(self):
- return False
-
-
-class IDLDictionary(IDLObjectWithScope):
- def __init__(self, location, parentScope, name, parent, members):
- assert isinstance(parentScope, IDLScope)
- assert isinstance(name, IDLUnresolvedIdentifier)
- assert not parent or isinstance(parent, IDLIdentifierPlaceholder)
-
- self.parent = parent
- self._finished = False
- self.members = list(members)
- self._partialDictionaries = []
- self._extendedAttrDict = {}
- self.needsConversionToJS = False
- self.needsConversionFromJS = False
-
- IDLObjectWithScope.__init__(self, location, parentScope, name)
-
- def __str__(self):
- return "Dictionary '%s'" % self.identifier.name
-
- def isDictionary(self):
- return True
-
- def canBeEmpty(self):
- """
- Returns true if this dictionary can be empty (that is, it has no
- required members and neither do any of its ancestors).
- """
- return all(member.optional for member in self.members) and (
- not self.parent or self.parent.canBeEmpty()
- )
-
- def finish(self, scope):
- if self._finished:
- return
-
- self._finished = True
-
- if self.parent:
- assert isinstance(self.parent, IDLIdentifierPlaceholder)
- oldParent = self.parent
- self.parent = self.parent.finish(scope)
- if not isinstance(self.parent, IDLDictionary):
- raise WebIDLError(
- "Dictionary %s has parent that is not a dictionary"
- % self.identifier.name,
- [oldParent.location, self.parent.location],
- )
-
- # Make sure the parent resolves all its members before we start
- # looking at them.
- self.parent.finish(scope)
-
- # Now go ahead and merge in our partial dictionaries.
- for partial in self._partialDictionaries:
- partial.finish(scope)
- self.members.extend(partial.members)
-
- for member in self.members:
- member.resolve(self)
- if not member.isComplete():
- member.complete(scope)
- assert member.type.isComplete()
-
- # Members of a dictionary are sorted in lexicographic order,
- # unless the dictionary opts out.
- if not self.getExtendedAttribute("Unsorted"):
- self.members.sort(key=lambda x: x.identifier.name)
-
- inheritedMembers = []
- ancestor = self.parent
- while ancestor:
- if ancestor == self:
- raise WebIDLError(
- "Dictionary %s has itself as an ancestor" % self.identifier.name,
- [self.identifier.location],
- )
- inheritedMembers.extend(ancestor.members)
- ancestor = ancestor.parent
-
- # Catch name duplication
- for inheritedMember in inheritedMembers:
- for member in self.members:
- if member.identifier.name == inheritedMember.identifier.name:
- raise WebIDLError(
- "Dictionary %s has two members with name %s"
- % (self.identifier.name, member.identifier.name),
- [member.location, inheritedMember.location],
- )
-
- def validate(self):
- def typeContainsDictionary(memberType, dictionary):
- """
- Returns a tuple whose:
-
- - First element is a Boolean value indicating whether
- memberType contains dictionary.
-
- - Second element is:
- A list of locations that leads from the type that was passed in
- the memberType argument, to the dictionary being validated,
- if the boolean value in the first element is True.
-
- None, if the boolean value in the first element is False.
- """
-
- if (
- memberType.nullable()
- or memberType.isSequence()
- or memberType.isRecord()
- ):
- return typeContainsDictionary(memberType.inner, dictionary)
-
- if memberType.isDictionary():
- if memberType.inner == dictionary:
- return (True, [memberType.location])
-
- (contains, locations) = dictionaryContainsDictionary(
- memberType.inner, dictionary
- )
- if contains:
- return (True, [memberType.location] + locations)
-
- if memberType.isUnion():
- for member in memberType.flatMemberTypes:
- (contains, locations) = typeContainsDictionary(member, dictionary)
- if contains:
- return (True, locations)
-
- return (False, None)
-
- def dictionaryContainsDictionary(dictMember, dictionary):
- for member in dictMember.members:
- (contains, locations) = typeContainsDictionary(member.type, dictionary)
- if contains:
- return (True, [member.location] + locations)
-
- if dictMember.parent:
- if dictMember.parent == dictionary:
- return (True, [dictMember.location])
- else:
- (contains, locations) = dictionaryContainsDictionary(
- dictMember.parent, dictionary
- )
- if contains:
- return (True, [dictMember.location] + locations)
-
- return (False, None)
-
- for member in self.members:
- if member.type.isDictionary() and member.type.nullable():
- raise WebIDLError(
- "Dictionary %s has member with nullable "
- "dictionary type" % self.identifier.name,
- [member.location],
- )
- (contains, locations) = typeContainsDictionary(member.type, self)
- if contains:
- raise WebIDLError(
- "Dictionary %s has member with itself as type."
- % self.identifier.name,
- [member.location] + locations,
- )
-
- if member.type.isUndefined():
- raise WebIDLError(
- "Dictionary %s has member with undefined as its type."
- % self.identifier.name,
- [member.location],
- )
- elif member.type.isUnion():
- for unionMember in member.type.unroll().flatMemberTypes:
- if unionMember.isUndefined():
- raise WebIDLError(
- "Dictionary %s has member with a union containing "
- "undefined as a type." % self.identifier.name,
- [unionMember.location],
- )
-
- def getExtendedAttribute(self, name):
- return self._extendedAttrDict.get(name, None)
-
- def addExtendedAttributes(self, attrs):
- for attr in attrs:
- identifier = attr.identifier()
-
- if identifier == "GenerateInitFromJSON" or identifier == "GenerateInit":
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must not have arguments" % identifier, [attr.location]
- )
- self.needsConversionFromJS = True
- elif (
- identifier == "GenerateConversionToJS" or identifier == "GenerateToJSON"
- ):
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must not have arguments" % identifier, [attr.location]
- )
- # ToJSON methods require to-JS conversion, because we
- # implement ToJSON by converting to a JS object and
- # then using JSON.stringify.
- self.needsConversionToJS = True
- elif identifier == "Unsorted":
- if not attr.noArguments():
- raise WebIDLError(
- "[Unsorted] must take no arguments", [attr.location]
- )
- else:
- raise WebIDLError(
- "[%s] extended attribute not allowed on "
- "dictionaries" % identifier,
- [attr.location],
- )
-
- self._extendedAttrDict[identifier] = True
-
- def _getDependentObjects(self):
- deps = set(self.members)
- if self.parent:
- deps.add(self.parent)
- return deps
-
- def addPartialDictionary(self, partial):
- assert self.identifier.name == partial.identifier.name
- self._partialDictionaries.append(partial)
-
-
-class IDLEnum(IDLObjectWithIdentifier):
- def __init__(self, location, parentScope, name, values):
- assert isinstance(parentScope, IDLScope)
- assert isinstance(name, IDLUnresolvedIdentifier)
-
- if len(values) != len(set(values)):
- raise WebIDLError(
- "Enum %s has multiple identical strings" % name.name, [location]
- )
-
- IDLObjectWithIdentifier.__init__(self, location, parentScope, name)
- self._values = values
-
- def values(self):
- return self._values
-
- def finish(self, scope):
- pass
-
- def validate(self):
- pass
-
- def isEnum(self):
- return True
-
- def addExtendedAttributes(self, attrs):
- if len(attrs) != 0:
- raise WebIDLError(
- "There are no extended attributes that are " "allowed on enums",
- [attrs[0].location, self.location],
- )
-
- def _getDependentObjects(self):
- return set()
-
-
-class IDLType(IDLObject):
- Tags = enum(
- # The integer types
- "int8",
- "uint8",
- "int16",
- "uint16",
- "int32",
- "uint32",
- "int64",
- "uint64",
- # Additional primitive types
- "bool",
- "unrestricted_float",
- "float",
- "unrestricted_double",
- # "double" last primitive type to match IDLBuiltinType
- "double",
- # Other types
- "any",
- "undefined",
- "domstring",
- "bytestring",
- "usvstring",
- "utf8string",
- "jsstring",
- "object",
- # Funny stuff
- "interface",
- "dictionary",
- "enum",
- "callback",
- "union",
- "sequence",
- "record",
- "promise",
- "observablearray",
- )
-
- def __init__(self, location, name):
- IDLObject.__init__(self, location)
- self.name = name
- self.builtin = False
- self.legacyNullToEmptyString = False
- self._clamp = False
- self._enforceRange = False
- self._allowShared = False
- self._extendedAttrDict = {}
-
- def __hash__(self):
- return (
- hash(self.builtin)
- + hash(self.name)
- + hash(self._clamp)
- + hash(self._enforceRange)
- + hash(self.legacyNullToEmptyString)
- + hash(self._allowShared)
- )
-
- def __eq__(self, other):
- return (
- other
- and self.builtin == other.builtin
- and self.name == other.name
- and self._clamp == other.hasClamp()
- and self._enforceRange == other.hasEnforceRange()
- and self.legacyNullToEmptyString == other.legacyNullToEmptyString
- and self._allowShared == other.hasAllowShared()
- )
-
- def __ne__(self, other):
- return not self == other
-
- def __str__(self):
- return str(self.name)
-
- def prettyName(self):
- """
- A name that looks like what this type is named in the IDL spec. By default
- this is just our .name, but types that have more interesting spec
- representations should override this.
- """
- return str(self.name)
-
- def isType(self):
- return True
-
- def nullable(self):
- return False
-
- def isPrimitive(self):
- return False
-
- def isBoolean(self):
- return False
-
- def isNumeric(self):
- return False
-
- def isString(self):
- return False
-
- def isByteString(self):
- return False
-
- def isDOMString(self):
- return False
-
- def isUSVString(self):
- return False
-
- def isUTF8String(self):
- return False
-
- def isJSString(self):
- return False
-
- def isUndefined(self):
- return False
-
- def isSequence(self):
- return False
-
- def isRecord(self):
- return False
-
- def isReadableStream(self):
- return False
-
- def isArrayBuffer(self):
- return False
-
- def isArrayBufferView(self):
- return False
-
- def isTypedArray(self):
- return False
-
- def isBufferSource(self):
- return self.isArrayBuffer() or self.isArrayBufferView() or self.isTypedArray()
-
- def isCallbackInterface(self):
- return False
-
- def isNonCallbackInterface(self):
- return False
-
- def isGeckoInterface(self):
- """Returns a boolean indicating whether this type is an 'interface'
- type that is implemented in Gecko. At the moment, this returns
- true for all interface types that are not types from the TypedArray
- spec."""
- return self.isInterface() and not self.isSpiderMonkeyInterface()
-
- def isSpiderMonkeyInterface(self):
- """Returns a boolean indicating whether this type is an 'interface'
- type that is implemented in SpiderMonkey."""
- return self.isInterface() and (self.isBufferSource() or self.isReadableStream())
-
- def isAny(self):
- return self.tag() == IDLType.Tags.any
-
- def isObject(self):
- return self.tag() == IDLType.Tags.object
-
- def isPromise(self):
- return False
-
- def isComplete(self):
- return True
-
- def includesRestrictedFloat(self):
- return False
-
- def isFloat(self):
- return False
-
- def isUnrestricted(self):
- # Should only call this on float types
- assert self.isFloat()
-
- def isJSONType(self):
- return False
-
- def isObservableArray(self):
- return False
-
- def isDictionaryLike(self):
- return self.isDictionary() or self.isRecord() or self.isCallbackInterface()
-
- def hasClamp(self):
- return self._clamp
-
- def hasEnforceRange(self):
- return self._enforceRange
-
- def hasAllowShared(self):
- return self._allowShared
-
- def tag(self):
- assert False # Override me!
-
- def treatNonCallableAsNull(self):
- assert self.tag() == IDLType.Tags.callback
- return self.nullable() and self.inner.callback._treatNonCallableAsNull
-
- def treatNonObjectAsNull(self):
- assert self.tag() == IDLType.Tags.callback
- return self.nullable() and self.inner.callback._treatNonObjectAsNull
-
- def withExtendedAttributes(self, attrs):
- if len(attrs) > 0:
- raise WebIDLError(
- "Extended attributes on types only supported for builtins",
- [attrs[0].location, self.location],
- )
- return self
-
- def getExtendedAttribute(self, name):
- return self._extendedAttrDict.get(name, None)
-
- def resolveType(self, parentScope):
- pass
-
- def unroll(self):
- return self
-
- def isDistinguishableFrom(self, other):
- raise TypeError(
- "Can't tell whether a generic type is or is not "
- "distinguishable from other things"
- )
-
- def isExposedInAllOf(self, exposureSet):
- return True
-
-
-class IDLUnresolvedType(IDLType):
- """
- Unresolved types are interface types
- """
-
- def __init__(self, location, name, attrs=[]):
- IDLType.__init__(self, location, name)
- self.extraTypeAttributes = attrs
-
- def isComplete(self):
- return False
-
- def complete(self, scope):
- obj = None
- try:
- obj = scope._lookupIdentifier(self.name)
- except:
- raise WebIDLError("Unresolved type '%s'." % self.name, [self.location])
-
- assert obj
- assert not obj.isType()
- if obj.isTypedef():
- assert self.name.name == obj.identifier.name
- typedefType = IDLTypedefType(self.location, obj.innerType, obj.identifier)
- assert not typedefType.isComplete()
- return typedefType.complete(scope).withExtendedAttributes(
- self.extraTypeAttributes
- )
- elif obj.isCallback() and not obj.isInterface():
- assert self.name.name == obj.identifier.name
- return IDLCallbackType(obj.location, obj)
-
- name = self.name.resolve(scope, None)
- return IDLWrapperType(self.location, obj)
-
- def withExtendedAttributes(self, attrs):
- return IDLUnresolvedType(self.location, self.name, attrs)
-
- def isDistinguishableFrom(self, other):
- raise TypeError(
- "Can't tell whether an unresolved type is or is not "
- "distinguishable from other things"
- )
-
-
-class IDLParametrizedType(IDLType):
- def __init__(self, location, name, innerType):
- IDLType.__init__(self, location, name)
- self.builtin = False
- self.inner = innerType
-
- def includesRestrictedFloat(self):
- return self.inner.includesRestrictedFloat()
-
- def resolveType(self, parentScope):
- assert isinstance(parentScope, IDLScope)
- self.inner.resolveType(parentScope)
-
- def isComplete(self):
- return self.inner.isComplete()
-
- def unroll(self):
- return self.inner.unroll()
-
- def _getDependentObjects(self):
- return self.inner._getDependentObjects()
-
-
-class IDLNullableType(IDLParametrizedType):
- def __init__(self, location, innerType):
- assert not innerType == BuiltinTypes[IDLBuiltinType.Types.any]
-
- IDLParametrizedType.__init__(self, location, None, innerType)
-
- def __hash__(self):
- return hash(self.inner)
-
- def __eq__(self, other):
- return isinstance(other, IDLNullableType) and self.inner == other.inner
-
- def __str__(self):
- return self.inner.__str__() + "OrNull"
-
- def prettyName(self):
- return self.inner.prettyName() + "?"
-
- def nullable(self):
- return True
-
- def isCallback(self):
- return self.inner.isCallback()
-
- def isPrimitive(self):
- return self.inner.isPrimitive()
-
- def isBoolean(self):
- return self.inner.isBoolean()
-
- def isNumeric(self):
- return self.inner.isNumeric()
-
- def isString(self):
- return self.inner.isString()
-
- def isByteString(self):
- return self.inner.isByteString()
-
- def isDOMString(self):
- return self.inner.isDOMString()
-
- def isUSVString(self):
- return self.inner.isUSVString()
-
- def isUTF8String(self):
- return self.inner.isUTF8String()
-
- def isJSString(self):
- return self.inner.isJSString()
-
- def isFloat(self):
- return self.inner.isFloat()
-
- def isUnrestricted(self):
- return self.inner.isUnrestricted()
-
- def isInteger(self):
- return self.inner.isInteger()
-
- def isUndefined(self):
- return self.inner.isUndefined()
-
- def isSequence(self):
- return self.inner.isSequence()
-
- def isRecord(self):
- return self.inner.isRecord()
-
- def isReadableStream(self):
- return self.inner.isReadableStream()
-
- def isArrayBuffer(self):
- return self.inner.isArrayBuffer()
-
- def isArrayBufferView(self):
- return self.inner.isArrayBufferView()
-
- def isTypedArray(self):
- return self.inner.isTypedArray()
-
- def isDictionary(self):
- return self.inner.isDictionary()
-
- def isInterface(self):
- return self.inner.isInterface()
-
- def isPromise(self):
- # There is no such thing as a nullable Promise.
- assert not self.inner.isPromise()
- return False
-
- def isCallbackInterface(self):
- return self.inner.isCallbackInterface()
-
- def isNonCallbackInterface(self):
- return self.inner.isNonCallbackInterface()
-
- def isEnum(self):
- return self.inner.isEnum()
-
- def isUnion(self):
- return self.inner.isUnion()
-
- def isJSONType(self):
- return self.inner.isJSONType()
-
- def isObservableArray(self):
- return self.inner.isObservableArray()
-
- def hasClamp(self):
- return self.inner.hasClamp()
-
- def hasEnforceRange(self):
- return self.inner.hasEnforceRange()
-
- def hasAllowShared(self):
- return self.inner.hasAllowShared()
-
- def isComplete(self):
- return self.name is not None
-
- def tag(self):
- return self.inner.tag()
-
- def complete(self, scope):
- if not self.inner.isComplete():
- self.inner = self.inner.complete(scope)
- assert self.inner.isComplete()
-
- if self.inner.nullable():
- raise WebIDLError(
- "The inner type of a nullable type must not be a nullable type",
- [self.location, self.inner.location],
- )
- if self.inner.isUnion():
- if self.inner.hasNullableType:
- raise WebIDLError(
- "The inner type of a nullable type must not "
- "be a union type that itself has a nullable "
- "type as a member type",
- [self.location],
- )
- if self.inner.isDOMString():
- if self.inner.legacyNullToEmptyString:
- raise WebIDLError(
- "[LegacyNullToEmptyString] not allowed on a nullable DOMString",
- [self.location, self.inner.location],
- )
- if self.inner.isObservableArray():
- raise WebIDLError(
- "The inner type of a nullable type must not be an ObservableArray type",
- [self.location, self.inner.location],
- )
-
- self.name = self.inner.name + "OrNull"
- return self
-
- def isDistinguishableFrom(self, other):
- if (
- other.nullable()
- or other.isDictionary()
- or (
- other.isUnion() and (other.hasNullableType or other.hasDictionaryType())
- )
- ):
- # Can't tell which type null should become
- return False
- return self.inner.isDistinguishableFrom(other)
-
- def withExtendedAttributes(self, attrs):
- # See https://github.com/heycam/webidl/issues/827#issuecomment-565131350
- # Allowing extended attributes to apply to a nullable type is an intermediate solution.
- # A potential longer term solution is to introduce a null type and get rid of nullables.
- # For example, we could do `([Clamp] long or null) foo` in the future.
- return IDLNullableType(self.location, self.inner.withExtendedAttributes(attrs))
-
-
-class IDLSequenceType(IDLParametrizedType):
- def __init__(self, location, parameterType):
- assert not parameterType.isUndefined()
-
- IDLParametrizedType.__init__(self, location, parameterType.name, parameterType)
- # Need to set self.name up front if our inner type is already complete,
- # since in that case our .complete() won't be called.
- if self.inner.isComplete():
- self.name = self.inner.name + "Sequence"
-
- def __hash__(self):
- return hash(self.inner)
-
- def __eq__(self, other):
- return isinstance(other, IDLSequenceType) and self.inner == other.inner
-
- def __str__(self):
- return self.inner.__str__() + "Sequence"
-
- def prettyName(self):
- return "sequence<%s>" % self.inner.prettyName()
-
- def isSequence(self):
- return True
-
- def isJSONType(self):
- return self.inner.isJSONType()
-
- def tag(self):
- return IDLType.Tags.sequence
-
- def complete(self, scope):
- if self.inner.isObservableArray():
- raise WebIDLError(
- "The inner type of a sequence type must not be an ObservableArray type",
- [self.location, self.inner.location],
- )
-
- self.inner = self.inner.complete(scope)
- self.name = self.inner.name + "Sequence"
- return self
-
- def isDistinguishableFrom(self, other):
- if other.isPromise():
- return False
- if other.isUnion():
- # Just forward to the union; it'll deal
- return other.isDistinguishableFrom(self)
- return (
- other.isUndefined()
- or other.isPrimitive()
- or other.isString()
- or other.isEnum()
- or other.isInterface()
- or other.isDictionary()
- or other.isCallback()
- or other.isRecord()
- )
-
-
class IDLRecordType(IDLParametrizedType):
    """IDL record<K, V>.

    self.inner is the value type (managed by IDLParametrizedType);
    self.keyType is the (string, already-complete) key type.
    """

    def __init__(self, location, keyType, valueType):
        assert keyType.isString()
        assert keyType.isComplete()
        assert not valueType.isUndefined()

        IDLParametrizedType.__init__(self, location, valueType.name, valueType)
        self.keyType = keyType

        # Need to set self.name up front if our inner type is already complete,
        # since in that case our .complete() won't be called.
        if self.inner.isComplete():
            self.name = self.keyType.name + self.inner.name + "Record"

    def __hash__(self):
        # Hash both components so the hash stays consistent with __eq__.
        return hash((self.keyType, self.inner))

    def __eq__(self, other):
        # Bug fix: the key type must participate in equality.  Previously
        # only the value type was compared, so record<DOMString, long> and
        # record<ByteString, long> compared equal even though name/__str__/
        # prettyName all distinguish them.
        return (
            isinstance(other, IDLRecordType)
            and self.keyType == other.keyType
            and self.inner == other.inner
        )

    def __str__(self):
        return self.keyType.__str__() + self.inner.__str__() + "Record"

    def prettyName(self):
        return "record<%s, %s>" % (self.keyType.prettyName(), self.inner.prettyName())

    def isRecord(self):
        return True

    def isJSONType(self):
        # A record is JSON-serializable iff its value type is.
        return self.inner.isJSONType()

    def tag(self):
        return IDLType.Tags.record

    def complete(self, scope):
        """Complete the value type and derive our name; returns self."""
        if self.inner.isObservableArray():
            raise WebIDLError(
                "The value type of a record type must not be an ObservableArray type",
                [self.location, self.inner.location],
            )

        self.inner = self.inner.complete(scope)
        self.name = self.keyType.name + self.inner.name + "Record"
        return self

    def unroll(self):
        # We do not unroll our inner. Just stop at ourselves. That
        # lets us add headers for both ourselves and our inner as
        # needed.
        return self

    def isDistinguishableFrom(self, other):
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        return (
            other.isPrimitive()
            or other.isString()
            or other.isEnum()
            or other.isNonCallbackInterface()
            or other.isSequence()
        )

    def isExposedInAllOf(self, exposureSet):
        return self.inner.unroll().isExposedInAllOf(exposureSet)
-
-
class IDLObservableArrayType(IDLParametrizedType):
    """IDL ObservableArray<T>: a parametrized type with restricted inners."""

    def __init__(self, location, innerType):
        assert not innerType.isUndefined()
        IDLParametrizedType.__init__(self, location, None, innerType)

    def __hash__(self):
        return hash(self.inner)

    def __eq__(self, other):
        if not isinstance(other, IDLObservableArrayType):
            return False
        return self.inner == other.inner

    def __str__(self):
        return "%sObservableArray" % self.inner

    def prettyName(self):
        return "ObservableArray<%s>" % self.inner.prettyName()

    def isJSONType(self):
        return self.inner.isJSONType()

    def isObservableArray(self):
        return True

    def isComplete(self):
        # The name is derived only once the inner type is completed, so
        # having a name is equivalent to being complete.
        return self.name is not None

    def tag(self):
        return IDLType.Tags.observablearray

    def complete(self, scope):
        """Complete the inner type, reject forbidden inners, derive our name."""
        if not self.inner.isComplete():
            self.inner = self.inner.complete(scope)
        assert self.inner.isComplete()

        # ObservableArray must not nest these container-like types;
        # check them in the same order the spec errors are expected.
        forbidden = (
            (self.inner.isDictionary(),
             "The inner type of an ObservableArray type must not "
             "be a dictionary type"),
            (self.inner.isSequence(),
             "The inner type of an ObservableArray type must not "
             "be a sequence type"),
            (self.inner.isRecord(),
             "The inner type of an ObservableArray type must not be a record type"),
            (self.inner.isObservableArray(),
             "The inner type of an ObservableArray type must not "
             "be an ObservableArray type"),
        )
        for isBad, message in forbidden:
            if isBad:
                raise WebIDLError(message, [self.location, self.inner.location])

        self.name = self.inner.name + "ObservableArray"
        return self

    def isDistinguishableFrom(self, other):
        # ObservableArrays are not distinguishable from anything.
        return False
-
-
class IDLUnionType(IDLType):
    """An IDL union type, e.g. (long or DOMString).

    memberTypes holds the declared member types.  flatMemberTypes, set by
    complete(), holds the flattened members: nullability stripped and nested
    unions expanded in place.
    """

    def __init__(self, location, memberTypes):
        IDLType.__init__(self, location, "")
        self.memberTypes = memberTypes
        # Becomes True when complete() encounters a nullable member.
        self.hasNullableType = False
        # The (at most one) dictionary-typed flat member, if any.
        self._dictionaryType = None
        # None until complete() runs; doubles as the isComplete() flag.
        self.flatMemberTypes = None
        self.builtin = False

    def __eq__(self, other):
        return isinstance(other, IDLUnionType) and self.memberTypes == other.memberTypes

    def __hash__(self):
        # The name is only derived during complete(); hashing before that
        # would not be stable.
        assert self.isComplete()
        return self.name.__hash__()

    def prettyName(self):
        return "(" + " or ".join(m.prettyName() for m in self.memberTypes) + ")"

    def isUnion(self):
        return True

    def isJSONType(self):
        # A union is JSON-serializable only if every member type is.
        return all(m.isJSONType() for m in self.memberTypes)

    def includesRestrictedFloat(self):
        return any(t.includesRestrictedFloat() for t in self.memberTypes)

    def tag(self):
        return IDLType.Tags.union

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        for t in self.memberTypes:
            t.resolveType(parentScope)

    def isComplete(self):
        return self.flatMemberTypes is not None

    def complete(self, scope):
        """Complete all members, derive the union's name, flatten members,
        and enforce the WebIDL restrictions on union membership.

        Raises WebIDLError for multiple nullable members, for mixing a
        nullable member with a dictionary member, and for flat members that
        are not pairwise distinguishable.  Returns self.
        """
        def typeName(type):
            if isinstance(type, IDLNullableType):
                return typeName(type.inner) + "OrNull"
            if isinstance(type, IDLWrapperType):
                return typeName(type._identifier.object())
            if isinstance(type, IDLObjectWithIdentifier):
                return typeName(type.identifier)
            if isinstance(type, IDLBuiltinType) and type.hasAllowShared():
                assert type.isBufferSource()
                return "MaybeShared" + type.name
            return type.name

        for (i, type) in enumerate(self.memberTypes):
            # Exclude typedefs because if given "typedef (B or C) test",
            # we want AOrTest, not AOrBOrC
            if not type.isComplete() and not isinstance(type, IDLTypedefType):
                self.memberTypes[i] = type.complete(scope)

        self.name = "Or".join(typeName(type) for type in self.memberTypes)

        # We do this again to complete the typedef types
        for (i, type) in enumerate(self.memberTypes):
            if not type.isComplete():
                self.memberTypes[i] = type.complete(scope)

        # Flatten in place: nullable members are replaced by their inner type
        # and nested unions are spliced into the list, re-examining from the
        # same index after each rewrite.
        self.flatMemberTypes = list(self.memberTypes)
        i = 0
        while i < len(self.flatMemberTypes):
            if self.flatMemberTypes[i].nullable():
                if self.hasNullableType:
                    raise WebIDLError(
                        "Can't have more than one nullable types in a union",
                        [nullableType.location, self.flatMemberTypes[i].location],
                    )
                if self.hasDictionaryType():
                    raise WebIDLError(
                        "Can't have a nullable type and a "
                        "dictionary type in a union",
                        [
                            self._dictionaryType.location,
                            self.flatMemberTypes[i].location,
                        ],
                    )
                self.hasNullableType = True
                nullableType = self.flatMemberTypes[i]
                self.flatMemberTypes[i] = self.flatMemberTypes[i].inner
                continue
            if self.flatMemberTypes[i].isDictionary():
                if self.hasNullableType:
                    raise WebIDLError(
                        "Can't have a nullable type and a "
                        "dictionary type in a union",
                        [nullableType.location, self.flatMemberTypes[i].location],
                    )
                self._dictionaryType = self.flatMemberTypes[i]
                self.flatMemberTypes[i].inner.needsConversionFromJS = True
            elif self.flatMemberTypes[i].isUnion():
                self.flatMemberTypes[i : i + 1] = self.flatMemberTypes[i].memberTypes
                continue
            i += 1

        # Every pair of flat members must be distinguishable.
        for (i, t) in enumerate(self.flatMemberTypes[:-1]):
            for u in self.flatMemberTypes[i + 1 :]:
                if not t.isDistinguishableFrom(u):
                    raise WebIDLError(
                        "Flat member types of a union should be "
                        "distinguishable, " + str(t) + " is not "
                        "distinguishable from " + str(u),
                        [self.location, t.location, u.location],
                    )

        return self

    def isDistinguishableFrom(self, other):
        if self.hasNullableType and other.nullable():
            # Can't tell which type null should become
            return False
        if other.isUnion():
            otherTypes = other.unroll().memberTypes
        else:
            otherTypes = [other]
        # For every type in otherTypes, check that it's distinguishable from
        # every type in our types
        for u in otherTypes:
            if any(not t.isDistinguishableFrom(u) for t in self.memberTypes):
                return False
        return True

    def isExposedInAllOf(self, exposureSet):
        # We could have different member types in different globals.  Just
        # make sure that each global in exposureSet has at least one of our
        # member types exposed in it.
        for globalName in exposureSet:
            if not any(
                t.unroll().isExposedInAllOf(set([globalName]))
                for t in self.flatMemberTypes
            ):
                return False
        return True

    def hasDictionaryType(self):
        return self._dictionaryType is not None

    def hasPossiblyEmptyDictionaryType(self):
        return (
            self._dictionaryType is not None and self._dictionaryType.inner.canBeEmpty()
        )

    def _getDependentObjects(self):
        return set(self.memberTypes)
-
-
class IDLTypedefType(IDLType):
    """A *use* of a typedef'd type (as opposed to IDLTypedef, the
    declaration).

    This is a thin facade over the aliased type: nearly every predicate
    delegates to self.inner, and complete() resolves to the inner type
    itself, so the typedef wrapper disappears after completion.
    """

    def __init__(self, location, innerType, name):
        IDLType.__init__(self, location, name)
        self.inner = innerType
        self.builtin = False

    def __hash__(self):
        return hash(self.inner)

    def __eq__(self, other):
        return isinstance(other, IDLTypedefType) and self.inner == other.inner

    def __str__(self):
        return self.name

    # All of the following predicates simply delegate to the aliased type.

    def nullable(self):
        return self.inner.nullable()

    def isPrimitive(self):
        return self.inner.isPrimitive()

    def isBoolean(self):
        return self.inner.isBoolean()

    def isNumeric(self):
        return self.inner.isNumeric()

    def isString(self):
        return self.inner.isString()

    def isByteString(self):
        return self.inner.isByteString()

    def isDOMString(self):
        return self.inner.isDOMString()

    def isUSVString(self):
        return self.inner.isUSVString()

    def isUTF8String(self):
        return self.inner.isUTF8String()

    def isJSString(self):
        return self.inner.isJSString()

    def isUndefined(self):
        return self.inner.isUndefined()

    def isJSONType(self):
        return self.inner.isJSONType()

    def isSequence(self):
        return self.inner.isSequence()

    def isRecord(self):
        return self.inner.isRecord()

    def isReadableStream(self):
        return self.inner.isReadableStream()

    def isDictionary(self):
        return self.inner.isDictionary()

    def isArrayBuffer(self):
        return self.inner.isArrayBuffer()

    def isArrayBufferView(self):
        return self.inner.isArrayBufferView()

    def isTypedArray(self):
        return self.inner.isTypedArray()

    def isInterface(self):
        return self.inner.isInterface()

    def isCallbackInterface(self):
        return self.inner.isCallbackInterface()

    def isNonCallbackInterface(self):
        return self.inner.isNonCallbackInterface()

    def isComplete(self):
        # Deliberately always False: complete() must run so that this
        # wrapper gets replaced by the aliased type.
        return False

    def complete(self, parentScope):
        """Complete the aliased type and return *it* (not self); the typedef
        wrapper vanishes from the completed type graph."""
        if not self.inner.isComplete():
            self.inner = self.inner.complete(parentScope)
        assert self.inner.isComplete()
        return self.inner

    # Do we need a resolveType impl? I don't think it's particularly useful....

    def tag(self):
        return self.inner.tag()

    def unroll(self):
        return self.inner.unroll()

    def isDistinguishableFrom(self, other):
        return self.inner.isDistinguishableFrom(other)

    def _getDependentObjects(self):
        return self.inner._getDependentObjects()

    def withExtendedAttributes(self, attrs):
        # Attributes apply to the aliased type; wrap the result in a fresh
        # typedef use with the same name.
        return IDLTypedefType(
            self.location, self.inner.withExtendedAttributes(attrs), self.name
        )
-
-
class IDLTypedef(IDLObjectWithIdentifier):
    """The typedef *declaration* (as opposed to IDLTypedefType, a use)."""

    def __init__(self, location, parentScope, innerType, name):
        # Set self.innerType first, because IDLObjectWithIdentifier.__init__
        # will call our __str__, which wants to use it.
        self.innerType = innerType
        identifier = IDLUnresolvedIdentifier(location, name)
        IDLObjectWithIdentifier.__init__(self, location, parentScope, identifier)

    def __str__(self):
        return "Typedef %s %s" % (self.identifier.name, self.innerType)

    def finish(self, parentScope):
        if self.innerType.isComplete():
            return
        self.innerType = self.innerType.complete(parentScope)

    def validate(self):
        pass

    def isTypedef(self):
        return True

    def addExtendedAttributes(self, attrs):
        # Typedefs accept no extended attributes whatsoever.
        if not attrs:
            return
        raise WebIDLError(
            "There are no extended attributes that are allowed on typedefs",
            [attrs[0].location, self.location],
        )

    def _getDependentObjects(self):
        return self.innerType._getDependentObjects()
-
-
class IDLWrapperType(IDLType):
    """A type wrapping a non-builtin IDL object: an interface (internal or
    external), a dictionary, or an enum.  self.inner is the wrapped object
    and self._identifier its identifier.
    """

    def __init__(self, location, inner):
        IDLType.__init__(self, location, inner.identifier.name)
        self.inner = inner
        self._identifier = inner.identifier
        self.builtin = False

    def __hash__(self):
        return hash(self._identifier) + hash(self.builtin)

    def __eq__(self, other):
        # Equality is decided by the wrapped object's identifier, not by
        # object identity of the wrapper.
        return (
            isinstance(other, IDLWrapperType)
            and self._identifier == other._identifier
            and self.builtin == other.builtin
        )

    def __str__(self):
        return str(self.name) + " (Wrapper)"

    def isDictionary(self):
        return isinstance(self.inner, IDLDictionary)

    def isInterface(self):
        return isinstance(self.inner, IDLInterface) or isinstance(
            self.inner, IDLExternalInterface
        )

    def isCallbackInterface(self):
        return self.isInterface() and self.inner.isCallback()

    def isNonCallbackInterface(self):
        return self.isInterface() and not self.inner.isCallback()

    def isEnum(self):
        return isinstance(self.inner, IDLEnum)

    def isJSONType(self):
        """Whether the wrapped type is JSON-serializable.

        Interfaces: serializable iff some interface on the inheritance
        chain declares a toJSON method (external interfaces never are).
        Enums: always.  Dictionaries: iff every member type along the whole
        inheritance chain is serializable.
        """
        if self.isInterface():
            if self.inner.isExternal():
                return False
            iface = self.inner
            while iface:
                if any(m.isMethod() and m.isToJSON() for m in iface.members):
                    return True
                iface = iface.parent
            return False
        elif self.isEnum():
            return True
        elif self.isDictionary():
            dictionary = self.inner
            while dictionary:
                if not all(m.type.isJSONType() for m in dictionary.members):
                    return False
                dictionary = dictionary.parent
            return True
        else:
            raise WebIDLError(
                "IDLWrapperType wraps type %s that we don't know if "
                "is serializable" % type(self.inner),
                [self.location],
            )

    def resolveType(self, parentScope):
        assert isinstance(parentScope, IDLScope)
        self.inner.resolve(parentScope)

    def isComplete(self):
        return True

    def tag(self):
        if self.isInterface():
            return IDLType.Tags.interface
        elif self.isEnum():
            return IDLType.Tags.enum
        elif self.isDictionary():
            return IDLType.Tags.dictionary
        else:
            assert False

    def isDistinguishableFrom(self, other):
        # Implements the Web IDL "distinguishable" relation for wrapped
        # interface/enum/dictionary types.
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        assert self.isInterface() or self.isEnum() or self.isDictionary()
        if self.isEnum():
            return (
                other.isUndefined()
                or other.isPrimitive()
                or other.isInterface()
                or other.isObject()
                or other.isCallback()
                or other.isDictionary()
                or other.isSequence()
                or other.isRecord()
            )
        if self.isDictionary() and (other.nullable() or other.isUndefined()):
            return False
        if (
            other.isPrimitive()
            or other.isString()
            or other.isEnum()
            or other.isSequence()
        ):
            return True
        if self.isDictionary():
            return other.isNonCallbackInterface()

        assert self.isInterface()
        if other.isInterface():
            if other.isSpiderMonkeyInterface():
                # Just let |other| handle things
                return other.isDistinguishableFrom(self)
            assert self.isGeckoInterface() and other.isGeckoInterface()
            if self.inner.isExternal() or other.unroll().inner.isExternal():
                return self != other
            # Two internal interfaces are distinguishable only when no
            # interface is based on both of them and at least one side is a
            # non-callback interface.
            return len(
                self.inner.interfacesBasedOnSelf
                & other.unroll().inner.interfacesBasedOnSelf
            ) == 0 and (self.isNonCallbackInterface() or other.isNonCallbackInterface())
        if (
            other.isUndefined()
            or other.isDictionary()
            or other.isCallback()
            or other.isRecord()
        ):
            return self.isNonCallbackInterface()

        # Not much else |other| can be
        assert other.isObject()
        return False

    def isExposedInAllOf(self, exposureSet):
        if not self.isInterface():
            return True
        iface = self.inner
        if iface.isExternal():
            # Let's say true, so we don't have to implement exposure mixins on
            # external interfaces and sprinkle [Exposed=Window] on every single
            # external interface declaration.
            return True
        return iface.exposureSet.issuperset(exposureSet)

    def _getDependentObjects(self):
        # NB: The codegen for an interface type depends on
        #  a) That the identifier is in fact an interface (as opposed to
        #     a dictionary or something else).
        #  b) The native type of the interface.
        # If we depend on the interface object we will also depend on
        # anything the interface depends on which is undesirable. We
        # considered implementing a dependency just on the interface type
        # file, but then every modification to an interface would cause this
        # to be regenerated which is still undesirable. We decided not to
        # depend on anything, reasoning that:
        #  1) Changing the concrete type of the interface requires modifying
        #     Bindings.conf, which is still a global dependency.
        #  2) Changing an interface to a dictionary (or vice versa) with the
        #     same identifier should be incredibly rare.
        #
        # On the other hand, if our type is a dictionary, we should
        # depend on it, because the member types of a dictionary
        # affect whether a method taking the dictionary as an argument
        # takes a JSContext* argument or not.
        if self.isDictionary():
            return set([self.inner])
        return set()
-
-
class IDLPromiseType(IDLParametrizedType):
    """IDL Promise<T>: a parametrized type whose inner is the resolution type."""

    def __init__(self, location, innerType):
        IDLParametrizedType.__init__(self, location, "Promise", innerType)

    def __hash__(self):
        return hash(self.promiseInnerType())

    def __eq__(self, other):
        if not isinstance(other, IDLPromiseType):
            return False
        return self.promiseInnerType() == other.promiseInnerType()

    def __str__(self):
        return "%sPromise" % self.inner

    def prettyName(self):
        return "Promise<%s>" % self.inner.prettyName()

    def isPromise(self):
        return True

    def promiseInnerType(self):
        return self.inner

    def tag(self):
        return IDLType.Tags.promise

    def complete(self, scope):
        """Complete the resolution type; returns self."""
        if self.inner.isObservableArray():
            raise WebIDLError(
                "The inner type of a promise type must not be an ObservableArray type",
                [self.location, self.inner.location],
            )

        self.inner = self.promiseInnerType().complete(scope)
        return self

    def unroll(self):
        # Deliberately do not unroll the inner type; stopping here lets
        # callers emit headers for both the promise and its inner type.
        return self

    def isDistinguishableFrom(self, other):
        # Promises are not distinguishable from anything.
        return False

    def isExposedInAllOf(self, exposureSet):
        # Exposure follows the resolution type.
        return self.promiseInnerType().unroll().isExposedInAllOf(exposureSet)
-
-
class IDLBuiltinType(IDLType):
    """A builtin WebIDL type: primitives, strings, object/any/undefined, and
    the SpiderMonkey-implemented "interface-like" types (ArrayBuffer, typed
    arrays, ReadableStream).

    Several predicates (isPrimitive, isInteger, isTypedArray) rely on the
    ordinal ordering of the Types enum below, so the declaration order is
    load-bearing.
    """

    Types = enum(
        # The integer types
        "byte",
        "octet",
        "short",
        "unsigned_short",
        "long",
        "unsigned_long",
        "long_long",
        "unsigned_long_long",
        # Additional primitive types
        "boolean",
        "unrestricted_float",
        "float",
        "unrestricted_double",
        # IMPORTANT: "double" must be the last primitive type listed
        "double",
        # Other types
        "any",
        "undefined",
        "domstring",
        "bytestring",
        "usvstring",
        "utf8string",
        "jsstring",
        "object",
        # Funny stuff
        "ArrayBuffer",
        "ArrayBufferView",
        "Int8Array",
        "Uint8Array",
        "Uint8ClampedArray",
        "Int16Array",
        "Uint16Array",
        "Int32Array",
        "Uint32Array",
        "Float32Array",
        "Float64Array",
        "ReadableStream",
    )

    # Maps each builtin to the IDLType.Tags value returned by tag().
    TagLookup = {
        Types.byte: IDLType.Tags.int8,
        Types.octet: IDLType.Tags.uint8,
        Types.short: IDLType.Tags.int16,
        Types.unsigned_short: IDLType.Tags.uint16,
        Types.long: IDLType.Tags.int32,
        Types.unsigned_long: IDLType.Tags.uint32,
        Types.long_long: IDLType.Tags.int64,
        Types.unsigned_long_long: IDLType.Tags.uint64,
        Types.boolean: IDLType.Tags.bool,
        Types.unrestricted_float: IDLType.Tags.unrestricted_float,
        Types.float: IDLType.Tags.float,
        Types.unrestricted_double: IDLType.Tags.unrestricted_double,
        Types.double: IDLType.Tags.double,
        Types.any: IDLType.Tags.any,
        Types.undefined: IDLType.Tags.undefined,
        Types.domstring: IDLType.Tags.domstring,
        Types.bytestring: IDLType.Tags.bytestring,
        Types.usvstring: IDLType.Tags.usvstring,
        Types.utf8string: IDLType.Tags.utf8string,
        Types.jsstring: IDLType.Tags.jsstring,
        Types.object: IDLType.Tags.object,
        Types.ArrayBuffer: IDLType.Tags.interface,
        Types.ArrayBufferView: IDLType.Tags.interface,
        Types.Int8Array: IDLType.Tags.interface,
        Types.Uint8Array: IDLType.Tags.interface,
        Types.Uint8ClampedArray: IDLType.Tags.interface,
        Types.Int16Array: IDLType.Tags.interface,
        Types.Uint16Array: IDLType.Tags.interface,
        Types.Int32Array: IDLType.Tags.interface,
        Types.Uint32Array: IDLType.Tags.interface,
        Types.Float32Array: IDLType.Tags.interface,
        Types.Float64Array: IDLType.Tags.interface,
        Types.ReadableStream: IDLType.Tags.interface,
    }

    # Spec-style names used by prettyName() in error messages.
    PrettyNames = {
        Types.byte: "byte",
        Types.octet: "octet",
        Types.short: "short",
        Types.unsigned_short: "unsigned short",
        Types.long: "long",
        Types.unsigned_long: "unsigned long",
        Types.long_long: "long long",
        Types.unsigned_long_long: "unsigned long long",
        Types.boolean: "boolean",
        Types.unrestricted_float: "unrestricted float",
        Types.float: "float",
        Types.unrestricted_double: "unrestricted double",
        Types.double: "double",
        Types.any: "any",
        Types.undefined: "undefined",
        Types.domstring: "DOMString",
        Types.bytestring: "ByteString",
        Types.usvstring: "USVString",
        Types.utf8string: "USVString",  # That's what it is in spec terms
        Types.jsstring: "USVString",  # Again, that's what it is in spec terms
        Types.object: "object",
        Types.ArrayBuffer: "ArrayBuffer",
        Types.ArrayBufferView: "ArrayBufferView",
        Types.Int8Array: "Int8Array",
        Types.Uint8Array: "Uint8Array",
        Types.Uint8ClampedArray: "Uint8ClampedArray",
        Types.Int16Array: "Int16Array",
        Types.Uint16Array: "Uint16Array",
        Types.Int32Array: "Int32Array",
        Types.Uint32Array: "Uint32Array",
        Types.Float32Array: "Float32Array",
        Types.Float64Array: "Float64Array",
        Types.ReadableStream: "ReadableStream",
    }

    def __init__(
        self,
        location,
        name,
        type,
        clamp=False,
        enforceRange=False,
        legacyNullToEmptyString=False,
        allowShared=False,
        attrLocation=[],
    ):
        """
        The mutually exclusive clamp/enforceRange/legacyNullToEmptyString/allowShared arguments are used
        to create instances of this type with the appropriate attributes attached. Use .clamped(),
        .rangeEnforced(), .withLegacyNullToEmptyString() and .withAllowShared().

        attrLocation is an array of source locations of these attributes for error reporting.
        """
        IDLType.__init__(self, location, name)
        self.builtin = True
        self._typeTag = type
        # Lazily-created cached variants of this type with one attribute
        # applied; see clamped() / rangeEnforced() / etc. below.
        self._clamped = None
        self._rangeEnforced = None
        self._withLegacyNullToEmptyString = None
        self._withAllowShared = None
        # NOTE(review): self._clamp / self._enforceRange / self._allowShared /
        # self.legacyNullToEmptyString are read below and elsewhere without a
        # visible default here — presumably IDLType.__init__ initializes them
        # to False; confirm in the base class.
        if self.isInteger():
            if clamp:
                self._clamp = True
                self.name = "Clamped" + self.name
                self._extendedAttrDict["Clamp"] = True
            elif enforceRange:
                self._enforceRange = True
                self.name = "RangeEnforced" + self.name
                self._extendedAttrDict["EnforceRange"] = True
        elif clamp or enforceRange:
            raise WebIDLError(
                "Non-integer types cannot be [Clamp] or [EnforceRange]", attrLocation
            )
        if self.isDOMString() or self.isUTF8String():
            if legacyNullToEmptyString:
                self.legacyNullToEmptyString = True
                self.name = "NullIsEmpty" + self.name
                self._extendedAttrDict["LegacyNullToEmptyString"] = True
        elif legacyNullToEmptyString:
            raise WebIDLError(
                "Non-string types cannot be [LegacyNullToEmptyString]", attrLocation
            )
        if self.isBufferSource():
            if allowShared:
                self._allowShared = True
                self._extendedAttrDict["AllowShared"] = True
        elif allowShared:
            raise WebIDLError(
                "Types that are not buffer source types cannot be [AllowShared]",
                attrLocation,
            )

    def __str__(self):
        if self._allowShared:
            assert self.isBufferSource()
            return "MaybeShared" + str(self.name)
        return str(self.name)

    def prettyName(self):
        return IDLBuiltinType.PrettyNames[self._typeTag]

    def clamped(self, attrLocation):
        # Cached [Clamp]-annotated variant of this type.
        if not self._clamped:
            self._clamped = IDLBuiltinType(
                self.location,
                self.name,
                self._typeTag,
                clamp=True,
                attrLocation=attrLocation,
            )
        return self._clamped

    def rangeEnforced(self, attrLocation):
        # Cached [EnforceRange]-annotated variant of this type.
        if not self._rangeEnforced:
            self._rangeEnforced = IDLBuiltinType(
                self.location,
                self.name,
                self._typeTag,
                enforceRange=True,
                attrLocation=attrLocation,
            )
        return self._rangeEnforced

    def withLegacyNullToEmptyString(self, attrLocation):
        # Cached [LegacyNullToEmptyString]-annotated variant of this type.
        if not self._withLegacyNullToEmptyString:
            self._withLegacyNullToEmptyString = IDLBuiltinType(
                self.location,
                self.name,
                self._typeTag,
                legacyNullToEmptyString=True,
                attrLocation=attrLocation,
            )
        return self._withLegacyNullToEmptyString

    def withAllowShared(self, attrLocation):
        # Cached [AllowShared]-annotated variant of this type.
        if not self._withAllowShared:
            self._withAllowShared = IDLBuiltinType(
                self.location,
                self.name,
                self._typeTag,
                allowShared=True,
                attrLocation=attrLocation,
            )
        return self._withAllowShared

    def isPrimitive(self):
        # Relies on "double" being the last primitive in the Types enum.
        return self._typeTag <= IDLBuiltinType.Types.double

    def isBoolean(self):
        return self._typeTag == IDLBuiltinType.Types.boolean

    def isUndefined(self):
        return self._typeTag == IDLBuiltinType.Types.undefined

    def isNumeric(self):
        return self.isPrimitive() and not self.isBoolean()

    def isString(self):
        return (
            self._typeTag == IDLBuiltinType.Types.domstring
            or self._typeTag == IDLBuiltinType.Types.bytestring
            or self._typeTag == IDLBuiltinType.Types.usvstring
            or self._typeTag == IDLBuiltinType.Types.utf8string
            or self._typeTag == IDLBuiltinType.Types.jsstring
        )

    def isByteString(self):
        return self._typeTag == IDLBuiltinType.Types.bytestring

    def isDOMString(self):
        return self._typeTag == IDLBuiltinType.Types.domstring

    def isUSVString(self):
        return self._typeTag == IDLBuiltinType.Types.usvstring

    def isUTF8String(self):
        return self._typeTag == IDLBuiltinType.Types.utf8string

    def isJSString(self):
        return self._typeTag == IDLBuiltinType.Types.jsstring

    def isInteger(self):
        # Relies on the integer types being first in the Types enum.
        return self._typeTag <= IDLBuiltinType.Types.unsigned_long_long

    def isArrayBuffer(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBuffer

    def isArrayBufferView(self):
        return self._typeTag == IDLBuiltinType.Types.ArrayBufferView

    def isTypedArray(self):
        # Relies on the typed-array types forming a contiguous run in the
        # Types enum, from Int8Array through Float64Array.
        return (
            self._typeTag >= IDLBuiltinType.Types.Int8Array
            and self._typeTag <= IDLBuiltinType.Types.Float64Array
        )

    def isReadableStream(self):
        return self._typeTag == IDLBuiltinType.Types.ReadableStream

    def isInterface(self):
        # TypedArray things are interface types per the TypedArray spec,
        # but we handle them as builtins because SpiderMonkey implements
        # all of it internally.
        return (
            self.isArrayBuffer()
            or self.isArrayBufferView()
            or self.isTypedArray()
            or self.isReadableStream()
        )

    def isNonCallbackInterface(self):
        # All the interfaces we can be are non-callback
        return self.isInterface()

    def isFloat(self):
        return (
            self._typeTag == IDLBuiltinType.Types.float
            or self._typeTag == IDLBuiltinType.Types.double
            or self._typeTag == IDLBuiltinType.Types.unrestricted_float
            or self._typeTag == IDLBuiltinType.Types.unrestricted_double
        )

    def isUnrestricted(self):
        assert self.isFloat()
        return (
            self._typeTag == IDLBuiltinType.Types.unrestricted_float
            or self._typeTag == IDLBuiltinType.Types.unrestricted_double
        )

    def isJSONType(self):
        return self.isPrimitive() or self.isString() or self.isObject()

    def includesRestrictedFloat(self):
        return self.isFloat() and not self.isUnrestricted()

    def tag(self):
        return IDLBuiltinType.TagLookup[self._typeTag]

    def isDistinguishableFrom(self, other):
        # Implements the Web IDL "distinguishable" relation for builtins.
        if other.isPromise():
            return False
        if other.isUnion():
            # Just forward to the union; it'll deal
            return other.isDistinguishableFrom(self)
        if self.isUndefined():
            return not (other.isUndefined() or other.isDictionaryLike())
        if self.isPrimitive():
            if (
                other.isUndefined()
                or other.isString()
                or other.isEnum()
                or other.isInterface()
                or other.isObject()
                or other.isCallback()
                or other.isDictionary()
                or other.isSequence()
                or other.isRecord()
            ):
                return True
            if self.isBoolean():
                return other.isNumeric()
            assert self.isNumeric()
            return other.isBoolean()
        if self.isString():
            return (
                other.isUndefined()
                or other.isPrimitive()
                or other.isInterface()
                or other.isObject()
                or other.isCallback()
                or other.isDictionary()
                or other.isSequence()
                or other.isRecord()
            )
        if self.isAny():
            # Can't tell "any" apart from anything
            return False
        if self.isObject():
            return (
                other.isUndefined()
                or other.isPrimitive()
                or other.isString()
                or other.isEnum()
            )
        # Not much else we could be!
        assert self.isSpiderMonkeyInterface()
        # Like interfaces, but we know we're not a callback
        return (
            other.isUndefined()
            or other.isPrimitive()
            or other.isString()
            or other.isEnum()
            or other.isCallback()
            or other.isDictionary()
            or other.isSequence()
            or other.isRecord()
            or (
                other.isInterface()
                and (
                    # ArrayBuffer is distinguishable from everything
                    # that's not an ArrayBuffer or a callback interface
                    (self.isArrayBuffer() and not other.isArrayBuffer())
                    or (self.isReadableStream() and not other.isReadableStream())
                    or
                    # ArrayBufferView is distinguishable from everything
                    # that's not an ArrayBufferView or typed array.
                    (
                        self.isArrayBufferView()
                        and not other.isArrayBufferView()
                        and not other.isTypedArray()
                    )
                    or
                    # Typed arrays are distinguishable from everything
                    # except ArrayBufferView and the same type of typed
                    # array
                    (
                        self.isTypedArray()
                        and not other.isArrayBufferView()
                        and not (other.isTypedArray() and other.name == self.name)
                    )
                )
            )
        )

    def _getDependentObjects(self):
        return set()

    def withExtendedAttributes(self, attrs):
        """Apply type-applicable extended attributes, returning the cached
        annotated variant of this type (or self when attrs is empty).

        Raises WebIDLError for malformed attributes, for [Clamp] combined
        with [EnforceRange], and for attributes applied to unsupported types.
        """
        ret = self
        for attribute in attrs:
            identifier = attribute.identifier()
            if identifier == "Clamp":
                if not attribute.noArguments():
                    raise WebIDLError(
                        "[Clamp] must take no arguments", [attribute.location]
                    )
                if ret.hasEnforceRange() or self._enforceRange:
                    raise WebIDLError(
                        "[EnforceRange] and [Clamp] are mutually exclusive",
                        [self.location, attribute.location],
                    )
                ret = self.clamped([self.location, attribute.location])
            elif identifier == "EnforceRange":
                if not attribute.noArguments():
                    raise WebIDLError(
                        "[EnforceRange] must take no arguments", [attribute.location]
                    )
                if ret.hasClamp() or self._clamp:
                    raise WebIDLError(
                        "[EnforceRange] and [Clamp] are mutually exclusive",
                        [self.location, attribute.location],
                    )
                ret = self.rangeEnforced([self.location, attribute.location])
            elif identifier == "LegacyNullToEmptyString":
                if not (self.isDOMString() or self.isUTF8String()):
                    raise WebIDLError(
                        "[LegacyNullToEmptyString] only allowed on DOMStrings and UTF8Strings",
                        [self.location, attribute.location],
                    )
                assert not self.nullable()
                if attribute.hasValue():
                    raise WebIDLError(
                        "[LegacyNullToEmptyString] must take no identifier argument",
                        [attribute.location],
                    )
                ret = self.withLegacyNullToEmptyString(
                    [self.location, attribute.location]
                )
            elif identifier == "AllowShared":
                if not attribute.noArguments():
                    raise WebIDLError(
                        "[AllowShared] must take no arguments", [attribute.location]
                    )
                if not self.isBufferSource():
                    raise WebIDLError(
                        "[AllowShared] only allowed on buffer source types",
                        [self.location, attribute.location],
                    )
                ret = self.withAllowShared([self.location, attribute.location])

            else:
                raise WebIDLError(
                    "Unhandled extended attribute on type",
                    [self.location, attribute.location],
                )
        return ret
-
-
# Singleton IDLBuiltinType instances, one per builtin type tag, shared by the
# whole parser.  Keyed by IDLBuiltinType.Types tag; every entry is constructed
# with the same synthetic "<builtin type>" location.  Insertion order matters:
# matchIntegerValueToType relies on narrower integer types appearing first.
BuiltinTypes = {
    tag: IDLBuiltinType(BuiltinLocation("<builtin type>"), name, tag)
    for (tag, name) in [
        (IDLBuiltinType.Types.byte, "Byte"),
        (IDLBuiltinType.Types.octet, "Octet"),
        (IDLBuiltinType.Types.short, "Short"),
        (IDLBuiltinType.Types.unsigned_short, "UnsignedShort"),
        (IDLBuiltinType.Types.long, "Long"),
        (IDLBuiltinType.Types.unsigned_long, "UnsignedLong"),
        (IDLBuiltinType.Types.long_long, "LongLong"),
        (IDLBuiltinType.Types.unsigned_long_long, "UnsignedLongLong"),
        (IDLBuiltinType.Types.undefined, "Undefined"),
        (IDLBuiltinType.Types.boolean, "Boolean"),
        (IDLBuiltinType.Types.float, "Float"),
        (IDLBuiltinType.Types.unrestricted_float, "UnrestrictedFloat"),
        (IDLBuiltinType.Types.double, "Double"),
        (IDLBuiltinType.Types.unrestricted_double, "UnrestrictedDouble"),
        (IDLBuiltinType.Types.any, "Any"),
        # Note: the domstring entry is deliberately named "String".
        (IDLBuiltinType.Types.domstring, "String"),
        (IDLBuiltinType.Types.bytestring, "ByteString"),
        (IDLBuiltinType.Types.usvstring, "USVString"),
        (IDLBuiltinType.Types.utf8string, "UTF8String"),
        (IDLBuiltinType.Types.jsstring, "JSString"),
        (IDLBuiltinType.Types.object, "Object"),
        (IDLBuiltinType.Types.ArrayBuffer, "ArrayBuffer"),
        (IDLBuiltinType.Types.ArrayBufferView, "ArrayBufferView"),
        (IDLBuiltinType.Types.Int8Array, "Int8Array"),
        (IDLBuiltinType.Types.Uint8Array, "Uint8Array"),
        (IDLBuiltinType.Types.Uint8ClampedArray, "Uint8ClampedArray"),
        (IDLBuiltinType.Types.Int16Array, "Int16Array"),
        (IDLBuiltinType.Types.Uint16Array, "Uint16Array"),
        (IDLBuiltinType.Types.Int32Array, "Int32Array"),
        (IDLBuiltinType.Types.Uint32Array, "Uint32Array"),
        (IDLBuiltinType.Types.Float32Array, "Float32Array"),
        (IDLBuiltinType.Types.Float64Array, "Float64Array"),
        (IDLBuiltinType.Types.ReadableStream, "ReadableStream"),
    ]
}
-
-
# (minimum, maximum) representable value for each IDL integer type tag,
# derived from the type's bit width and signedness.  Insertion order goes
# narrow-to-wide so matchIntegerValueToType picks the narrowest fitting type.
integerTypeSizes = {
    tag: (0, 2**bits - 1) if unsigned else (-(2 ** (bits - 1)), 2 ** (bits - 1) - 1)
    for (tag, bits, unsigned) in [
        (IDLBuiltinType.Types.byte, 8, False),
        (IDLBuiltinType.Types.octet, 8, True),
        (IDLBuiltinType.Types.short, 16, False),
        (IDLBuiltinType.Types.unsigned_short, 16, True),
        (IDLBuiltinType.Types.long, 32, False),
        (IDLBuiltinType.Types.unsigned_long, 32, True),
        (IDLBuiltinType.Types.long_long, 64, False),
        (IDLBuiltinType.Types.unsigned_long_long, 64, True),
    ]
}
-
-
def matchIntegerValueToType(value):
    """Return the builtin IDLBuiltinType for the narrowest IDL integer type
    that can represent *value*, or None if it fits no IDL integer type.

    Candidates are tried in the insertion order of integerTypeSizes, so
    narrower types win (byte before short before long ...).

    Fixes an idiom problem in the previous version, which shadowed the
    builtins `type`, `min` and `max` with local names.
    """
    for typeTag, (lowest, highest) in integerTypeSizes.items():
        if lowest <= value <= highest:
            return BuiltinTypes[typeTag]

    return None
-
-
class NoCoercionFoundError(WebIDLError):
    """
    A class we use to indicate generic coercion failures because none of the
    types worked out in IDLValue.coerceToType.  Callers that try several
    candidate types (e.g. union member coercion) swallow this error and move
    on to the next candidate, while any other WebIDLError is propagated as a
    real, specific failure.
    """
-
-
class IDLValue(IDLObject):
    """A literal value parsed out of the IDL (a default value or a constant
    value), tagged with the IDLType it currently has."""

    def __init__(self, location, type, value):
        IDLObject.__init__(self, location)
        self.type = type
        assert isinstance(type, IDLType)

        # The raw Python value (int, float, str, bool, ...).
        self.value = value

    def coerceToType(self, type, location):
        """Return an IDLValue usable as a value of *type*.

        Raises a specific WebIDLError when the value cannot be represented as
        *type* (out-of-range integer, precision-losing float, invalid enum
        string, ...), and the generic NoCoercionFoundError when no coercion
        rule applies at all.
        """
        if type == self.type:
            return self  # Nothing to do

        # We first check for unions to ensure that even if the union is nullable
        # we end up with the right flat member type, not the union's type.
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    coercedValue = self.coerceToType(subtype, location)
                    # Create a new IDLValue to make sure that we have the
                    # correct float/double type. This is necessary because we
                    # use the value's type when it is a default value of a
                    # union, and the union cares about the exact float type.
                    return IDLValue(self.location, subtype, coercedValue.value)
                except Exception as e:
                    # Make sure to propagate out WebIDLErrors that are not the
                    # generic "hey, we could not coerce to this type at all"
                    # exception, because those are specific "coercion failed for
                    # reason X" exceptions. Note that we want to swallow
                    # non-WebIDLErrors here, because those can just happen if
                    # "type" is not something that can have a default value at
                    # all.
                    if isinstance(e, WebIDLError) and not isinstance(
                        e, NoCoercionFoundError
                    ):
                        raise e

        # If the type allows null, rerun this matching on the inner type, except
        # nullable enums. We handle those specially, because we want our
        # default string values to stay strings even when assigned to a nullable
        # enum.
        elif type.nullable() and not type.isEnum():
            innerValue = self.coerceToType(type.inner, location)
            return IDLValue(self.location, type, innerValue.value)

        elif self.type.isInteger() and type.isInteger():
            # We're both integer types. See if we fit.

            (min, max) = integerTypeSizes[type._typeTag]
            if self.value <= max and self.value >= min:
                # Promote
                return IDLValue(self.location, type, self.value)
            else:
                raise WebIDLError(
                    "Value %s is out of range for type %s." % (self.value, type),
                    [location],
                )
        elif self.type.isInteger() and type.isFloat():
            # Convert an integer literal into float, but only when the integer
            # is exactly representable (|value| <= 2^24 for single precision).
            if -(2 ** 24) <= self.value <= 2 ** 24:
                return IDLValue(self.location, type, float(self.value))
            else:
                raise WebIDLError(
                    "Converting value %s to %s will lose precision."
                    % (self.value, type),
                    [location],
                )
        elif self.type.isString() and type.isEnum():
            # Just keep our string, but make sure it's a valid value for this enum
            enum = type.unroll().inner
            if self.value not in enum.values():
                raise WebIDLError(
                    "'%s' is not a valid default value for enum %s"
                    % (self.value, enum.identifier.name),
                    [location, enum.location],
                )
            return self
        elif self.type.isFloat() and type.isFloat():
            # Infinities/NaN are only legal for unrestricted float types.
            if not type.isUnrestricted() and (
                self.value == float("inf")
                or self.value == float("-inf")
                or math.isnan(self.value)
            ):
                raise WebIDLError(
                    "Trying to convert unrestricted value %s to non-unrestricted"
                    % self.value,
                    [location],
                )
            return IDLValue(self.location, type, self.value)
        elif self.type.isString() and type.isUSVString():
            # Allow USVStrings to use default value just like
            # DOMString. No coercion is required in this case as Codegen.py
            # treats USVString just like DOMString, but with an
            # extra normalization step.
            assert self.type.isDOMString()
            return self
        elif self.type.isString() and (
            type.isByteString() or type.isJSString() or type.isUTF8String()
        ):
            # Allow ByteStrings, UTF8String, and JSStrings to use a default
            # value like DOMString.
            # No coercion is required as Codegen.py will handle the
            # extra steps. We want to make sure that our string contains
            # only valid characters, so we check that here.
            valid_ascii_lit = (
                " " + string.ascii_letters + string.digits + string.punctuation
            )
            for idx, c in enumerate(self.value):
                if c not in valid_ascii_lit:
                    raise WebIDLError(
                        "Coercing this string literal %s to a ByteString is not supported yet. "
                        "Coercion failed due to an unsupported byte %d at index %d."
                        % (self.value.__repr__(), ord(c), idx),
                        [location],
                    )

            return IDLValue(self.location, type, self.value)
        elif self.type.isDOMString() and type.legacyNullToEmptyString:
            # LegacyNullToEmptyString is a different type for resolution reasons,
            # however once you have a value it doesn't matter
            return self

        # No rule matched (including a union where no member type matched):
        # signal a generic failure the callers above know to swallow.
        raise NoCoercionFoundError(
            "Cannot coerce type %s to type %s." % (self.type, type), [location]
        )

    def _getDependentObjects(self):
        return set()
-
-
class IDLNullValue(IDLObject):
    """The IDL `null` literal used as a default value."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        # Type is assigned during coercion; an uncoerced null has none.
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Coerce null to *type*; only nullable types, unions with a nullable
        member, and `any` accept it."""
        acceptsNull = (
            isinstance(type, IDLNullableType)
            or (type.isUnion() and type.hasNullableType)
            or type.isAny()
        )
        if not acceptsNull:
            raise WebIDLError("Cannot coerce null value to type %s." % type, [location])

        coerced = IDLNullValue(self.location)
        if type.isUnion() and not type.nullable() and type.hasDictionaryType():
            # We're actually a default value for the union's dictionary member.
            # Use its type.
            for member in type.flatMemberTypes:
                if member.isDictionary():
                    coerced.type = member
                    return coerced
        coerced.type = type
        return coerced

    def _getDependentObjects(self):
        return set()
-
-
class IDLEmptySequenceValue(IDLObject):
    """The empty-sequence literal `[]` used as a default value."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        # Type is assigned during coercion; an uncoerced value has none.
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Coerce this empty sequence to *type*, which must be (or, for a
        union, contain) a sequence type.  Raises WebIDLError otherwise."""
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    return self.coerceToType(subtype, location)
                except Exception:
                    # Fixed from a bare "except:", which also swallowed
                    # KeyboardInterrupt/SystemExit.  A failure here just means
                    # this member type doesn't match; try the next one.
                    pass

        if not type.isSequence():
            raise WebIDLError(
                "Cannot coerce empty sequence value to type %s." % type, [location]
            )

        emptySequenceValue = IDLEmptySequenceValue(self.location)
        emptySequenceValue.type = type
        return emptySequenceValue

    def _getDependentObjects(self):
        return set()
-
-
class IDLDefaultDictionaryValue(IDLObject):
    """The default-initialized dictionary literal `{}` used as a default value."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        # Type is assigned during coercion; an uncoerced value has none.
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        """Coerce this default dictionary to *type*, which must be (or, for a
        union, contain) a dictionary type.  Raises WebIDLError otherwise."""
        if type.isUnion():
            # We use the flat member types here, because if we have a nullable
            # member type, or a nested union, we want the type the value
            # actually coerces to, not the nullable or nested union type.
            for subtype in type.unroll().flatMemberTypes:
                try:
                    return self.coerceToType(subtype, location)
                except Exception:
                    # Fixed from a bare "except:", which also swallowed
                    # KeyboardInterrupt/SystemExit.  A failure here just means
                    # this member type doesn't match; try the next one.
                    pass

        if not type.isDictionary():
            raise WebIDLError(
                "Cannot coerce default dictionary value to type %s." % type, [location]
            )

        defaultDictionaryValue = IDLDefaultDictionaryValue(self.location)
        defaultDictionaryValue.type = type
        return defaultDictionaryValue

    def _getDependentObjects(self):
        return set()
-
-
class IDLUndefinedValue(IDLObject):
    """The ECMAScript `undefined` used as a default value."""

    def __init__(self, location):
        IDLObject.__init__(self, location)
        self.type = None
        self.value = None

    def coerceToType(self, type, location):
        # `undefined` is only a legal default for `any`.
        if type.isAny():
            coerced = IDLUndefinedValue(self.location)
            coerced.type = type
            return coerced
        raise WebIDLError(
            "Cannot coerce undefined value to type %s." % type, [location]
        )

    def _getDependentObjects(self):
        return set()
-
-
class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins):
    """Base class for everything that can appear as a member of an interface:
    consts, attributes, methods, and maplike/setlike/iterable declarations.

    Tracks the member's tag, its extended attributes, and the JIT-related
    [Affects]/[DependsOn] metadata.
    """

    Tags = enum(
        "Const", "Attr", "Method", "MaplikeOrSetlike", "AsyncIterable", "Iterable"
    )

    Special = enum("Static", "Stringifier")

    # Legal values for the [Affects] / [DependsOn] extended attributes.
    AffectsValues = ("Nothing", "Everything")
    DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything")

    def __init__(self, location, identifier, tag, extendedAttrDict=None):
        IDLObjectWithIdentifier.__init__(self, location, None, identifier)
        IDLExposureMixins.__init__(self, location)
        self.tag = tag
        if extendedAttrDict is None:
            self._extendedAttrDict = {}
        else:
            self._extendedAttrDict = extendedAttrDict

    def isMethod(self):
        return self.tag == IDLInterfaceMember.Tags.Method

    def isAttr(self):
        return self.tag == IDLInterfaceMember.Tags.Attr

    def isConst(self):
        return self.tag == IDLInterfaceMember.Tags.Const

    def isMaplikeOrSetlikeOrIterable(self):
        return (
            self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike
            or self.tag == IDLInterfaceMember.Tags.AsyncIterable
            or self.tag == IDLInterfaceMember.Tags.Iterable
        )

    def isMaplikeOrSetlike(self):
        return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike

    def addExtendedAttributes(self, attrs):
        for attr in attrs:
            self.handleExtendedAttribute(attr)
            attrlist = attr.listValue()
            # Attributes without a value are stored as True rather than [].
            self._extendedAttrDict[attr.identifier()] = (
                attrlist if len(attrlist) else True
            )

    def handleExtendedAttribute(self, attr):
        # Subclasses override this to process attributes they understand.
        pass

    def getExtendedAttribute(self, name):
        return self._extendedAttrDict.get(name, None)

    def finish(self, scope):
        IDLExposureMixins.finish(self, scope)

    def validate(self):
        # Sanity-check the [Affects]/[DependsOn]/[NewObject]/[Cached]
        # combinations; these only make sense on attributes and methods.
        if self.isAttr() or self.isMethod():
            if self.affects == "Everything" and self.dependsOn != "Everything":
                raise WebIDLError(
                    "Interface member is flagged as affecting "
                    "everything but not depending on everything. "
                    "That seems rather unlikely.",
                    [self.location],
                )

        if self.getExtendedAttribute("NewObject"):
            if self.dependsOn == "Nothing" or self.dependsOn == "DOMState":
                raise WebIDLError(
                    "A [NewObject] method is not idempotent, "
                    "so it has to depend on something other than DOM state.",
                    [self.location],
                )
            if self.getExtendedAttribute("Cached") or self.getExtendedAttribute(
                "StoreInSlot"
            ):
                raise WebIDLError(
                    "A [NewObject] attribute shouldnt be "
                    "[Cached] or [StoreInSlot], since the point "
                    "of those is to keep returning the same "
                    "thing across multiple calls, which is not "
                    "what [NewObject] does.",
                    [self.location],
                )

    def _setDependsOn(self, dependsOn):
        if self.dependsOn != "Everything":
            raise WebIDLError(
                "Trying to specify multiple different DependsOn, "
                "Pure, or Constant extended attributes for "
                "attribute",
                [self.location],
            )
        if dependsOn not in IDLInterfaceMember.DependsOnValues:
            raise WebIDLError(
                "Invalid [DependsOn=%s] on attribute" % dependsOn, [self.location]
            )
        self.dependsOn = dependsOn

    def _setAffects(self, affects):
        if self.affects != "Everything":
            raise WebIDLError(
                "Trying to specify multiple different Affects, "
                "Pure, or Constant extended attributes for "
                "attribute",
                [self.location],
            )
        if affects not in IDLInterfaceMember.AffectsValues:
            # Bug fix: this previously interpolated the undefined name
            # `dependsOn`, raising NameError instead of reporting the
            # invalid [Affects] value.
            raise WebIDLError(
                "Invalid [Affects=%s] on attribute" % affects, [self.location]
            )
        self.affects = affects

    def _addAlias(self, alias):
        if alias in self.aliases:
            raise WebIDLError(
                "Duplicate [Alias=%s] on attribute" % alias, [self.location]
            )
        self.aliases.append(alias)

    def _addBindingAlias(self, bindingAlias):
        if bindingAlias in self.bindingAliases:
            raise WebIDLError(
                "Duplicate [BindingAlias=%s] on attribute" % bindingAlias,
                [self.location],
            )
        self.bindingAliases.append(bindingAlias)
-
-
class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember):
    """Shared base for maplike/setlike/iterable/asynciterable declarations.

    Stores the declared key/value types and implements the machinery that
    expands the declaration into synthesized IDLMethods during interface
    building, plus the member-name collision checks.
    """

    def __init__(self, location, identifier, ifaceType, keyType, valueType, ifaceKind):
        IDLInterfaceMember.__init__(self, location, identifier, ifaceKind)
        if keyType is not None:
            assert isinstance(keyType, IDLType)
        else:
            assert valueType is not None
        assert ifaceType in ["maplike", "setlike", "iterable", "asynciterable"]
        if valueType is not None:
            assert isinstance(valueType, IDLType)
        self.keyType = keyType
        self.valueType = valueType
        self.maplikeOrSetlikeOrIterableType = ifaceType
        # Names reserved by the synthesized methods; filled in by addMethod
        # and consumed by checkCollisions.
        self.disallowedMemberNames = []
        self.disallowedNonMethodNames = []

    def isMaplike(self):
        return self.maplikeOrSetlikeOrIterableType == "maplike"

    def isSetlike(self):
        return self.maplikeOrSetlikeOrIterableType == "setlike"

    def isIterable(self):
        return self.maplikeOrSetlikeOrIterableType == "iterable"

    def isAsyncIterable(self):
        return self.maplikeOrSetlikeOrIterableType == "asynciterable"

    def hasKeyType(self):
        return self.keyType is not None

    def hasValueType(self):
        return self.valueType is not None

    def checkCollisions(self, members, isAncestor):
        """Raise if any of *members* clashes with a name this declaration
        reserves.  Ancestor members are held to the stricter rule."""
        for member in members:
            # Check that there are no disallowed members
            if member.identifier.name in self.disallowedMemberNames and not (
                (member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod())
                or (member.isAttr() and member.isMaplikeOrSetlikeAttr())
            ):
                raise WebIDLError(
                    "Member '%s' conflicts "
                    "with reserved %s name."
                    % (member.identifier.name, self.maplikeOrSetlikeOrIterableType),
                    [self.location, member.location],
                )
            # Check that there are no disallowed non-method members.
            # Ancestor members are always disallowed here; own members
            # are disallowed only if they're non-methods.
            if (
                isAncestor or member.isAttr() or member.isConst()
            ) and member.identifier.name in self.disallowedNonMethodNames:
                raise WebIDLError(
                    "Member '%s' conflicts "
                    "with reserved %s method."
                    % (member.identifier.name, self.maplikeOrSetlikeOrIterableType),
                    [self.location, member.location],
                )

    def addMethod(
        self,
        name,
        members,
        allowExistingOperations,
        returnType,
        args=None,
        chromeOnly=False,
        isPure=False,
        affectsNothing=False,
        newObject=False,
        isIteratorAlias=False,
    ):
        """
        Create an IDLMethod based on the parameters passed in.

        - members is the member list to add this function to, since this is
          called during the member expansion portion of interface object
          building.

        - chromeOnly is only True for read-only js implemented classes, to
          implement underscore prefixed convenience functions which would
          otherwise not be available, unlike the case of C++ bindings.

        - isPure is only True for idempotent functions, so it is not valid for
          things like keys, values, etc. that return a new object every time.

        - affectsNothing means that nothing changes due to this method, which
          affects JIT optimization behavior

        - newObject means the method creates and returns a new object.

        """
        # Fixed from a mutable default argument (args=[]): the list ends up
        # stored on the created IDLMethod, so every call needs a fresh one.
        if args is None:
            args = []
        # Only add name to lists for collision checks if it's not chrome
        # only.
        if chromeOnly:
            name = "__" + name
        else:
            if not allowExistingOperations:
                self.disallowedMemberNames.append(name)
            else:
                self.disallowedNonMethodNames.append(name)
        # If allowExistingOperations is True, and another operation exists
        # with the same name as the one we're trying to add, don't add the
        # maplike/setlike operation. However, if the operation is static,
        # then fail by way of creating the function, which will cause a
        # naming conflict, per the spec.
        if allowExistingOperations:
            for m in members:
                if m.identifier.name == name and m.isMethod() and not m.isStatic():
                    return
        method = IDLMethod(
            self.location,
            IDLUnresolvedIdentifier(
                self.location, name, allowDoubleUnderscore=chromeOnly
            ),
            returnType,
            args,
            maplikeOrSetlikeOrIterable=self,
        )
        # We need to be able to throw from declaration methods
        method.addExtendedAttributes([IDLExtendedAttribute(self.location, ("Throws",))])
        if chromeOnly:
            method.addExtendedAttributes(
                [IDLExtendedAttribute(self.location, ("ChromeOnly",))]
            )
        if isPure:
            method.addExtendedAttributes(
                [IDLExtendedAttribute(self.location, ("Pure",))]
            )
        # Following attributes are used for keys/values/entries. Can't mark
        # them pure, since they return a new object each time they are run.
        if affectsNothing:
            method.addExtendedAttributes(
                [
                    IDLExtendedAttribute(self.location, ("DependsOn", "Everything")),
                    IDLExtendedAttribute(self.location, ("Affects", "Nothing")),
                ]
            )
        if newObject:
            method.addExtendedAttributes(
                [IDLExtendedAttribute(self.location, ("NewObject",))]
            )
        if isIteratorAlias:
            if not self.isAsyncIterable():
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))]
                )
            else:
                method.addExtendedAttributes(
                    [IDLExtendedAttribute(self.location, ("Alias", "@@asyncIterator"))]
                )
        members.append(method)

    def resolve(self, parentScope):
        if self.keyType:
            self.keyType.resolveType(parentScope)
        if self.valueType:
            self.valueType.resolveType(parentScope)

    def finish(self, scope):
        IDLInterfaceMember.finish(self, scope)
        if self.keyType and not self.keyType.isComplete():
            t = self.keyType.complete(scope)

            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.keyType = t
        if self.valueType and not self.valueType.isComplete():
            t = self.valueType.complete(scope)

            assert not isinstance(t, IDLUnresolvedType)
            assert not isinstance(t, IDLTypedefType)
            assert not isinstance(t.name, IDLUnresolvedIdentifier)
            self.valueType = t

    def validate(self):
        IDLInterfaceMember.validate(self)

    def handleExtendedAttribute(self, attr):
        IDLInterfaceMember.handleExtendedAttribute(self, attr)

    def _getDependentObjects(self):
        deps = set()
        if self.keyType:
            deps.add(self.keyType)
        if self.valueType:
            deps.add(self.valueType)
        return deps

    def getForEachArguments(self):
        """Arguments for the synthesized forEach(callback, thisArg) method."""
        return [
            IDLArgument(
                self.location,
                IDLUnresolvedIdentifier(
                    BuiltinLocation("<auto-generated-identifier>"), "callback"
                ),
                BuiltinTypes[IDLBuiltinType.Types.object],
            ),
            IDLArgument(
                self.location,
                IDLUnresolvedIdentifier(
                    BuiltinLocation("<auto-generated-identifier>"), "thisArg"
                ),
                BuiltinTypes[IDLBuiltinType.Types.any],
                optional=True,
            ),
        ]
-
-
# Iterable adds ES6 iterator style functions and traits
# (keys/values/entries/@@iterator) to an interface.
class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase):
    def __init__(self, location, identifier, keyType, valueType, scope):
        IDLMaplikeOrSetlikeOrIterableBase.__init__(
            self,
            location,
            identifier,
            "iterable",
            keyType,
            valueType,
            IDLInterfaceMember.Tags.Iterable,
        )
        # Set later, once the iterator interface type is known.
        self.iteratorType = None

    def __str__(self):
        return "declared iterable with key '%s' and value '%s'" % (
            self.keyType,
            self.valueType,
        )

    def expand(self, members):
        """
        In order to take advantage of all of the method machinery in Codegen,
        we generate our functions as if they were part of the interface
        specification during parsing.
        """
        if not self.isPairIterator():
            # Value iterators copy entries/keys/values from %ArrayPrototype%
            # instead, so there is nothing to synthesize.
            return

        # entries/keys/values all return a fresh iterator object and have no
        # side effects; entries is additionally aliased as @@iterator.
        for methodName, aliased in (
            ("entries", True),
            ("keys", False),
            ("values", False),
        ):
            self.addMethod(
                methodName,
                members,
                False,
                self.iteratorType,
                affectsNothing=True,
                newObject=True,
                isIteratorAlias=aliased,
            )

        # undefined forEach(callback(valueType, keyType), optional any thisArg)
        self.addMethod(
            "forEach",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.undefined],
            self.getForEachArguments(),
        )

    def isValueIterator(self):
        return not self.isPairIterator()

    def isPairIterator(self):
        return self.hasKeyType()
-
-
class IDLAsyncIterable(IDLMaplikeOrSetlikeOrIterableBase):
    """An `async iterable<...>` declaration, expanded into values/entries/keys
    methods during interface building."""

    def __init__(self, location, identifier, keyType, valueType, argList, scope):
        # Per spec, every argument of an async iterable declaration must be
        # optional.
        for arg in argList:
            if not arg.optional:
                raise WebIDLError(
                    "The arguments of the asynchronously iterable declaration on "
                    "%s must all be optional arguments." % identifier,
                    [arg.location],
                )

        IDLMaplikeOrSetlikeOrIterableBase.__init__(
            self,
            location,
            identifier,
            "asynciterable",
            keyType,
            valueType,
            IDLInterfaceMember.Tags.AsyncIterable,
        )
        # Set later, once the async iterator interface type is known.
        self.iteratorType = None
        self.argList = argList

    def __str__(self):
        return "declared async iterable with key '%s' and value '%s'" % (
            self.keyType,
            self.valueType,
        )

    def expand(self, members):
        """
        In order to take advantage of all of the method machinery in Codegen,
        we generate our functions as if they were part of the interface
        specification during parsing.
        """
        # object values() -- @@asyncIterator for value iterators.
        self.addMethod(
            "values",
            members,
            False,
            self.iteratorType,
            self.argList,
            affectsNothing=True,
            newObject=True,
            isIteratorAlias=(not self.isPairIterator()),
        )

        # We only need to add entries/keys here if we're a pair iterator.
        if not self.isPairIterator():
            return

        # Methods can't share their IDLArguments, so we need to make copies here.
        def copyArgList(argList):
            # Fixed: previously returned a single-use map() iterator; return a
            # real list so the copied arguments survive repeated iteration.
            return [copy.copy(arg) for arg in argList]

        # object entries() -- @@asyncIterator for pair iterators.
        self.addMethod(
            "entries",
            members,
            False,
            self.iteratorType,
            copyArgList(self.argList),
            affectsNothing=True,
            newObject=True,
            isIteratorAlias=True,
        )
        # object keys()
        self.addMethod(
            "keys",
            members,
            False,
            self.iteratorType,
            copyArgList(self.argList),
            affectsNothing=True,
            newObject=True,
        )

    def isValueIterator(self):
        return not self.isPairIterator()

    def isPairIterator(self):
        return self.hasKeyType()
-
-
# MaplikeOrSetlike adds ES6 map-or-set-like traits to an interface.
class IDLMaplikeOrSetlike(IDLMaplikeOrSetlikeOrIterableBase):
    """A `maplike<K, V>` or `setlike<K>` declaration; expand() synthesizes the
    size attribute and the entries/keys/values/forEach/has (+ mutators when
    not readonly) members."""

    def __init__(
        self, location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType
    ):
        IDLMaplikeOrSetlikeOrIterableBase.__init__(
            self,
            location,
            identifier,
            maplikeOrSetlikeType,
            keyType,
            valueType,
            IDLInterfaceMember.Tags.MaplikeOrSetlike,
        )
        self.readonly = readonly
        self.slotIndices = None

        # When generating JSAPI access code, we need to know the backing object
        # type prefix to create the correct function. Generate here for reuse.
        if self.isMaplike():
            self.prefix = "Map"
        elif self.isSetlike():
            self.prefix = "Set"

    def __str__(self):
        return "declared '%s' with key '%s'" % (
            self.maplikeOrSetlikeOrIterableType,
            self.keyType,
        )

    def expand(self, members):
        """
        In order to take advantage of all of the method machinery in Codegen,
        we generate our functions as if they were part of the interface
        specification during parsing.
        """
        # Both maplike and setlike have a size attribute
        members.append(
            IDLAttribute(
                self.location,
                IDLUnresolvedIdentifier(
                    BuiltinLocation("<auto-generated-identifier>"), "size"
                ),
                BuiltinTypes[IDLBuiltinType.Types.unsigned_long],
                True,
                maplikeOrSetlike=self,
            )
        )
        self.reserved_ro_names = ["size"]
        self.disallowedMemberNames.append("size")

        # object entries() -- @@iterator alias only for maplike.
        self.addMethod(
            "entries",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.object],
            affectsNothing=True,
            isIteratorAlias=self.isMaplike(),
        )
        # object keys()
        self.addMethod(
            "keys",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.object],
            affectsNothing=True,
        )
        # object values() -- @@iterator alias only for setlike.
        self.addMethod(
            "values",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.object],
            affectsNothing=True,
            isIteratorAlias=self.isSetlike(),
        )

        # undefined forEach(callback(valueType, keyType), thisVal)
        self.addMethod(
            "forEach",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.undefined],
            self.getForEachArguments(),
        )

        def getKeyArg():
            # Fresh IDLArgument each time; methods can't share argument objects.
            return IDLArgument(
                self.location,
                IDLUnresolvedIdentifier(self.location, "key"),
                self.keyType,
            )

        # boolean has(keyType key)
        self.addMethod(
            "has",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.boolean],
            [getKeyArg()],
            isPure=True,
        )

        if not self.readonly:
            # undefined clear()
            self.addMethod(
                "clear", members, True, BuiltinTypes[IDLBuiltinType.Types.undefined], []
            )
            # boolean delete(keyType key)
            self.addMethod(
                "delete",
                members,
                True,
                BuiltinTypes[IDLBuiltinType.Types.boolean],
                [getKeyArg()],
            )

        if self.isSetlike():
            if not self.readonly:
                # Add returns the set object it just added to.
                # object add(keyType key)

                self.addMethod(
                    "add",
                    members,
                    True,
                    BuiltinTypes[IDLBuiltinType.Types.object],
                    [getKeyArg()],
                )
            return

        # If we get this far, we're a maplike declaration.

        # valueType get(keyType key)
        #
        # Note that instead of the value type, we're using any here. The
        # validity checks should happen as things are inserted into the map,
        # and using any as the return type makes code generation much simpler.
        #
        # TODO: Bug 1155340 may change this to use specific type to provide
        # more info to JIT.
        self.addMethod(
            "get",
            members,
            False,
            BuiltinTypes[IDLBuiltinType.Types.any],
            [getKeyArg()],
            isPure=True,
        )

        def getValueArg():
            # Fresh IDLArgument each time; methods can't share argument objects.
            return IDLArgument(
                self.location,
                IDLUnresolvedIdentifier(self.location, "value"),
                self.valueType,
            )

        if not self.readonly:
            # object set(keyType key, valueType value)
            self.addMethod(
                "set",
                members,
                True,
                BuiltinTypes[IDLBuiltinType.Types.object],
                [getKeyArg(), getValueArg()],
            )
-
-
class IDLConst(IDLInterfaceMember):
    """An IDL `const` member: a primitive- or string-typed value fixed at
    parse time."""

    def __init__(self, location, identifier, type, value):
        IDLInterfaceMember.__init__(
            self, location, identifier, IDLInterfaceMember.Tags.Const
        )

        assert isinstance(type, IDLType)
        if type.isDictionary():
            raise WebIDLError(
                "A constant cannot be of a dictionary type", [self.location]
            )
        if type.isRecord():
            raise WebIDLError("A constant cannot be of a record type", [self.location])
        self.type = type
        self.value = value

        if identifier.name == "prototype":
            raise WebIDLError(
                "The identifier of a constant must not be 'prototype'", [location]
            )

    def __str__(self):
        return "'%s' const '%s'" % (self.type, self.identifier)

    def finish(self, scope):
        """Complete the type, check it is a legal const type, and coerce the
        value to it."""
        IDLInterfaceMember.finish(self, scope)

        if not self.type.isComplete():
            type = self.type.complete(scope)
            if not type.isPrimitive() and not type.isString():
                locations = [self.type.location, type.location]
                try:
                    locations.append(type.inner.location)
                except Exception:
                    # Fixed from a bare "except:".  The inner location is a
                    # best-effort extra diagnostic and may simply not exist.
                    pass
                raise WebIDLError("Incorrect type for constant", locations)
            self.type = type

        # The value might not match the type
        coercedValue = self.value.coerceToType(self.type, self.location)
        assert coercedValue

        self.value = coercedValue

    def validate(self):
        IDLInterfaceMember.validate(self)

    def handleExtendedAttribute(self, attr):
        identifier = attr.identifier()
        if identifier == "Exposed":
            convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
        elif (
            identifier == "Pref"
            or identifier == "ChromeOnly"
            or identifier == "Func"
            or identifier == "Trial"
            or identifier == "SecureContext"
            or identifier == "NonEnumerable"
        ):
            # Known attributes that we don't need to do anything with here
            pass
        else:
            raise WebIDLError(
                "Unknown extended attribute %s on constant" % identifier,
                [attr.location],
            )
        IDLInterfaceMember.handleExtendedAttribute(self, attr)

    def _getDependentObjects(self):
        return set([self.type, self.value])
-
-
-class IDLAttribute(IDLInterfaceMember):
- def __init__(
- self,
- location,
- identifier,
- type,
- readonly,
- inherit=False,
- static=False,
- stringifier=False,
- maplikeOrSetlike=None,
- extendedAttrDict=None,
- ):
- IDLInterfaceMember.__init__(
- self,
- location,
- identifier,
- IDLInterfaceMember.Tags.Attr,
- extendedAttrDict=extendedAttrDict,
- )
-
- assert isinstance(type, IDLType)
- self.type = type
- self.readonly = readonly
- self.inherit = inherit
- self._static = static
- self.legacyLenientThis = False
- self._legacyUnforgeable = False
- self.stringifier = stringifier
- self.slotIndices = None
- assert maplikeOrSetlike is None or isinstance(
- maplikeOrSetlike, IDLMaplikeOrSetlike
- )
- self.maplikeOrSetlike = maplikeOrSetlike
- self.dependsOn = "Everything"
- self.affects = "Everything"
- self.bindingAliases = []
-
- if static and identifier.name == "prototype":
- raise WebIDLError(
- "The identifier of a static attribute must not be 'prototype'",
- [location],
- )
-
- if readonly and inherit:
- raise WebIDLError(
- "An attribute cannot be both 'readonly' and 'inherit'", [self.location]
- )
-
- def isStatic(self):
- return self._static
-
- def forceStatic(self):
- self._static = True
-
- def __str__(self):
- return "'%s' attribute '%s'" % (self.type, self.identifier)
-
- def finish(self, scope):
- IDLInterfaceMember.finish(self, scope)
-
- if not self.type.isComplete():
- t = self.type.complete(scope)
-
- assert not isinstance(t, IDLUnresolvedType)
- assert not isinstance(t, IDLTypedefType)
- assert not isinstance(t.name, IDLUnresolvedIdentifier)
- self.type = t
-
- if self.readonly and (
- self.type.hasClamp()
- or self.type.hasEnforceRange()
- or self.type.hasAllowShared()
- or self.type.legacyNullToEmptyString
- ):
- raise WebIDLError(
- "A readonly attribute cannot be [Clamp] or [EnforceRange] or [AllowShared]",
- [self.location],
- )
- if self.type.isDictionary() and not self.getExtendedAttribute("Cached"):
- raise WebIDLError(
- "An attribute cannot be of a dictionary type", [self.location]
- )
- if self.type.isSequence() and not self.getExtendedAttribute("Cached"):
- raise WebIDLError(
- "A non-cached attribute cannot be of a sequence " "type",
- [self.location],
- )
- if self.type.isRecord() and not self.getExtendedAttribute("Cached"):
- raise WebIDLError(
- "A non-cached attribute cannot be of a record " "type", [self.location]
- )
- if self.type.isUnion():
- for f in self.type.unroll().flatMemberTypes:
- if f.isDictionary():
- raise WebIDLError(
- "An attribute cannot be of a union "
- "type if one of its member types (or "
- "one of its member types's member "
- "types, and so on) is a dictionary "
- "type",
- [self.location, f.location],
- )
- if f.isSequence():
- raise WebIDLError(
- "An attribute cannot be of a union "
- "type if one of its member types (or "
- "one of its member types's member "
- "types, and so on) is a sequence "
- "type",
- [self.location, f.location],
- )
- if f.isRecord():
- raise WebIDLError(
- "An attribute cannot be of a union "
- "type if one of its member types (or "
- "one of its member types's member "
- "types, and so on) is a record "
- "type",
- [self.location, f.location],
- )
- if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"):
- raise WebIDLError(
- "An attribute with [PutForwards] must have an "
- "interface type as its type",
- [self.location],
- )
-
- if not self.type.isInterface() and self.getExtendedAttribute("SameObject"):
- raise WebIDLError(
- "An attribute with [SameObject] must have an "
- "interface type as its type",
- [self.location],
- )
-
- if self.type.isPromise() and not self.readonly:
- raise WebIDLError(
- "Promise-returning attributes must be readonly", [self.location]
- )
-
- if self.type.isObservableArray():
- if self.isStatic():
- raise WebIDLError(
- "A static attribute cannot have an ObservableArray type",
- [self.location],
- )
- if self.getExtendedAttribute("Cached") or self.getExtendedAttribute(
- "StoreInSlot"
- ):
- raise WebIDLError(
- "[Cached] and [StoreInSlot] must not be used "
- "on an attribute whose type is ObservableArray",
- [self.location],
- )
-
- def validate(self):
- def typeContainsChromeOnlyDictionaryMember(type):
- if type.nullable() or type.isSequence() or type.isRecord():
- return typeContainsChromeOnlyDictionaryMember(type.inner)
-
- if type.isUnion():
- for memberType in type.flatMemberTypes:
- (contains, location) = typeContainsChromeOnlyDictionaryMember(
- memberType
- )
- if contains:
- return (True, location)
-
- if type.isDictionary():
- dictionary = type.inner
- while dictionary:
- (contains, location) = dictionaryContainsChromeOnlyMember(
- dictionary
- )
- if contains:
- return (True, location)
- dictionary = dictionary.parent
-
- return (False, None)
-
- def dictionaryContainsChromeOnlyMember(dictionary):
- for member in dictionary.members:
- if member.getExtendedAttribute("ChromeOnly"):
- return (True, member.location)
- (contains, location) = typeContainsChromeOnlyDictionaryMember(
- member.type
- )
- if contains:
- return (True, location)
- return (False, None)
-
- IDLInterfaceMember.validate(self)
-
- if self.getExtendedAttribute("Cached") or self.getExtendedAttribute(
- "StoreInSlot"
- ):
- if not self.affects == "Nothing":
- raise WebIDLError(
- "Cached attributes and attributes stored in "
- "slots must be Constant or Pure or "
- "Affects=Nothing, since the getter won't always "
- "be called.",
- [self.location],
- )
- (contains, location) = typeContainsChromeOnlyDictionaryMember(self.type)
- if contains:
- raise WebIDLError(
- "[Cached] and [StoreInSlot] must not be used "
- "on an attribute whose type contains a "
- "[ChromeOnly] dictionary member",
- [self.location, location],
- )
- if self.getExtendedAttribute("Frozen"):
- if (
- not self.type.isSequence()
- and not self.type.isDictionary()
- and not self.type.isRecord()
- ):
- raise WebIDLError(
- "[Frozen] is only allowed on "
- "sequence-valued, dictionary-valued, and "
- "record-valued attributes",
- [self.location],
- )
- if not self.type.unroll().isExposedInAllOf(self.exposureSet):
- raise WebIDLError(
- "Attribute returns a type that is not exposed "
- "everywhere where the attribute is exposed",
- [self.location],
- )
- if self.getExtendedAttribute("CEReactions"):
- if self.readonly:
- raise WebIDLError(
- "[CEReactions] is not allowed on " "readonly attributes",
- [self.location],
- )
-
- def handleExtendedAttribute(self, attr):
- identifier = attr.identifier()
- if (
- identifier == "SetterThrows"
- or identifier == "SetterCanOOM"
- or identifier == "SetterNeedsSubjectPrincipal"
- ) and self.readonly:
- raise WebIDLError(
- "Readonly attributes must not be flagged as " "[%s]" % identifier,
- [self.location],
- )
- elif identifier == "BindingAlias":
- if not attr.hasValue():
- raise WebIDLError(
- "[BindingAlias] takes an identifier or string", [attr.location]
- )
- self._addBindingAlias(attr.value())
- elif (
- (
- identifier == "Throws"
- or identifier == "GetterThrows"
- or identifier == "CanOOM"
- or identifier == "GetterCanOOM"
- )
- and self.getExtendedAttribute("StoreInSlot")
- ) or (
- identifier == "StoreInSlot"
- and (
- self.getExtendedAttribute("Throws")
- or self.getExtendedAttribute("GetterThrows")
- or self.getExtendedAttribute("CanOOM")
- or self.getExtendedAttribute("GetterCanOOM")
- )
- ):
- raise WebIDLError("Throwing things can't be [StoreInSlot]", [attr.location])
- elif identifier == "LegacyLenientThis":
- if not attr.noArguments():
- raise WebIDLError(
- "[LegacyLenientThis] must take no arguments", [attr.location]
- )
- if self.isStatic():
- raise WebIDLError(
- "[LegacyLenientThis] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("CrossOriginReadable"):
- raise WebIDLError(
- "[LegacyLenientThis] is not allowed in combination "
- "with [CrossOriginReadable]",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("CrossOriginWritable"):
- raise WebIDLError(
- "[LegacyLenientThis] is not allowed in combination "
- "with [CrossOriginWritable]",
- [attr.location, self.location],
- )
- self.legacyLenientThis = True
- elif identifier == "LegacyUnforgeable":
- if self.isStatic():
- raise WebIDLError(
- "[LegacyUnforgeable] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- self._legacyUnforgeable = True
- elif identifier == "SameObject" and not self.readonly:
- raise WebIDLError(
- "[SameObject] only allowed on readonly attributes",
- [attr.location, self.location],
- )
- elif identifier == "Constant" and not self.readonly:
- raise WebIDLError(
- "[Constant] only allowed on readonly attributes",
- [attr.location, self.location],
- )
- elif identifier == "PutForwards":
- if not self.readonly:
- raise WebIDLError(
- "[PutForwards] is only allowed on readonly " "attributes",
- [attr.location, self.location],
- )
- if self.type.isPromise():
- raise WebIDLError(
- "[PutForwards] is not allowed on " "Promise-typed attributes",
- [attr.location, self.location],
- )
- if self.isStatic():
- raise WebIDLError(
- "[PutForwards] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("Replaceable") is not None:
- raise WebIDLError(
- "[PutForwards] and [Replaceable] can't both "
- "appear on the same attribute",
- [attr.location, self.location],
- )
- if not attr.hasValue():
- raise WebIDLError(
- "[PutForwards] takes an identifier", [attr.location, self.location]
- )
- elif identifier == "Replaceable":
- if not attr.noArguments():
- raise WebIDLError(
- "[Replaceable] must take no arguments", [attr.location]
- )
- if not self.readonly:
- raise WebIDLError(
- "[Replaceable] is only allowed on readonly " "attributes",
- [attr.location, self.location],
- )
- if self.type.isPromise():
- raise WebIDLError(
- "[Replaceable] is not allowed on " "Promise-typed attributes",
- [attr.location, self.location],
- )
- if self.isStatic():
- raise WebIDLError(
- "[Replaceable] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("PutForwards") is not None:
- raise WebIDLError(
- "[PutForwards] and [Replaceable] can't both "
- "appear on the same attribute",
- [attr.location, self.location],
- )
- elif identifier == "LegacyLenientSetter":
- if not attr.noArguments():
- raise WebIDLError(
- "[LegacyLenientSetter] must take no arguments", [attr.location]
- )
- if not self.readonly:
- raise WebIDLError(
- "[LegacyLenientSetter] is only allowed on readonly " "attributes",
- [attr.location, self.location],
- )
- if self.type.isPromise():
- raise WebIDLError(
- "[LegacyLenientSetter] is not allowed on "
- "Promise-typed attributes",
- [attr.location, self.location],
- )
- if self.isStatic():
- raise WebIDLError(
- "[LegacyLenientSetter] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("PutForwards") is not None:
- raise WebIDLError(
- "[LegacyLenientSetter] and [PutForwards] can't both "
- "appear on the same attribute",
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("Replaceable") is not None:
- raise WebIDLError(
- "[LegacyLenientSetter] and [Replaceable] can't both "
- "appear on the same attribute",
- [attr.location, self.location],
- )
- elif identifier == "LenientFloat":
- if self.readonly:
- raise WebIDLError(
- "[LenientFloat] used on a readonly attribute",
- [attr.location, self.location],
- )
- if not self.type.includesRestrictedFloat():
- raise WebIDLError(
- "[LenientFloat] used on an attribute with a "
- "non-restricted-float type",
- [attr.location, self.location],
- )
- elif identifier == "StoreInSlot":
- if self.getExtendedAttribute("Cached"):
- raise WebIDLError(
- "[StoreInSlot] and [Cached] must not be "
- "specified on the same attribute",
- [attr.location, self.location],
- )
- elif identifier == "Cached":
- if self.getExtendedAttribute("StoreInSlot"):
- raise WebIDLError(
- "[Cached] and [StoreInSlot] must not be "
- "specified on the same attribute",
- [attr.location, self.location],
- )
- elif identifier == "CrossOriginReadable" or identifier == "CrossOriginWritable":
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must take no arguments" % identifier, [attr.location]
- )
- if self.isStatic():
- raise WebIDLError(
- "[%s] is only allowed on non-static " "attributes" % identifier,
- [attr.location, self.location],
- )
- if self.getExtendedAttribute("LegacyLenientThis"):
- raise WebIDLError(
- "[LegacyLenientThis] is not allowed in combination "
- "with [%s]" % identifier,
- [attr.location, self.location],
- )
- elif identifier == "Exposed":
- convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
- elif identifier == "Pure":
- if not attr.noArguments():
- raise WebIDLError("[Pure] must take no arguments", [attr.location])
- self._setDependsOn("DOMState")
- self._setAffects("Nothing")
- elif identifier == "Constant" or identifier == "SameObject":
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must take no arguments" % identifier, [attr.location]
- )
- self._setDependsOn("Nothing")
- self._setAffects("Nothing")
- elif identifier == "Affects":
- if not attr.hasValue():
- raise WebIDLError("[Affects] takes an identifier", [attr.location])
- self._setAffects(attr.value())
- elif identifier == "DependsOn":
- if not attr.hasValue():
- raise WebIDLError("[DependsOn] takes an identifier", [attr.location])
- if (
- attr.value() != "Everything"
- and attr.value() != "DOMState"
- and not self.readonly
- ):
- raise WebIDLError(
- "[DependsOn=%s] only allowed on "
- "readonly attributes" % attr.value(),
- [attr.location, self.location],
- )
- self._setDependsOn(attr.value())
- elif identifier == "UseCounter":
- if self.stringifier:
- raise WebIDLError(
- "[UseCounter] must not be used on a " "stringifier attribute",
- [attr.location, self.location],
- )
- elif identifier == "Unscopable":
- if not attr.noArguments():
- raise WebIDLError(
- "[Unscopable] must take no arguments", [attr.location]
- )
- if self.isStatic():
- raise WebIDLError(
- "[Unscopable] is only allowed on non-static "
- "attributes and operations",
- [attr.location, self.location],
- )
- elif identifier == "CEReactions":
- if not attr.noArguments():
- raise WebIDLError(
- "[CEReactions] must take no arguments", [attr.location]
- )
- elif (
- identifier == "Pref"
- or identifier == "Deprecated"
- or identifier == "SetterThrows"
- or identifier == "Throws"
- or identifier == "GetterThrows"
- or identifier == "SetterCanOOM"
- or identifier == "CanOOM"
- or identifier == "GetterCanOOM"
- or identifier == "ChromeOnly"
- or identifier == "Func"
- or identifier == "Trial"
- or identifier == "SecureContext"
- or identifier == "Frozen"
- or identifier == "NewObject"
- or identifier == "NeedsSubjectPrincipal"
- or identifier == "SetterNeedsSubjectPrincipal"
- or identifier == "GetterNeedsSubjectPrincipal"
- or identifier == "NeedsCallerType"
- or identifier == "ReturnValueNeedsContainsHack"
- or identifier == "BinaryName"
- or identifier == "NonEnumerable"
- ):
- # Known attributes that we don't need to do anything with here
- pass
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on attribute" % identifier,
- [attr.location],
- )
- IDLInterfaceMember.handleExtendedAttribute(self, attr)
-
- def resolve(self, parentScope):
- assert isinstance(parentScope, IDLScope)
- self.type.resolveType(parentScope)
- IDLObjectWithIdentifier.resolve(self, parentScope)
-
- def hasLegacyLenientThis(self):
- return self.legacyLenientThis
-
- def isMaplikeOrSetlikeAttr(self):
- """
- True if this attribute was generated from an interface with
- maplike/setlike (e.g. this is the size attribute for
- maplike/setlike)
- """
- return self.maplikeOrSetlike is not None
-
- def isLegacyUnforgeable(self):
- return self._legacyUnforgeable
-
- def _getDependentObjects(self):
- return set([self.type])
-
- def expand(self, members):
- assert self.stringifier
- if (
- not self.type.isDOMString()
- and not self.type.isUSVString()
- and not self.type.isUTF8String()
- ):
- raise WebIDLError(
- "The type of a stringifer attribute must be "
- "either DOMString, USVString or UTF8String",
- [self.location],
- )
- identifier = IDLUnresolvedIdentifier(
- self.location, "__stringifier", allowDoubleUnderscore=True
- )
- method = IDLMethod(
- self.location,
- identifier,
- returnType=self.type,
- arguments=[],
- stringifier=True,
- underlyingAttr=self,
- )
- allowedExtAttrs = ["Throws", "NeedsSubjectPrincipal", "Pure"]
- # Safe to ignore these as they are only meaningful for attributes
- attributeOnlyExtAttrs = [
- "CEReactions",
- "CrossOriginWritable",
- "SetterThrows",
- ]
- for (key, value) in self._extendedAttrDict.items():
- if key in allowedExtAttrs:
- if value is not True:
- raise WebIDLError(
- "[%s] with a value is currently "
- "unsupported in stringifier attributes, "
- "please file a bug to add support" % key,
- [self.location],
- )
- method.addExtendedAttributes(
- [IDLExtendedAttribute(self.location, (key,))]
- )
- elif not key in attributeOnlyExtAttrs:
- raise WebIDLError(
- "[%s] is currently unsupported in "
- "stringifier attributes, please file a bug "
- "to add support" % key,
- [self.location],
- )
- members.append(method)
-
-
-class IDLArgument(IDLObjectWithIdentifier):
- def __init__(
- self,
- location,
- identifier,
- type,
- optional=False,
- defaultValue=None,
- variadic=False,
- dictionaryMember=False,
- allowTypeAttributes=False,
- ):
- IDLObjectWithIdentifier.__init__(self, location, None, identifier)
-
- assert isinstance(type, IDLType)
- self.type = type
-
- self.optional = optional
- self.defaultValue = defaultValue
- self.variadic = variadic
- self.dictionaryMember = dictionaryMember
- self._isComplete = False
- self._allowTreatNonCallableAsNull = False
- self._extendedAttrDict = {}
- self.allowTypeAttributes = allowTypeAttributes
-
- assert not variadic or optional
- assert not variadic or not defaultValue
-
- def addExtendedAttributes(self, attrs):
- for attribute in attrs:
- identifier = attribute.identifier()
- if self.allowTypeAttributes and (
- identifier == "EnforceRange"
- or identifier == "Clamp"
- or identifier == "LegacyNullToEmptyString"
- or identifier == "AllowShared"
- ):
- self.type = self.type.withExtendedAttributes([attribute])
- elif identifier == "TreatNonCallableAsNull":
- self._allowTreatNonCallableAsNull = True
- elif self.dictionaryMember and (
- identifier == "ChromeOnly"
- or identifier == "Func"
- or identifier == "Trial"
- or identifier == "Pref"
- ):
- if not self.optional:
- raise WebIDLError(
- "[%s] must not be used on a required "
- "dictionary member" % identifier,
- [attribute.location],
- )
- else:
- raise WebIDLError(
- "Unhandled extended attribute on %s"
- % (
- "a dictionary member"
- if self.dictionaryMember
- else "an argument"
- ),
- [attribute.location],
- )
- attrlist = attribute.listValue()
- self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True
-
- def getExtendedAttribute(self, name):
- return self._extendedAttrDict.get(name, None)
-
- def isComplete(self):
- return self._isComplete
-
- def complete(self, scope):
- if self._isComplete:
- return
-
- self._isComplete = True
-
- if not self.type.isComplete():
- type = self.type.complete(scope)
- assert not isinstance(type, IDLUnresolvedType)
- assert not isinstance(type, IDLTypedefType)
- assert not isinstance(type.name, IDLUnresolvedIdentifier)
- self.type = type
-
- if self.type.isUndefined():
- raise WebIDLError(
- "undefined must not be used as the type of an argument in any circumstance",
- [self.location],
- )
-
- if self.type.isAny():
- assert self.defaultValue is None or isinstance(
- self.defaultValue, IDLNullValue
- )
- # optional 'any' values always have a default value
- if self.optional and not self.defaultValue and not self.variadic:
- # Set the default value to undefined, for simplicity, so the
- # codegen doesn't have to special-case this.
- self.defaultValue = IDLUndefinedValue(self.location)
-
- if self.dictionaryMember and self.type.legacyNullToEmptyString:
- raise WebIDLError(
- "Dictionary members cannot be [LegacyNullToEmptyString]",
- [self.location],
- )
- if self.type.isObservableArray():
- raise WebIDLError(
- "%s cannot have an ObservableArray type"
- % ("Dictionary members" if self.dictionaryMember else "Arguments"),
- [self.location],
- )
- # Now do the coercing thing; this needs to happen after the
- # above creation of a default value.
- if self.defaultValue:
- self.defaultValue = self.defaultValue.coerceToType(self.type, self.location)
- assert self.defaultValue
-
- def allowTreatNonCallableAsNull(self):
- return self._allowTreatNonCallableAsNull
-
- def _getDependentObjects(self):
- deps = set([self.type])
- if self.defaultValue:
- deps.add(self.defaultValue)
- return deps
-
- def canHaveMissingValue(self):
- return self.optional and not self.defaultValue
-
-
-class IDLCallback(IDLObjectWithScope):
- def __init__(
- self, location, parentScope, identifier, returnType, arguments, isConstructor
- ):
- assert isinstance(returnType, IDLType)
-
- self._returnType = returnType
- # Clone the list
- self._arguments = list(arguments)
-
- IDLObjectWithScope.__init__(self, location, parentScope, identifier)
-
- for (returnType, arguments) in self.signatures():
- for argument in arguments:
- argument.resolve(self)
-
- self._treatNonCallableAsNull = False
- self._treatNonObjectAsNull = False
- self._isRunScriptBoundary = False
- self._isConstructor = isConstructor
-
- def isCallback(self):
- return True
-
- def isConstructor(self):
- return self._isConstructor
-
- def signatures(self):
- return [(self._returnType, self._arguments)]
-
- def finish(self, scope):
- if not self._returnType.isComplete():
- type = self._returnType.complete(scope)
-
- assert not isinstance(type, IDLUnresolvedType)
- assert not isinstance(type, IDLTypedefType)
- assert not isinstance(type.name, IDLUnresolvedIdentifier)
- self._returnType = type
-
- for argument in self._arguments:
- if argument.type.isComplete():
- continue
-
- type = argument.type.complete(scope)
-
- assert not isinstance(type, IDLUnresolvedType)
- assert not isinstance(type, IDLTypedefType)
- assert not isinstance(type.name, IDLUnresolvedIdentifier)
- argument.type = type
-
- def validate(self):
- for argument in self._arguments:
- if argument.type.isUndefined():
- raise WebIDLError(
- "undefined must not be used as the type of an argument in any circumstance",
- [self.location],
- )
-
- def addExtendedAttributes(self, attrs):
- unhandledAttrs = []
- for attr in attrs:
- if attr.identifier() == "TreatNonCallableAsNull":
- self._treatNonCallableAsNull = True
- elif attr.identifier() == "LegacyTreatNonObjectAsNull":
- if self._isConstructor:
- raise WebIDLError(
- "[LegacyTreatNonObjectAsNull] is not supported "
- "on constructors",
- [self.location],
- )
- self._treatNonObjectAsNull = True
- elif attr.identifier() == "MOZ_CAN_RUN_SCRIPT_BOUNDARY":
- if self._isConstructor:
- raise WebIDLError(
- "[MOZ_CAN_RUN_SCRIPT_BOUNDARY] is not "
- "permitted on constructors",
- [self.location],
- )
- self._isRunScriptBoundary = True
- else:
- unhandledAttrs.append(attr)
- if self._treatNonCallableAsNull and self._treatNonObjectAsNull:
- raise WebIDLError(
- "Cannot specify both [TreatNonCallableAsNull] "
- "and [LegacyTreatNonObjectAsNull]",
- [self.location],
- )
- if len(unhandledAttrs) != 0:
- IDLType.addExtendedAttributes(self, unhandledAttrs)
-
- def _getDependentObjects(self):
- return set([self._returnType] + self._arguments)
-
- def isRunScriptBoundary(self):
- return self._isRunScriptBoundary
-
-
-class IDLCallbackType(IDLType):
- def __init__(self, location, callback):
- IDLType.__init__(self, location, callback.identifier.name)
- self.callback = callback
-
- def isCallback(self):
- return True
-
- def tag(self):
- return IDLType.Tags.callback
-
- def isDistinguishableFrom(self, other):
- if other.isPromise():
- return False
- if other.isUnion():
- # Just forward to the union; it'll deal
- return other.isDistinguishableFrom(self)
- return (
- other.isUndefined()
- or other.isPrimitive()
- or other.isString()
- or other.isEnum()
- or other.isNonCallbackInterface()
- or other.isSequence()
- )
-
- def _getDependentObjects(self):
- return self.callback._getDependentObjects()
-
-
-class IDLMethodOverload:
- """
- A class that represents a single overload of a WebIDL method. This is not
- quite the same as an element of the "effective overload set" in the spec,
- because separate IDLMethodOverloads are not created based on arguments being
- optional. Rather, when multiple methods have the same name, there is an
- IDLMethodOverload for each one, all hanging off an IDLMethod representing
- the full set of overloads.
- """
-
- def __init__(self, returnType, arguments, location):
- self.returnType = returnType
- # Clone the list of arguments, just in case
- self.arguments = list(arguments)
- self.location = location
-
- def _getDependentObjects(self):
- deps = set(self.arguments)
- deps.add(self.returnType)
- return deps
-
- def includesRestrictedFloatArgument(self):
- return any(arg.type.includesRestrictedFloat() for arg in self.arguments)
-
-
-class IDLMethod(IDLInterfaceMember, IDLScope):
-
- Special = enum(
- "Getter", "Setter", "Deleter", "LegacyCaller", base=IDLInterfaceMember.Special
- )
-
- NamedOrIndexed = enum("Neither", "Named", "Indexed")
-
- def __init__(
- self,
- location,
- identifier,
- returnType,
- arguments,
- static=False,
- getter=False,
- setter=False,
- deleter=False,
- specialType=NamedOrIndexed.Neither,
- legacycaller=False,
- stringifier=False,
- maplikeOrSetlikeOrIterable=None,
- underlyingAttr=None,
- ):
- # REVIEW: specialType is NamedOrIndexed -- wow, this is messed up.
- IDLInterfaceMember.__init__(
- self, location, identifier, IDLInterfaceMember.Tags.Method
- )
-
- self._hasOverloads = False
-
- assert isinstance(returnType, IDLType)
-
- # self._overloads is a list of IDLMethodOverloads
- self._overloads = [IDLMethodOverload(returnType, arguments, location)]
-
- assert isinstance(static, bool)
- self._static = static
- assert isinstance(getter, bool)
- self._getter = getter
- assert isinstance(setter, bool)
- self._setter = setter
- assert isinstance(deleter, bool)
- self._deleter = deleter
- assert isinstance(legacycaller, bool)
- self._legacycaller = legacycaller
- assert isinstance(stringifier, bool)
- self._stringifier = stringifier
- assert maplikeOrSetlikeOrIterable is None or isinstance(
- maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase
- )
- self.maplikeOrSetlikeOrIterable = maplikeOrSetlikeOrIterable
- self._htmlConstructor = False
- self.underlyingAttr = underlyingAttr
- self._specialType = specialType
- self._legacyUnforgeable = False
- self.dependsOn = "Everything"
- self.affects = "Everything"
- self.aliases = []
-
- if static and identifier.name == "prototype":
- raise WebIDLError(
- "The identifier of a static operation must not be 'prototype'",
- [location],
- )
-
- self.assertSignatureConstraints()
-
- def __str__(self):
- return "Method '%s'" % self.identifier
-
- def assertSignatureConstraints(self):
- if self._getter or self._deleter:
- assert len(self._overloads) == 1
- overload = self._overloads[0]
- arguments = overload.arguments
- assert len(arguments) == 1
- assert (
- arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring]
- or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]
- )
- assert not arguments[0].optional and not arguments[0].variadic
- assert not self._getter or not overload.returnType.isUndefined()
-
- if self._setter:
- assert len(self._overloads) == 1
- arguments = self._overloads[0].arguments
- assert len(arguments) == 2
- assert (
- arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring]
- or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]
- )
- assert not arguments[0].optional and not arguments[0].variadic
- assert not arguments[1].optional and not arguments[1].variadic
-
- if self._stringifier:
- assert len(self._overloads) == 1
- overload = self._overloads[0]
- assert len(overload.arguments) == 0
- if not self.underlyingAttr:
- assert (
- overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring]
- )
-
- def isStatic(self):
- return self._static
-
- def forceStatic(self):
- self._static = True
-
- def isGetter(self):
- return self._getter
-
- def isSetter(self):
- return self._setter
-
- def isDeleter(self):
- return self._deleter
-
- def isNamed(self):
- assert (
- self._specialType == IDLMethod.NamedOrIndexed.Named
- or self._specialType == IDLMethod.NamedOrIndexed.Indexed
- )
- return self._specialType == IDLMethod.NamedOrIndexed.Named
-
- def isIndexed(self):
- assert (
- self._specialType == IDLMethod.NamedOrIndexed.Named
- or self._specialType == IDLMethod.NamedOrIndexed.Indexed
- )
- return self._specialType == IDLMethod.NamedOrIndexed.Indexed
-
- def isLegacycaller(self):
- return self._legacycaller
-
- def isStringifier(self):
- return self._stringifier
-
- def isToJSON(self):
- return self.identifier.name == "toJSON"
-
- def isDefaultToJSON(self):
- return self.isToJSON() and self.getExtendedAttribute("Default")
-
- def isMaplikeOrSetlikeOrIterableMethod(self):
- """
- True if this method was generated as part of a
- maplike/setlike/etc interface (e.g. has/get methods)
- """
- return self.maplikeOrSetlikeOrIterable is not None
-
- def isSpecial(self):
- return (
- self.isGetter()
- or self.isSetter()
- or self.isDeleter()
- or self.isLegacycaller()
- or self.isStringifier()
- )
-
- def isHTMLConstructor(self):
- return self._htmlConstructor
-
- def hasOverloads(self):
- return self._hasOverloads
-
- def isIdentifierLess(self):
- """
- True if the method name started with __, and if the method is not a
- maplike/setlike method. Interfaces with maplike/setlike will generate
- methods starting with __ for chrome only backing object access in JS
- implemented interfaces, so while these functions use what is considered
- an non-identifier name, they actually DO have an identifier.
- """
- return (
- self.identifier.name[:2] == "__"
- and not self.isMaplikeOrSetlikeOrIterableMethod()
- )
-
- def resolve(self, parentScope):
- assert isinstance(parentScope, IDLScope)
- IDLObjectWithIdentifier.resolve(self, parentScope)
- IDLScope.__init__(self, self.location, parentScope, self.identifier)
- for (returnType, arguments) in self.signatures():
- for argument in arguments:
- argument.resolve(self)
-
- def addOverload(self, method):
- assert len(method._overloads) == 1
-
- if self._extendedAttrDict != method._extendedAttrDict:
- extendedAttrDiff = set(self._extendedAttrDict.keys()) ^ set(
- method._extendedAttrDict.keys()
- )
-
- if extendedAttrDiff == {"LenientFloat"}:
- if "LenientFloat" not in self._extendedAttrDict:
- for overload in self._overloads:
- if overload.includesRestrictedFloatArgument():
- raise WebIDLError(
- "Restricted float behavior differs on different "
- "overloads of %s" % method.identifier,
- [overload.location, method.location],
- )
- self._extendedAttrDict["LenientFloat"] = method._extendedAttrDict[
- "LenientFloat"
- ]
- elif method._overloads[0].includesRestrictedFloatArgument():
- raise WebIDLError(
- "Restricted float behavior differs on different "
- "overloads of %s" % method.identifier,
- [self.location, method.location],
- )
- else:
- raise WebIDLError(
- "Extended attributes differ on different "
- "overloads of %s" % method.identifier,
- [self.location, method.location],
- )
-
- self._overloads.extend(method._overloads)
-
- self._hasOverloads = True
-
- if self.isStatic() != method.isStatic():
- raise WebIDLError(
- "Overloaded identifier %s appears with different values of the 'static' attribute"
- % method.identifier,
- [method.location],
- )
-
- if self.isLegacycaller() != method.isLegacycaller():
- raise WebIDLError(
- "Overloaded identifier %s appears with different values of the 'legacycaller' attribute"
- % method.identifier,
- [method.location],
- )
-
- # Can't overload special things!
- assert not self.isGetter()
- assert not method.isGetter()
- assert not self.isSetter()
- assert not method.isSetter()
- assert not self.isDeleter()
- assert not method.isDeleter()
- assert not self.isStringifier()
- assert not method.isStringifier()
- assert not self.isHTMLConstructor()
- assert not method.isHTMLConstructor()
-
- return self
-
- def signatures(self):
- return [
- (overload.returnType, overload.arguments) for overload in self._overloads
- ]
-
- def finish(self, scope):
- IDLInterfaceMember.finish(self, scope)
-
- for overload in self._overloads:
- returnType = overload.returnType
- if not returnType.isComplete():
- returnType = returnType.complete(scope)
- assert not isinstance(returnType, IDLUnresolvedType)
- assert not isinstance(returnType, IDLTypedefType)
- assert not isinstance(returnType.name, IDLUnresolvedIdentifier)
- overload.returnType = returnType
-
- for argument in overload.arguments:
- if not argument.isComplete():
- argument.complete(scope)
- assert argument.type.isComplete()
-
- # Now compute various information that will be used by the
- # WebIDL overload resolution algorithm.
- self.maxArgCount = max(len(s[1]) for s in self.signatures())
- self.allowedArgCounts = [
- i
- for i in range(self.maxArgCount + 1)
- if len(self.signaturesForArgCount(i)) != 0
- ]
-
- def validate(self):
- IDLInterfaceMember.validate(self)
-
- # Make sure our overloads are properly distinguishable and don't have
- # different argument types before the distinguishing args.
- for argCount in self.allowedArgCounts:
- possibleOverloads = self.overloadsForArgCount(argCount)
- if len(possibleOverloads) == 1:
- continue
- distinguishingIndex = self.distinguishingIndexForArgCount(argCount)
- for idx in range(distinguishingIndex):
- firstSigType = possibleOverloads[0].arguments[idx].type
- for overload in possibleOverloads[1:]:
- if overload.arguments[idx].type != firstSigType:
- raise WebIDLError(
- "Signatures for method '%s' with %d arguments have "
- "different types of arguments at index %d, which "
- "is before distinguishing index %d"
- % (
- self.identifier.name,
- argCount,
- idx,
- distinguishingIndex,
- ),
- [self.location, overload.location],
- )
-
- overloadWithPromiseReturnType = None
- overloadWithoutPromiseReturnType = None
- for overload in self._overloads:
- returnType = overload.returnType
- if not returnType.unroll().isExposedInAllOf(self.exposureSet):
- raise WebIDLError(
- "Overload returns a type that is not exposed "
- "everywhere where the method is exposed",
- [overload.location],
- )
-
- variadicArgument = None
-
- arguments = overload.arguments
- for (idx, argument) in enumerate(arguments):
- assert argument.type.isComplete()
-
- if (
- argument.type.isDictionary()
- and argument.type.unroll().inner.canBeEmpty()
- ) or (
- argument.type.isUnion()
- and argument.type.unroll().hasPossiblyEmptyDictionaryType()
- ):
- # Optional dictionaries and unions containing optional
- # dictionaries at the end of the list or followed by
- # optional arguments must be optional.
- if not argument.optional and all(
- arg.optional for arg in arguments[idx + 1 :]
- ):
- raise WebIDLError(
- "Dictionary argument without any "
- "required fields or union argument "
- "containing such dictionary not "
- "followed by a required argument "
- "must be optional",
- [argument.location],
- )
-
- if not argument.defaultValue and all(
- arg.optional for arg in arguments[idx + 1 :]
- ):
- raise WebIDLError(
- "Dictionary argument without any "
- "required fields or union argument "
- "containing such dictionary not "
- "followed by a required argument "
- "must have a default value",
- [argument.location],
- )
-
- # An argument cannot be a nullable dictionary or a
- # nullable union containing a dictionary.
- if argument.type.nullable() and (
- argument.type.isDictionary()
- or (
- argument.type.isUnion()
- and argument.type.unroll().hasDictionaryType()
- )
- ):
- raise WebIDLError(
- "An argument cannot be a nullable "
- "dictionary or nullable union "
- "containing a dictionary",
- [argument.location],
- )
-
- # Only the last argument can be variadic
- if variadicArgument:
- raise WebIDLError(
- "Variadic argument is not last argument",
- [variadicArgument.location],
- )
- if argument.variadic:
- variadicArgument = argument
-
- if returnType.isPromise():
- overloadWithPromiseReturnType = overload
- else:
- overloadWithoutPromiseReturnType = overload
-
- # Make sure either all our overloads return Promises or none do
- if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType:
- raise WebIDLError(
- "We have overloads with both Promise and " "non-Promise return types",
- [
- overloadWithPromiseReturnType.location,
- overloadWithoutPromiseReturnType.location,
- ],
- )
-
- if overloadWithPromiseReturnType and self._legacycaller:
- raise WebIDLError(
- "May not have a Promise return type for a " "legacycaller.",
- [overloadWithPromiseReturnType.location],
- )
-
- if self.getExtendedAttribute("StaticClassOverride") and not (
- self.identifier.scope.isJSImplemented() and self.isStatic()
- ):
- raise WebIDLError(
- "StaticClassOverride can be applied to static"
- " methods on JS-implemented classes only.",
- [self.location],
- )
-
- # Ensure that toJSON methods satisfy the spec constraints on them.
- if self.identifier.name == "toJSON":
- if len(self.signatures()) != 1:
- raise WebIDLError(
- "toJSON method has multiple overloads",
- [self._overloads[0].location, self._overloads[1].location],
- )
- if len(self.signatures()[0][1]) != 0:
- raise WebIDLError("toJSON method has arguments", [self.location])
- if not self.signatures()[0][0].isJSONType():
- raise WebIDLError(
- "toJSON method has non-JSON return type", [self.location]
- )
-
- def overloadsForArgCount(self, argc):
- return [
- overload
- for overload in self._overloads
- if len(overload.arguments) == argc
- or (
- len(overload.arguments) > argc
- and all(arg.optional for arg in overload.arguments[argc:])
- )
- or (
- len(overload.arguments) < argc
- and len(overload.arguments) > 0
- and overload.arguments[-1].variadic
- )
- ]
-
- def signaturesForArgCount(self, argc):
- return [
- (overload.returnType, overload.arguments)
- for overload in self.overloadsForArgCount(argc)
- ]
-
- def locationsForArgCount(self, argc):
- return [overload.location for overload in self.overloadsForArgCount(argc)]
-
- def distinguishingIndexForArgCount(self, argc):
- def isValidDistinguishingIndex(idx, signatures):
- for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]):
- for (secondRetval, secondArgs) in signatures[firstSigIndex + 1 :]:
- if idx < len(firstArgs):
- firstType = firstArgs[idx].type
- else:
- assert firstArgs[-1].variadic
- firstType = firstArgs[-1].type
- if idx < len(secondArgs):
- secondType = secondArgs[idx].type
- else:
- assert secondArgs[-1].variadic
- secondType = secondArgs[-1].type
- if not firstType.isDistinguishableFrom(secondType):
- return False
- return True
-
- signatures = self.signaturesForArgCount(argc)
- for idx in range(argc):
- if isValidDistinguishingIndex(idx, signatures):
- return idx
- # No valid distinguishing index. Time to throw
- locations = self.locationsForArgCount(argc)
- raise WebIDLError(
- "Signatures with %d arguments for method '%s' are not "
- "distinguishable" % (argc, self.identifier.name),
- locations,
- )
-
- def handleExtendedAttribute(self, attr):
- identifier = attr.identifier()
- if (
- identifier == "GetterThrows"
- or identifier == "SetterThrows"
- or identifier == "GetterCanOOM"
- or identifier == "SetterCanOOM"
- or identifier == "SetterNeedsSubjectPrincipal"
- or identifier == "GetterNeedsSubjectPrincipal"
- ):
- raise WebIDLError(
- "Methods must not be flagged as " "[%s]" % identifier,
- [attr.location, self.location],
- )
- elif identifier == "LegacyUnforgeable":
- if self.isStatic():
- raise WebIDLError(
- "[LegacyUnforgeable] is only allowed on non-static " "methods",
- [attr.location, self.location],
- )
- self._legacyUnforgeable = True
- elif identifier == "SameObject":
- raise WebIDLError(
- "Methods must not be flagged as [SameObject]",
- [attr.location, self.location],
- )
- elif identifier == "Constant":
- raise WebIDLError(
- "Methods must not be flagged as [Constant]",
- [attr.location, self.location],
- )
- elif identifier == "PutForwards":
- raise WebIDLError(
- "Only attributes support [PutForwards]", [attr.location, self.location]
- )
- elif identifier == "LegacyLenientSetter":
- raise WebIDLError(
- "Only attributes support [LegacyLenientSetter]",
- [attr.location, self.location],
- )
- elif identifier == "LenientFloat":
- # This is called before we've done overload resolution
- overloads = self._overloads
- assert len(overloads) == 1
- if not overloads[0].returnType.isUndefined():
- raise WebIDLError(
- "[LenientFloat] used on a non-undefined method",
- [attr.location, self.location],
- )
- if not overloads[0].includesRestrictedFloatArgument():
- raise WebIDLError(
- "[LenientFloat] used on an operation with no "
- "restricted float type arguments",
- [attr.location, self.location],
- )
- elif identifier == "Exposed":
- convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames)
- elif (
- identifier == "CrossOriginCallable"
- or identifier == "WebGLHandlesContextLoss"
- ):
- # Known no-argument attributes.
- if not attr.noArguments():
- raise WebIDLError(
- "[%s] must take no arguments" % identifier, [attr.location]
- )
- if identifier == "CrossOriginCallable" and self.isStatic():
- raise WebIDLError(
- "[CrossOriginCallable] is only allowed on non-static " "attributes",
- [attr.location, self.location],
- )
- elif identifier == "Pure":
- if not attr.noArguments():
- raise WebIDLError("[Pure] must take no arguments", [attr.location])
- self._setDependsOn("DOMState")
- self._setAffects("Nothing")
- elif identifier == "Affects":
- if not attr.hasValue():
- raise WebIDLError("[Affects] takes an identifier", [attr.location])
- self._setAffects(attr.value())
- elif identifier == "DependsOn":
- if not attr.hasValue():
- raise WebIDLError("[DependsOn] takes an identifier", [attr.location])
- self._setDependsOn(attr.value())
- elif identifier == "Alias":
- if not attr.hasValue():
- raise WebIDLError(
- "[Alias] takes an identifier or string", [attr.location]
- )
- self._addAlias(attr.value())
- elif identifier == "UseCounter":
- if self.isSpecial():
- raise WebIDLError(
- "[UseCounter] must not be used on a special " "operation",
- [attr.location, self.location],
- )
- elif identifier == "Unscopable":
- if not attr.noArguments():
- raise WebIDLError(
- "[Unscopable] must take no arguments", [attr.location]
- )
- if self.isStatic():
- raise WebIDLError(
- "[Unscopable] is only allowed on non-static "
- "attributes and operations",
- [attr.location, self.location],
- )
- elif identifier == "CEReactions":
- if not attr.noArguments():
- raise WebIDLError(
- "[CEReactions] must take no arguments", [attr.location]
- )
-
- if self.isSpecial() and not self.isSetter() and not self.isDeleter():
- raise WebIDLError(
- "[CEReactions] is only allowed on operation, "
- "attribute, setter, and deleter",
- [attr.location, self.location],
- )
- elif identifier == "Default":
- if not attr.noArguments():
- raise WebIDLError("[Default] must take no arguments", [attr.location])
-
- if not self.isToJSON():
- raise WebIDLError(
- "[Default] is only allowed on toJSON operations",
- [attr.location, self.location],
- )
-
- if self.signatures()[0][0] != BuiltinTypes[IDLBuiltinType.Types.object]:
- raise WebIDLError(
- "The return type of the default toJSON "
- "operation must be 'object'",
- [attr.location, self.location],
- )
- elif (
- identifier == "Throws"
- or identifier == "CanOOM"
- or identifier == "NewObject"
- or identifier == "ChromeOnly"
- or identifier == "Pref"
- or identifier == "Deprecated"
- or identifier == "Func"
- or identifier == "Trial"
- or identifier == "SecureContext"
- or identifier == "BinaryName"
- or identifier == "NeedsSubjectPrincipal"
- or identifier == "NeedsCallerType"
- or identifier == "StaticClassOverride"
- or identifier == "NonEnumerable"
- or identifier == "Unexposed"
- or identifier == "WebExtensionStub"
- ):
- # Known attributes that we don't need to do anything with here
- pass
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on method" % identifier, [attr.location]
- )
- IDLInterfaceMember.handleExtendedAttribute(self, attr)
-
- def returnsPromise(self):
- return self._overloads[0].returnType.isPromise()
-
- def isLegacyUnforgeable(self):
- return self._legacyUnforgeable
-
- def _getDependentObjects(self):
- deps = set()
- for overload in self._overloads:
- deps.update(overload._getDependentObjects())
- return deps
-
-
-class IDLConstructor(IDLMethod):
- def __init__(self, location, args, name):
- # We can't actually init our IDLMethod yet, because we do not know the
- # return type yet. Just save the info we have for now and we will init
- # it later.
- self._initLocation = location
- self._initArgs = args
- self._initName = name
- self._inited = False
- self._initExtendedAttrs = []
-
- def addExtendedAttributes(self, attrs):
- if self._inited:
- return IDLMethod.addExtendedAttributes(self, attrs)
- self._initExtendedAttrs.extend(attrs)
-
- def handleExtendedAttribute(self, attr):
- identifier = attr.identifier()
- if (
- identifier == "BinaryName"
- or identifier == "ChromeOnly"
- or identifier == "NewObject"
- or identifier == "SecureContext"
- or identifier == "Throws"
- or identifier == "Func"
- or identifier == "Trial"
- or identifier == "Pref"
- or identifier == "UseCounter"
- ):
- IDLMethod.handleExtendedAttribute(self, attr)
- elif identifier == "HTMLConstructor":
- if not attr.noArguments():
- raise WebIDLError(
- "[HTMLConstructor] must take no arguments", [attr.location]
- )
- # We shouldn't end up here for legacy factory functions.
- assert self.identifier.name == "constructor"
-
- if any(len(sig[1]) != 0 for sig in self.signatures()):
- raise WebIDLError(
- "[HTMLConstructor] must not be applied to a "
- "constructor operation that has arguments.",
- [attr.location],
- )
- self._htmlConstructor = True
- else:
- raise WebIDLError(
- "Unknown extended attribute %s on method" % identifier, [attr.location]
- )
-
- def reallyInit(self, parentInterface):
- name = self._initName
- location = self._initLocation
- identifier = IDLUnresolvedIdentifier(location, name, allowForbidden=True)
- retType = IDLWrapperType(parentInterface.location, parentInterface)
- IDLMethod.__init__(
- self, location, identifier, retType, self._initArgs, static=True
- )
- self._inited = True
- # Propagate through whatever extended attributes we already had
- self.addExtendedAttributes(self._initExtendedAttrs)
- self._initExtendedAttrs = []
- # Constructors are always NewObject. Whether they throw or not is
- # indicated by [Throws] annotations in the usual way.
- self.addExtendedAttributes(
- [IDLExtendedAttribute(self.location, ("NewObject",))]
- )
-
-
-class IDLIncludesStatement(IDLObject):
- def __init__(self, location, interface, mixin):
- IDLObject.__init__(self, location)
- self.interface = interface
- self.mixin = mixin
- self._finished = False
-
- def finish(self, scope):
- if self._finished:
- return
- self._finished = True
- assert isinstance(self.interface, IDLIdentifierPlaceholder)
- assert isinstance(self.mixin, IDLIdentifierPlaceholder)
- interface = self.interface.finish(scope)
- mixin = self.mixin.finish(scope)
- # NOTE: we depend on not setting self.interface and
- # self.mixin here to keep track of the original
- # locations.
- if not isinstance(interface, IDLInterface):
- raise WebIDLError(
- "Left-hand side of 'includes' is not an " "interface",
- [self.interface.location, interface.location],
- )
- if interface.isCallback():
- raise WebIDLError(
- "Left-hand side of 'includes' is a callback " "interface",
- [self.interface.location, interface.location],
- )
- if not isinstance(mixin, IDLInterfaceMixin):
- raise WebIDLError(
- "Right-hand side of 'includes' is not an " "interface mixin",
- [self.mixin.location, mixin.location],
- )
-
- mixin.actualExposureGlobalNames.update(interface._exposureGlobalNames)
-
- interface.addIncludedMixin(mixin)
- self.interface = interface
- self.mixin = mixin
-
- def validate(self):
- pass
-
- def addExtendedAttributes(self, attrs):
- if len(attrs) != 0:
- raise WebIDLError(
- "There are no extended attributes that are "
- "allowed on includes statements",
- [attrs[0].location, self.location],
- )
-
-
-class IDLExtendedAttribute(IDLObject):
- """
- A class to represent IDL extended attributes so we can give them locations
- """
-
- def __init__(self, location, tuple):
- IDLObject.__init__(self, location)
- self._tuple = tuple
-
- def identifier(self):
- return self._tuple[0]
-
- def noArguments(self):
- return len(self._tuple) == 1
-
- def hasValue(self):
- return len(self._tuple) >= 2 and isinstance(self._tuple[1], str)
-
- def value(self):
- assert self.hasValue()
- return self._tuple[1]
-
- def hasArgs(self):
- return (
- len(self._tuple) == 2
- and isinstance(self._tuple[1], list)
- or len(self._tuple) == 3
- )
-
- def args(self):
- assert self.hasArgs()
- # Our args are our last element
- return self._tuple[-1]
-
- def listValue(self):
- """
- Backdoor for storing random data in _extendedAttrDict
- """
- return list(self._tuple)[1:]
-
-
-# Parser
-
-
-class Tokenizer(object):
- tokens = ["INTEGER", "FLOATLITERAL", "IDENTIFIER", "STRING", "WHITESPACE", "OTHER"]
-
- def t_FLOATLITERAL(self, t):
- r"(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN"
- t.value = float(t.value)
- return t
-
- def t_INTEGER(self, t):
- r"-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)"
- try:
- # Can't use int(), because that doesn't handle octal properly.
- t.value = parseInt(t.value)
- except:
- raise WebIDLError(
- "Invalid integer literal",
- [
- Location(
- lexer=self.lexer,
- lineno=self.lexer.lineno,
- lexpos=self.lexer.lexpos,
- filename=self._filename,
- )
- ],
- )
- return t
-
- def t_IDENTIFIER(self, t):
- r"[_-]?[A-Za-z][0-9A-Z_a-z-]*"
- t.type = self.keywords.get(t.value, "IDENTIFIER")
- # If Builtin readable streams are disabled, mark ReadableStream as an identifier.
- if t.type == "READABLESTREAM" and not self._use_builtin_readable_streams:
- t.type = "IDENTIFIER"
- return t
-
- def t_STRING(self, t):
- r'"[^"]*"'
- t.value = t.value[1:-1]
- return t
-
- def t_WHITESPACE(self, t):
- r"[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+"
- pass
-
- def t_ELLIPSIS(self, t):
- r"\.\.\."
- t.type = self.keywords.get(t.value)
- return t
-
- def t_OTHER(self, t):
- r"[^\t\n\r 0-9A-Z_a-z]"
- t.type = self.keywords.get(t.value, "OTHER")
- return t
-
- keywords = {
- "interface": "INTERFACE",
- "partial": "PARTIAL",
- "mixin": "MIXIN",
- "dictionary": "DICTIONARY",
- "exception": "EXCEPTION",
- "enum": "ENUM",
- "callback": "CALLBACK",
- "typedef": "TYPEDEF",
- "includes": "INCLUDES",
- "const": "CONST",
- "null": "NULL",
- "true": "TRUE",
- "false": "FALSE",
- "serializer": "SERIALIZER",
- "stringifier": "STRINGIFIER",
- "unrestricted": "UNRESTRICTED",
- "attribute": "ATTRIBUTE",
- "readonly": "READONLY",
- "inherit": "INHERIT",
- "static": "STATIC",
- "getter": "GETTER",
- "setter": "SETTER",
- "deleter": "DELETER",
- "legacycaller": "LEGACYCALLER",
- "optional": "OPTIONAL",
- "...": "ELLIPSIS",
- "::": "SCOPE",
- "DOMString": "DOMSTRING",
- "ByteString": "BYTESTRING",
- "USVString": "USVSTRING",
- "JSString": "JSSTRING",
- "UTF8String": "UTF8STRING",
- "any": "ANY",
- "boolean": "BOOLEAN",
- "byte": "BYTE",
- "double": "DOUBLE",
- "float": "FLOAT",
- "long": "LONG",
- "object": "OBJECT",
- "ObservableArray": "OBSERVABLEARRAY",
- "octet": "OCTET",
- "Promise": "PROMISE",
- "required": "REQUIRED",
- "sequence": "SEQUENCE",
- "record": "RECORD",
- "short": "SHORT",
- "unsigned": "UNSIGNED",
- "undefined": "UNDEFINED",
- ":": "COLON",
- ";": "SEMICOLON",
- "{": "LBRACE",
- "}": "RBRACE",
- "(": "LPAREN",
- ")": "RPAREN",
- "[": "LBRACKET",
- "]": "RBRACKET",
- "?": "QUESTIONMARK",
- "*": "ASTERISK",
- ",": "COMMA",
- "=": "EQUALS",
- "<": "LT",
- ">": "GT",
- "ArrayBuffer": "ARRAYBUFFER",
- "or": "OR",
- "maplike": "MAPLIKE",
- "setlike": "SETLIKE",
- "iterable": "ITERABLE",
- "namespace": "NAMESPACE",
- "ReadableStream": "READABLESTREAM",
- "constructor": "CONSTRUCTOR",
- "symbol": "SYMBOL",
- "async": "ASYNC",
- }
-
- tokens.extend(keywords.values())
-
- def t_error(self, t):
- raise WebIDLError(
- "Unrecognized Input",
- [
- Location(
- lexer=self.lexer,
- lineno=self.lexer.lineno,
- lexpos=self.lexer.lexpos,
- filename=self.filename,
- )
- ],
- )
-
- def __init__(self, outputdir, lexer=None, use_builtin_readable_streams=True):
- self._use_builtin_readable_streams = use_builtin_readable_streams
- if lexer:
- self.lexer = lexer
- else:
- self.lexer = lex.lex(object=self, reflags=re.DOTALL)
-
-
-class SqueakyCleanLogger(object):
- errorWhitelist = [
- # Web IDL defines the WHITESPACE token, but doesn't actually
- # use it ... so far.
- "Token 'WHITESPACE' defined, but not used",
- # And that means we have an unused token
- "There is 1 unused token",
- # Web IDL defines a OtherOrComma rule that's only used in
- # ExtendedAttributeInner, which we don't use yet.
- "Rule 'OtherOrComma' defined, but not used",
- # And an unused rule
- "There is 1 unused rule",
- # And the OtherOrComma grammar symbol is unreachable.
- "Symbol 'OtherOrComma' is unreachable",
- # Which means the Other symbol is unreachable.
- "Symbol 'Other' is unreachable",
- ]
-
- def __init__(self):
- self.errors = []
-
- def debug(self, msg, *args, **kwargs):
- pass
-
- info = debug
-
- def warning(self, msg, *args, **kwargs):
- if (
- msg == "%s:%d: Rule %r defined, but not used"
- or msg == "%s:%d: Rule '%s' defined, but not used"
- ):
- # Munge things so we don't have to hardcode filenames and
- # line numbers in our whitelist.
- whitelistmsg = "Rule %r defined, but not used"
- whitelistargs = args[2:]
- else:
- whitelistmsg = msg
- whitelistargs = args
- if (whitelistmsg % whitelistargs) not in SqueakyCleanLogger.errorWhitelist:
- self.errors.append(msg % args)
-
- error = warning
-
- def reportGrammarErrors(self):
- if self.errors:
- raise WebIDLError("\n".join(self.errors), [])
-
-
-class Parser(Tokenizer):
- def getLocation(self, p, i):
- return Location(self.lexer, p.lineno(i), p.lexpos(i), self._filename)
-
- def globalScope(self):
- return self._globalScope
-
- # The p_Foo functions here must match the WebIDL spec's grammar.
- # It's acceptable to split things at '|' boundaries.
- def p_Definitions(self, p):
- """
- Definitions : ExtendedAttributeList Definition Definitions
- """
- if p[2]:
- p[0] = [p[2]]
- p[2].addExtendedAttributes(p[1])
- else:
- assert not p[1]
- p[0] = []
-
- p[0].extend(p[3])
-
- def p_DefinitionsEmpty(self, p):
- """
- Definitions :
- """
- p[0] = []
-
- def p_Definition(self, p):
- """
- Definition : CallbackOrInterfaceOrMixin
- | Namespace
- | Partial
- | Dictionary
- | Exception
- | Enum
- | Typedef
- | IncludesStatement
- """
- p[0] = p[1]
- assert p[1] # We might not have implemented something ...
-
- def p_CallbackOrInterfaceOrMixinCallback(self, p):
- """
- CallbackOrInterfaceOrMixin : CALLBACK CallbackRestOrInterface
- """
- if p[2].isInterface():
- assert isinstance(p[2], IDLInterface)
- p[2].setCallback(True)
-
- p[0] = p[2]
-
- def p_CallbackOrInterfaceOrMixinInterfaceOrMixin(self, p):
- """
- CallbackOrInterfaceOrMixin : INTERFACE InterfaceOrMixin
- """
- p[0] = p[2]
-
- def p_CallbackRestOrInterface(self, p):
- """
- CallbackRestOrInterface : CallbackRest
- | CallbackConstructorRest
- | CallbackInterface
- """
- assert p[1]
- p[0] = p[1]
-
- def handleNonPartialObject(
- self, location, identifier, constructor, constructorArgs, nonPartialArgs
- ):
- """
- This handles non-partial objects (interfaces, namespaces and
- dictionaries) by checking for an existing partial object, and promoting
- it to non-partial as needed. The return value is the non-partial
- object.
-
- constructorArgs are all the args for the constructor except the last
- one: isKnownNonPartial.
-
- nonPartialArgs are the args for the setNonPartial call.
- """
- # The name of the class starts with "IDL", so strip that off.
- # Also, starts with a capital letter after that, so nix that
- # as well.
- prettyname = constructor.__name__[3:].lower()
-
- try:
- existingObj = self.globalScope()._lookupIdentifier(identifier)
- if existingObj:
- if not isinstance(existingObj, constructor):
- raise WebIDLError(
- "%s has the same name as "
- "non-%s object" % (prettyname.capitalize(), prettyname),
- [location, existingObj.location],
- )
- existingObj.setNonPartial(*nonPartialArgs)
- return existingObj
- except Exception as ex:
- if isinstance(ex, WebIDLError):
- raise ex
- pass
-
- # True for isKnownNonPartial
- return constructor(*(constructorArgs + [True]))
-
- def p_InterfaceOrMixin(self, p):
- """
- InterfaceOrMixin : InterfaceRest
- | MixinRest
- """
- p[0] = p[1]
-
- def p_CallbackInterface(self, p):
- """
- CallbackInterface : INTERFACE InterfaceRest
- """
- p[0] = p[2]
-
- def p_InterfaceRest(self, p):
- """
- InterfaceRest : IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(location, p[1])
- members = p[4]
- parent = p[2]
-
- p[0] = self.handleNonPartialObject(
- location,
- identifier,
- IDLInterface,
- [location, self.globalScope(), identifier, parent, members],
- [location, parent, members],
- )
-
- def p_InterfaceForwardDecl(self, p):
- """
- InterfaceRest : IDENTIFIER SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(location, p[1])
-
- try:
- if self.globalScope()._lookupIdentifier(identifier):
- p[0] = self.globalScope()._lookupIdentifier(identifier)
- if not isinstance(p[0], IDLExternalInterface):
- raise WebIDLError(
- "Name collision between external "
- "interface declaration for identifier "
- "%s and %s" % (identifier.name, p[0]),
- [location, p[0].location],
- )
- return
- except Exception as ex:
- if isinstance(ex, WebIDLError):
- raise ex
- pass
-
- p[0] = IDLExternalInterface(location, self.globalScope(), identifier)
-
- def p_MixinRest(self, p):
- """
- MixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[4]
-
- p[0] = self.handleNonPartialObject(
- location,
- identifier,
- IDLInterfaceMixin,
- [location, self.globalScope(), identifier, members],
- [location, members],
- )
-
- def p_Namespace(self, p):
- """
- Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[4]
-
- p[0] = self.handleNonPartialObject(
- location,
- identifier,
- IDLNamespace,
- [location, self.globalScope(), identifier, members],
- [location, None, members],
- )
-
- def p_Partial(self, p):
- """
- Partial : PARTIAL PartialDefinition
- """
- p[0] = p[2]
-
- def p_PartialDefinitionInterface(self, p):
- """
- PartialDefinition : INTERFACE PartialInterfaceOrPartialMixin
- """
- p[0] = p[2]
-
- def p_PartialDefinition(self, p):
- """
- PartialDefinition : PartialNamespace
- | PartialDictionary
- """
- p[0] = p[1]
-
- def handlePartialObject(
- self,
- location,
- identifier,
- nonPartialConstructor,
- nonPartialConstructorArgs,
- partialConstructorArgs,
- ):
- """
- This handles partial objects (interfaces, namespaces and dictionaries)
- by checking for an existing non-partial object, and adding ourselves to
- it as needed. The return value is our partial object. We use
- IDLPartialInterfaceOrNamespace for partial interfaces or namespaces,
- and IDLPartialDictionary for partial dictionaries.
-
- nonPartialConstructorArgs are all the args for the non-partial
- constructor except the last two: members and isKnownNonPartial.
-
- partialConstructorArgs are the arguments for the partial object
- constructor, except the last one (the non-partial object).
- """
- # The name of the class starts with "IDL", so strip that off.
- # Also, starts with a capital letter after that, so nix that
- # as well.
- prettyname = nonPartialConstructor.__name__[3:].lower()
-
- nonPartialObject = None
- try:
- nonPartialObject = self.globalScope()._lookupIdentifier(identifier)
- if nonPartialObject:
- if not isinstance(nonPartialObject, nonPartialConstructor):
- raise WebIDLError(
- "Partial %s has the same name as "
- "non-%s object" % (prettyname, prettyname),
- [location, nonPartialObject.location],
- )
- except Exception as ex:
- if isinstance(ex, WebIDLError):
- raise ex
- pass
-
- if not nonPartialObject:
- nonPartialObject = nonPartialConstructor(
- # No members, False for isKnownNonPartial
- *(nonPartialConstructorArgs),
- members=[],
- isKnownNonPartial=False
- )
-
- partialObject = None
- if isinstance(nonPartialObject, IDLDictionary):
- partialObject = IDLPartialDictionary(
- *(partialConstructorArgs + [nonPartialObject])
- )
- elif isinstance(
- nonPartialObject, (IDLInterface, IDLInterfaceMixin, IDLNamespace)
- ):
- partialObject = IDLPartialInterfaceOrNamespace(
- *(partialConstructorArgs + [nonPartialObject])
- )
- else:
- raise WebIDLError(
- "Unknown partial object type %s" % type(partialObject), [location]
- )
-
- return partialObject
-
- def p_PartialInterfaceOrPartialMixin(self, p):
- """
- PartialInterfaceOrPartialMixin : PartialInterfaceRest
- | PartialMixinRest
- """
- p[0] = p[1]
-
- def p_PartialInterfaceRest(self, p):
- """
- PartialInterfaceRest : IDENTIFIER LBRACE PartialInterfaceMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(location, p[1])
- members = p[3]
-
- p[0] = self.handlePartialObject(
- location,
- identifier,
- IDLInterface,
- [location, self.globalScope(), identifier, None],
- [location, identifier, members],
- )
-
- def p_PartialMixinRest(self, p):
- """
- PartialMixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[4]
-
- p[0] = self.handlePartialObject(
- location,
- identifier,
- IDLInterfaceMixin,
- [location, self.globalScope(), identifier],
- [location, identifier, members],
- )
-
- def p_PartialNamespace(self, p):
- """
- PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[4]
-
- p[0] = self.handlePartialObject(
- location,
- identifier,
- IDLNamespace,
- [location, self.globalScope(), identifier],
- [location, identifier, members],
- )
-
- def p_PartialDictionary(self, p):
- """
- PartialDictionary : DICTIONARY IDENTIFIER LBRACE DictionaryMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[4]
-
- p[0] = self.handlePartialObject(
- location,
- identifier,
- IDLDictionary,
- [location, self.globalScope(), identifier],
- [location, identifier, members],
- )
-
- def p_Inheritance(self, p):
- """
- Inheritance : COLON ScopedName
- """
- p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2])
-
- def p_InheritanceEmpty(self, p):
- """
- Inheritance :
- """
- pass
-
- def p_InterfaceMembers(self, p):
- """
- InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers
- """
- p[0] = [p[2]]
-
- assert not p[1] or p[2]
- p[2].addExtendedAttributes(p[1])
-
- p[0].extend(p[3])
-
- def p_InterfaceMembersEmpty(self, p):
- """
- InterfaceMembers :
- """
- p[0] = []
-
- def p_InterfaceMember(self, p):
- """
- InterfaceMember : PartialInterfaceMember
- | Constructor
- """
- p[0] = p[1]
-
- def p_Constructor(self, p):
- """
- Constructor : CONSTRUCTOR LPAREN ArgumentList RPAREN SEMICOLON
- """
- p[0] = IDLConstructor(self.getLocation(p, 1), p[3], "constructor")
-
- def p_PartialInterfaceMembers(self, p):
- """
- PartialInterfaceMembers : ExtendedAttributeList PartialInterfaceMember PartialInterfaceMembers
- """
- p[0] = [p[2]]
-
- assert not p[1] or p[2]
- p[2].addExtendedAttributes(p[1])
-
- p[0].extend(p[3])
-
- def p_PartialInterfaceMembersEmpty(self, p):
- """
- PartialInterfaceMembers :
- """
- p[0] = []
-
- def p_PartialInterfaceMember(self, p):
- """
- PartialInterfaceMember : Const
- | AttributeOrOperationOrMaplikeOrSetlikeOrIterable
- """
- p[0] = p[1]
-
- def p_MixinMembersEmpty(self, p):
- """
- MixinMembers :
- """
- p[0] = []
-
- def p_MixinMembers(self, p):
- """
- MixinMembers : ExtendedAttributeList MixinMember MixinMembers
- """
- p[0] = [p[2]]
-
- assert not p[1] or p[2]
- p[2].addExtendedAttributes(p[1])
-
- p[0].extend(p[3])
-
- def p_MixinMember(self, p):
- """
- MixinMember : Const
- | Attribute
- | Operation
- """
- p[0] = p[1]
-
- def p_Dictionary(self, p):
- """
- Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- members = p[5]
- p[0] = IDLDictionary(location, self.globalScope(), identifier, p[3], members)
-
- def p_DictionaryMembers(self, p):
- """
- DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers
- |
- """
- if len(p) == 1:
- # We're at the end of the list
- p[0] = []
- return
- p[2].addExtendedAttributes(p[1])
- p[0] = [p[2]]
- p[0].extend(p[3])
-
- def p_DictionaryMemberRequired(self, p):
- """
- DictionaryMember : REQUIRED TypeWithExtendedAttributes IDENTIFIER SEMICOLON
- """
- # These quack a lot like required arguments, so just treat them that way.
- t = p[2]
- assert isinstance(t, IDLType)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
-
- p[0] = IDLArgument(
- self.getLocation(p, 3),
- identifier,
- t,
- optional=False,
- defaultValue=None,
- variadic=False,
- dictionaryMember=True,
- )
-
- def p_DictionaryMember(self, p):
- """
- DictionaryMember : Type IDENTIFIER Default SEMICOLON
- """
- # These quack a lot like optional arguments, so just treat them that way.
- t = p[1]
- assert isinstance(t, IDLType)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- defaultValue = p[3]
-
- # Any attributes that precede this may apply to the type, so
- # we configure the argument to forward type attributes down instead of producing
- # a parse error
- p[0] = IDLArgument(
- self.getLocation(p, 2),
- identifier,
- t,
- optional=True,
- defaultValue=defaultValue,
- variadic=False,
- dictionaryMember=True,
- allowTypeAttributes=True,
- )
-
- def p_Default(self, p):
- """
- Default : EQUALS DefaultValue
- |
- """
- if len(p) > 1:
- p[0] = p[2]
- else:
- p[0] = None
-
    def p_DefaultValue(self, p):
        """
        DefaultValue : ConstValue
                     | LBRACKET RBRACKET
                     | LBRACE RBRACE
        """
        # A ConstValue arrives as a single, already-constructed IDL value.
        if len(p) == 2:
            p[0] = p[1]
        else:
            assert len(p) == 3  # Must be [] or {}
            if p[1] == "[":
                # "[]" — default empty sequence value.
                p[0] = IDLEmptySequenceValue(self.getLocation(p, 1))
            else:
                assert p[1] == "{"
                # "{}" — default empty dictionary value.
                p[0] = IDLDefaultDictionaryValue(self.getLocation(p, 1))
-
- def p_DefaultValueNull(self, p):
- """
- DefaultValue : NULL
- """
- p[0] = IDLNullValue(self.getLocation(p, 1))
-
- def p_DefaultValueUndefined(self, p):
- """
- DefaultValue : UNDEFINED
- """
- p[0] = IDLUndefinedValue(self.getLocation(p, 1))
-
- def p_Exception(self, p):
- """
- Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON
- """
- pass
-
- def p_Enum(self, p):
- """
- Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON
- """
- location = self.getLocation(p, 1)
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
-
- values = p[4]
- assert values
- p[0] = IDLEnum(location, self.globalScope(), identifier, values)
-
- def p_EnumValueList(self, p):
- """
- EnumValueList : STRING EnumValueListComma
- """
- p[0] = [p[1]]
- p[0].extend(p[2])
-
- def p_EnumValueListComma(self, p):
- """
- EnumValueListComma : COMMA EnumValueListString
- """
- p[0] = p[2]
-
- def p_EnumValueListCommaEmpty(self, p):
- """
- EnumValueListComma :
- """
- p[0] = []
-
- def p_EnumValueListString(self, p):
- """
- EnumValueListString : STRING EnumValueListComma
- """
- p[0] = [p[1]]
- p[0].extend(p[2])
-
- def p_EnumValueListStringEmpty(self, p):
- """
- EnumValueListString :
- """
- p[0] = []
-
- def p_CallbackRest(self, p):
- """
- CallbackRest : IDENTIFIER EQUALS Type LPAREN ArgumentList RPAREN SEMICOLON
- """
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
- p[0] = IDLCallback(
- self.getLocation(p, 1),
- self.globalScope(),
- identifier,
- p[3],
- p[5],
- isConstructor=False,
- )
-
- def p_CallbackConstructorRest(self, p):
- """
- CallbackConstructorRest : CONSTRUCTOR IDENTIFIER EQUALS Type LPAREN ArgumentList RPAREN SEMICOLON
- """
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2])
- p[0] = IDLCallback(
- self.getLocation(p, 2),
- self.globalScope(),
- identifier,
- p[4],
- p[6],
- isConstructor=True,
- )
-
- def p_ExceptionMembers(self, p):
- """
- ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers
- |
- """
- pass
-
- def p_Typedef(self, p):
- """
- Typedef : TYPEDEF TypeWithExtendedAttributes IDENTIFIER SEMICOLON
- """
- typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(), p[2], p[3])
- p[0] = typedef
-
- def p_IncludesStatement(self, p):
- """
- IncludesStatement : ScopedName INCLUDES ScopedName SEMICOLON
- """
- assert p[2] == "includes"
- interface = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1])
- mixin = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3])
- p[0] = IDLIncludesStatement(self.getLocation(p, 1), interface, mixin)
-
- def p_Const(self, p):
- """
- Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON
- """
- location = self.getLocation(p, 1)
- type = p[2]
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3])
- value = p[5]
- p[0] = IDLConst(location, identifier, type, value)
-
- def p_ConstValueBoolean(self, p):
- """
- ConstValue : BooleanLiteral
- """
- location = self.getLocation(p, 1)
- booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean]
- p[0] = IDLValue(location, booleanType, p[1])
-
- def p_ConstValueInteger(self, p):
- """
- ConstValue : INTEGER
- """
- location = self.getLocation(p, 1)
-
- # We don't know ahead of time what type the integer literal is.
- # Determine the smallest type it could possibly fit in and use that.
- integerType = matchIntegerValueToType(p[1])
- if integerType is None:
- raise WebIDLError("Integer literal out of range", [location])
-
- p[0] = IDLValue(location, integerType, p[1])
-
- def p_ConstValueFloat(self, p):
- """
- ConstValue : FLOATLITERAL
- """
- location = self.getLocation(p, 1)
- p[0] = IDLValue(
- location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1]
- )
-
- def p_ConstValueString(self, p):
- """
- ConstValue : STRING
- """
- location = self.getLocation(p, 1)
- stringType = BuiltinTypes[IDLBuiltinType.Types.domstring]
- p[0] = IDLValue(location, stringType, p[1])
-
- def p_BooleanLiteralTrue(self, p):
- """
- BooleanLiteral : TRUE
- """
- p[0] = True
-
- def p_BooleanLiteralFalse(self, p):
- """
- BooleanLiteral : FALSE
- """
- p[0] = False
-
- def p_AttributeOrOperationOrMaplikeOrSetlikeOrIterable(self, p):
- """
- AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute
- | Maplike
- | Setlike
- | Iterable
- | AsyncIterable
- | Operation
- """
- p[0] = p[1]
-
- def p_Iterable(self, p):
- """
- Iterable : ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON
- | ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON
- """
- location = self.getLocation(p, 2)
- identifier = IDLUnresolvedIdentifier(
- location, "__iterable", allowDoubleUnderscore=True
- )
- if len(p) > 6:
- keyType = p[3]
- valueType = p[5]
- else:
- keyType = None
- valueType = p[3]
-
- p[0] = IDLIterable(location, identifier, keyType, valueType, self.globalScope())
-
    def p_AsyncIterable(self, p):
        """
        AsyncIterable : ASYNC ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON
                      | ASYNC ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON
                      | ASYNC ITERABLE LT TypeWithExtendedAttributes GT LPAREN ArgumentList RPAREN SEMICOLON
                      | ASYNC ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT LPAREN ArgumentList RPAREN SEMICOLON
        """
        location = self.getLocation(p, 2)
        identifier = IDLUnresolvedIdentifier(
            location, "__iterable", allowDoubleUnderscore=True
        )
        # Which of the four productions matched is distinguished by token
        # count: pair vs. single value type, with or without an argument list.
        if len(p) == 12:
            # key/value pair with arguments.
            keyType = p[4]
            valueType = p[6]
            argList = p[9]
        elif len(p) == 10:
            # single value type with arguments.
            keyType = None
            valueType = p[4]
            argList = p[7]
        elif len(p) == 9:
            # key/value pair, no arguments.
            keyType = p[4]
            valueType = p[6]
            argList = []
        else:
            # single value type, no arguments.
            keyType = None
            valueType = p[4]
            argList = []

        p[0] = IDLAsyncIterable(
            location, identifier, keyType, valueType, argList, self.globalScope()
        )
-
- def p_Setlike(self, p):
- """
- Setlike : ReadOnly SETLIKE LT TypeWithExtendedAttributes GT SEMICOLON
- """
- readonly = p[1]
- maplikeOrSetlikeType = p[2]
- location = self.getLocation(p, 2)
- identifier = IDLUnresolvedIdentifier(
- location, "__setlike", allowDoubleUnderscore=True
- )
- keyType = p[4]
- valueType = keyType
- p[0] = IDLMaplikeOrSetlike(
- location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType
- )
-
- def p_Maplike(self, p):
- """
- Maplike : ReadOnly MAPLIKE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON
- """
- readonly = p[1]
- maplikeOrSetlikeType = p[2]
- location = self.getLocation(p, 2)
- identifier = IDLUnresolvedIdentifier(
- location, "__maplike", allowDoubleUnderscore=True
- )
- keyType = p[4]
- valueType = p[6]
- p[0] = IDLMaplikeOrSetlike(
- location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType
- )
-
- def p_AttributeWithQualifier(self, p):
- """
- Attribute : Qualifier AttributeRest
- """
- static = IDLInterfaceMember.Special.Static in p[1]
- stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
- (location, identifier, type, readonly) = p[2]
- p[0] = IDLAttribute(
- location, identifier, type, readonly, static=static, stringifier=stringifier
- )
-
- def p_AttributeInherited(self, p):
- """
- Attribute : INHERIT AttributeRest
- """
- (location, identifier, type, readonly) = p[2]
- p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True)
-
- def p_Attribute(self, p):
- """
- Attribute : AttributeRest
- """
- (location, identifier, type, readonly) = p[1]
- p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False)
-
- def p_AttributeRest(self, p):
- """
- AttributeRest : ReadOnly ATTRIBUTE TypeWithExtendedAttributes AttributeName SEMICOLON
- """
- location = self.getLocation(p, 2)
- readonly = p[1]
- t = p[3]
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 4), p[4])
- p[0] = (location, identifier, t, readonly)
-
- def p_ReadOnly(self, p):
- """
- ReadOnly : READONLY
- """
- p[0] = True
-
- def p_ReadOnlyEmpty(self, p):
- """
- ReadOnly :
- """
- p[0] = False
-
- def p_Operation(self, p):
- """
- Operation : Qualifiers OperationRest
- """
- qualifiers = p[1]
-
- # Disallow duplicates in the qualifier set
- if not len(set(qualifiers)) == len(qualifiers):
- raise WebIDLError(
- "Duplicate qualifiers are not allowed", [self.getLocation(p, 1)]
- )
-
- static = IDLInterfaceMember.Special.Static in p[1]
- # If static is there that's all that's allowed. This is disallowed
- # by the parser, so we can assert here.
- assert not static or len(qualifiers) == 1
-
- stringifier = IDLInterfaceMember.Special.Stringifier in p[1]
- # If stringifier is there that's all that's allowed. This is disallowed
- # by the parser, so we can assert here.
- assert not stringifier or len(qualifiers) == 1
-
- getter = True if IDLMethod.Special.Getter in p[1] else False
- setter = True if IDLMethod.Special.Setter in p[1] else False
- deleter = True if IDLMethod.Special.Deleter in p[1] else False
- legacycaller = True if IDLMethod.Special.LegacyCaller in p[1] else False
-
- if getter or deleter:
- if setter:
- raise WebIDLError(
- "getter and deleter are incompatible with setter",
- [self.getLocation(p, 1)],
- )
-
- (returnType, identifier, arguments) = p[2]
-
- assert isinstance(returnType, IDLType)
-
- specialType = IDLMethod.NamedOrIndexed.Neither
-
- if getter or deleter:
- if len(arguments) != 1:
- raise WebIDLError(
- "%s has wrong number of arguments"
- % ("getter" if getter else "deleter"),
- [self.getLocation(p, 2)],
- )
- argType = arguments[0].type
- if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
- specialType = IDLMethod.NamedOrIndexed.Named
- elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
- specialType = IDLMethod.NamedOrIndexed.Indexed
- if deleter:
- raise WebIDLError(
- "There is no such thing as an indexed deleter.",
- [self.getLocation(p, 1)],
- )
- else:
- raise WebIDLError(
- "%s has wrong argument type (must be DOMString or UnsignedLong)"
- % ("getter" if getter else "deleter"),
- [arguments[0].location],
- )
- if arguments[0].optional or arguments[0].variadic:
- raise WebIDLError(
- "%s cannot have %s argument"
- % (
- "getter" if getter else "deleter",
- "optional" if arguments[0].optional else "variadic",
- ),
- [arguments[0].location],
- )
- if getter:
- if returnType.isUndefined():
- raise WebIDLError(
- "getter cannot have undefined return type", [self.getLocation(p, 2)]
- )
- if setter:
- if len(arguments) != 2:
- raise WebIDLError(
- "setter has wrong number of arguments", [self.getLocation(p, 2)]
- )
- argType = arguments[0].type
- if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]:
- specialType = IDLMethod.NamedOrIndexed.Named
- elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]:
- specialType = IDLMethod.NamedOrIndexed.Indexed
- else:
- raise WebIDLError(
- "settter has wrong argument type (must be DOMString or UnsignedLong)",
- [arguments[0].location],
- )
- if arguments[0].optional or arguments[0].variadic:
- raise WebIDLError(
- "setter cannot have %s argument"
- % ("optional" if arguments[0].optional else "variadic"),
- [arguments[0].location],
- )
- if arguments[1].optional or arguments[1].variadic:
- raise WebIDLError(
- "setter cannot have %s argument"
- % ("optional" if arguments[1].optional else "variadic"),
- [arguments[1].location],
- )
-
- if stringifier:
- if len(arguments) != 0:
- raise WebIDLError(
- "stringifier has wrong number of arguments",
- [self.getLocation(p, 2)],
- )
- if not returnType.isDOMString():
- raise WebIDLError(
- "stringifier must have DOMString return type",
- [self.getLocation(p, 2)],
- )
-
- # identifier might be None. This is only permitted for special methods.
- if not identifier:
- if (
- not getter
- and not setter
- and not deleter
- and not legacycaller
- and not stringifier
- ):
- raise WebIDLError(
- "Identifier required for non-special methods",
- [self.getLocation(p, 2)],
- )
-
- location = BuiltinLocation("<auto-generated-identifier>")
- identifier = IDLUnresolvedIdentifier(
- location,
- "__%s%s%s%s%s%s"
- % (
- "named"
- if specialType == IDLMethod.NamedOrIndexed.Named
- else "indexed"
- if specialType == IDLMethod.NamedOrIndexed.Indexed
- else "",
- "getter" if getter else "",
- "setter" if setter else "",
- "deleter" if deleter else "",
- "legacycaller" if legacycaller else "",
- "stringifier" if stringifier else "",
- ),
- allowDoubleUnderscore=True,
- )
-
- method = IDLMethod(
- self.getLocation(p, 2),
- identifier,
- returnType,
- arguments,
- static=static,
- getter=getter,
- setter=setter,
- deleter=deleter,
- specialType=specialType,
- legacycaller=legacycaller,
- stringifier=stringifier,
- )
- p[0] = method
-
- def p_Stringifier(self, p):
- """
- Operation : STRINGIFIER SEMICOLON
- """
- identifier = IDLUnresolvedIdentifier(
- BuiltinLocation("<auto-generated-identifier>"),
- "__stringifier",
- allowDoubleUnderscore=True,
- )
- method = IDLMethod(
- self.getLocation(p, 1),
- identifier,
- returnType=BuiltinTypes[IDLBuiltinType.Types.domstring],
- arguments=[],
- stringifier=True,
- )
- p[0] = method
-
- def p_QualifierStatic(self, p):
- """
- Qualifier : STATIC
- """
- p[0] = [IDLInterfaceMember.Special.Static]
-
- def p_QualifierStringifier(self, p):
- """
- Qualifier : STRINGIFIER
- """
- p[0] = [IDLInterfaceMember.Special.Stringifier]
-
- def p_Qualifiers(self, p):
- """
- Qualifiers : Qualifier
- | Specials
- """
- p[0] = p[1]
-
- def p_Specials(self, p):
- """
- Specials : Special Specials
- """
- p[0] = [p[1]]
- p[0].extend(p[2])
-
- def p_SpecialsEmpty(self, p):
- """
- Specials :
- """
- p[0] = []
-
- def p_SpecialGetter(self, p):
- """
- Special : GETTER
- """
- p[0] = IDLMethod.Special.Getter
-
- def p_SpecialSetter(self, p):
- """
- Special : SETTER
- """
- p[0] = IDLMethod.Special.Setter
-
- def p_SpecialDeleter(self, p):
- """
- Special : DELETER
- """
- p[0] = IDLMethod.Special.Deleter
-
- def p_SpecialLegacyCaller(self, p):
- """
- Special : LEGACYCALLER
- """
- p[0] = IDLMethod.Special.LegacyCaller
-
- def p_OperationRest(self, p):
- """
- OperationRest : Type OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON
- """
- p[0] = (p[1], p[2], p[4])
-
- def p_OptionalIdentifier(self, p):
- """
- OptionalIdentifier : IDENTIFIER
- """
- p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
-
- def p_OptionalIdentifierEmpty(self, p):
- """
- OptionalIdentifier :
- """
- pass
-
- def p_ArgumentList(self, p):
- """
- ArgumentList : Argument Arguments
- """
- p[0] = [p[1]] if p[1] else []
- p[0].extend(p[2])
-
- def p_ArgumentListEmpty(self, p):
- """
- ArgumentList :
- """
- p[0] = []
-
- def p_Arguments(self, p):
- """
- Arguments : COMMA Argument Arguments
- """
- p[0] = [p[2]] if p[2] else []
- p[0].extend(p[3])
-
- def p_ArgumentsEmpty(self, p):
- """
- Arguments :
- """
- p[0] = []
-
- def p_Argument(self, p):
- """
- Argument : ExtendedAttributeList ArgumentRest
- """
- p[0] = p[2]
- p[0].addExtendedAttributes(p[1])
-
- def p_ArgumentRestOptional(self, p):
- """
- ArgumentRest : OPTIONAL TypeWithExtendedAttributes ArgumentName Default
- """
- t = p[2]
- assert isinstance(t, IDLType)
- # Arg names can be reserved identifiers
- identifier = IDLUnresolvedIdentifier(
- self.getLocation(p, 3), p[3], allowForbidden=True
- )
-
- defaultValue = p[4]
-
- # We can't test t.isAny() here and give it a default value as needed,
- # since at this point t is not a fully resolved type yet (e.g. it might
- # be a typedef). We'll handle the 'any' case in IDLArgument.complete.
-
- p[0] = IDLArgument(
- self.getLocation(p, 3), identifier, t, True, defaultValue, False
- )
-
    def p_ArgumentRest(self, p):
        """
        ArgumentRest : Type Ellipsis ArgumentName
        """
        t = p[1]
        assert isinstance(t, IDLType)
        # Arg names can be reserved identifiers
        identifier = IDLUnresolvedIdentifier(
            self.getLocation(p, 3), p[3], allowForbidden=True
        )

        # True iff the argument was declared with "..." (see p_Ellipsis).
        variadic = p[2]

        # We can't test t.isAny() here and give it a default value as needed,
        # since at this point t is not a fully resolved type yet (e.g. it might
        # be a typedef). We'll handle the 'any' case in IDLArgument.complete.

        # variadic implies optional
        # Any attributes that precede this may apply to the type, so
        # we configure the argument to forward type attributes down instead of producing
        # a parse error
        p[0] = IDLArgument(
            self.getLocation(p, 3),
            identifier,
            t,
            variadic,  # optional: a variadic argument is implicitly optional
            None,  # no default value in this production
            variadic,
            allowTypeAttributes=True,
        )
-
- def p_ArgumentName(self, p):
- """
- ArgumentName : IDENTIFIER
- | ArgumentNameKeyword
- """
- p[0] = p[1]
-
- def p_ArgumentNameKeyword(self, p):
- """
- ArgumentNameKeyword : ASYNC
- | ATTRIBUTE
- | CALLBACK
- | CONST
- | CONSTRUCTOR
- | DELETER
- | DICTIONARY
- | ENUM
- | EXCEPTION
- | GETTER
- | INCLUDES
- | INHERIT
- | INTERFACE
- | ITERABLE
- | LEGACYCALLER
- | MAPLIKE
- | MIXIN
- | NAMESPACE
- | PARTIAL
- | READONLY
- | REQUIRED
- | SERIALIZER
- | SETLIKE
- | SETTER
- | STATIC
- | STRINGIFIER
- | TYPEDEF
- | UNRESTRICTED
- """
- p[0] = p[1]
-
- def p_AttributeName(self, p):
- """
- AttributeName : IDENTIFIER
- | AttributeNameKeyword
- """
- p[0] = p[1]
-
- def p_AttributeNameKeyword(self, p):
- """
- AttributeNameKeyword : ASYNC
- | REQUIRED
- """
- p[0] = p[1]
-
- def p_Ellipsis(self, p):
- """
- Ellipsis : ELLIPSIS
- """
- p[0] = True
-
- def p_EllipsisEmpty(self, p):
- """
- Ellipsis :
- """
- p[0] = False
-
- def p_ExceptionMember(self, p):
- """
- ExceptionMember : Const
- | ExceptionField
- """
- pass
-
- def p_ExceptionField(self, p):
- """
- ExceptionField : Type IDENTIFIER SEMICOLON
- """
- pass
-
- def p_ExtendedAttributeList(self, p):
- """
- ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET
- """
- p[0] = [p[2]]
- if p[3]:
- p[0].extend(p[3])
-
- def p_ExtendedAttributeListEmpty(self, p):
- """
- ExtendedAttributeList :
- """
- p[0] = []
-
- def p_ExtendedAttribute(self, p):
- """
- ExtendedAttribute : ExtendedAttributeNoArgs
- | ExtendedAttributeArgList
- | ExtendedAttributeIdent
- | ExtendedAttributeWildcard
- | ExtendedAttributeNamedArgList
- | ExtendedAttributeIdentList
- """
- p[0] = IDLExtendedAttribute(self.getLocation(p, 1), p[1])
-
- def p_ExtendedAttributeEmpty(self, p):
- """
- ExtendedAttribute :
- """
- pass
-
- def p_ExtendedAttributes(self, p):
- """
- ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes
- """
- p[0] = [p[2]] if p[2] else []
- p[0].extend(p[3])
-
- def p_ExtendedAttributesEmpty(self, p):
- """
- ExtendedAttributes :
- """
- p[0] = []
-
- def p_Other(self, p):
- """
- Other : INTEGER
- | FLOATLITERAL
- | IDENTIFIER
- | STRING
- | OTHER
- | ELLIPSIS
- | COLON
- | SCOPE
- | SEMICOLON
- | LT
- | EQUALS
- | GT
- | QUESTIONMARK
- | ASTERISK
- | DOMSTRING
- | BYTESTRING
- | USVSTRING
- | UTF8STRING
- | JSSTRING
- | PROMISE
- | ANY
- | BOOLEAN
- | BYTE
- | DOUBLE
- | FALSE
- | FLOAT
- | LONG
- | NULL
- | OBJECT
- | OCTET
- | OR
- | OPTIONAL
- | RECORD
- | SEQUENCE
- | SHORT
- | SYMBOL
- | TRUE
- | UNSIGNED
- | UNDEFINED
- | ArgumentNameKeyword
- """
- pass
-
- def p_OtherOrComma(self, p):
- """
- OtherOrComma : Other
- | COMMA
- """
- pass
-
- def p_TypeSingleType(self, p):
- """
- Type : SingleType
- """
- p[0] = p[1]
-
- def p_TypeUnionType(self, p):
- """
- Type : UnionType Null
- """
- p[0] = self.handleNullable(p[1], p[2])
-
- def p_TypeWithExtendedAttributes(self, p):
- """
- TypeWithExtendedAttributes : ExtendedAttributeList Type
- """
- p[0] = p[2].withExtendedAttributes(p[1])
-
- def p_SingleTypeDistinguishableType(self, p):
- """
- SingleType : DistinguishableType
- """
- p[0] = p[1]
-
- def p_SingleTypeAnyType(self, p):
- """
- SingleType : ANY
- """
- p[0] = BuiltinTypes[IDLBuiltinType.Types.any]
-
- def p_SingleTypePromiseType(self, p):
- """
- SingleType : PROMISE LT Type GT
- """
- p[0] = IDLPromiseType(self.getLocation(p, 1), p[3])
-
- def p_UnionType(self, p):
- """
- UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN
- """
- types = [p[2], p[4]]
- types.extend(p[5])
- p[0] = IDLUnionType(self.getLocation(p, 1), types)
-
- def p_UnionMemberTypeDistinguishableType(self, p):
- """
- UnionMemberType : ExtendedAttributeList DistinguishableType
- """
- p[0] = p[2].withExtendedAttributes(p[1])
-
- def p_UnionMemberType(self, p):
- """
- UnionMemberType : UnionType Null
- """
- p[0] = self.handleNullable(p[1], p[2])
-
- def p_UnionMemberTypes(self, p):
- """
- UnionMemberTypes : OR UnionMemberType UnionMemberTypes
- """
- p[0] = [p[2]]
- p[0].extend(p[3])
-
- def p_UnionMemberTypesEmpty(self, p):
- """
- UnionMemberTypes :
- """
- p[0] = []
-
- def p_DistinguishableType(self, p):
- """
- DistinguishableType : PrimitiveType Null
- | ARRAYBUFFER Null
- | READABLESTREAM Null
- | OBJECT Null
- | UNDEFINED Null
- """
- if p[1] == "object":
- type = BuiltinTypes[IDLBuiltinType.Types.object]
- elif p[1] == "ArrayBuffer":
- type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer]
- elif p[1] == "ReadableStream":
- type = BuiltinTypes[IDLBuiltinType.Types.ReadableStream]
- elif p[1] == "undefined":
- type = BuiltinTypes[IDLBuiltinType.Types.undefined]
- else:
- type = BuiltinTypes[p[1]]
-
- p[0] = self.handleNullable(type, p[2])
-
- def p_DistinguishableTypeStringType(self, p):
- """
- DistinguishableType : StringType Null
- """
- p[0] = self.handleNullable(p[1], p[2])
-
- def p_DistinguishableTypeSequenceType(self, p):
- """
- DistinguishableType : SEQUENCE LT TypeWithExtendedAttributes GT Null
- """
- innerType = p[3]
- type = IDLSequenceType(self.getLocation(p, 1), innerType)
- p[0] = self.handleNullable(type, p[5])
-
- def p_DistinguishableTypeRecordType(self, p):
- """
- DistinguishableType : RECORD LT StringType COMMA TypeWithExtendedAttributes GT Null
- """
- keyType = p[3]
- valueType = p[5]
- type = IDLRecordType(self.getLocation(p, 1), keyType, valueType)
- p[0] = self.handleNullable(type, p[7])
-
- def p_DistinguishableTypeObservableArrayType(self, p):
- """
- DistinguishableType : OBSERVABLEARRAY LT TypeWithExtendedAttributes GT Null
- """
- innerType = p[3]
- type = IDLObservableArrayType(self.getLocation(p, 1), innerType)
- p[0] = self.handleNullable(type, p[5])
-
- def p_DistinguishableTypeScopedName(self, p):
- """
- DistinguishableType : ScopedName Null
- """
- assert isinstance(p[1], IDLUnresolvedIdentifier)
-
- if p[1].name == "Promise":
- raise WebIDLError(
- "Promise used without saying what it's " "parametrized over",
- [self.getLocation(p, 1)],
- )
-
- type = None
-
- try:
- if self.globalScope()._lookupIdentifier(p[1]):
- obj = self.globalScope()._lookupIdentifier(p[1])
- assert not obj.isType()
- if obj.isTypedef():
- type = IDLTypedefType(
- self.getLocation(p, 1), obj.innerType, obj.identifier.name
- )
- elif obj.isCallback() and not obj.isInterface():
- type = IDLCallbackType(obj.location, obj)
- else:
- type = IDLWrapperType(self.getLocation(p, 1), p[1])
- p[0] = self.handleNullable(type, p[2])
- return
- except:
- pass
-
- type = IDLUnresolvedType(self.getLocation(p, 1), p[1])
- p[0] = self.handleNullable(type, p[2])
-
- def p_ConstType(self, p):
- """
- ConstType : PrimitiveType
- """
- p[0] = BuiltinTypes[p[1]]
-
- def p_ConstTypeIdentifier(self, p):
- """
- ConstType : IDENTIFIER
- """
- identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
-
- p[0] = IDLUnresolvedType(self.getLocation(p, 1), identifier)
-
- def p_PrimitiveTypeUint(self, p):
- """
- PrimitiveType : UnsignedIntegerType
- """
- p[0] = p[1]
-
- def p_PrimitiveTypeBoolean(self, p):
- """
- PrimitiveType : BOOLEAN
- """
- p[0] = IDLBuiltinType.Types.boolean
-
- def p_PrimitiveTypeByte(self, p):
- """
- PrimitiveType : BYTE
- """
- p[0] = IDLBuiltinType.Types.byte
-
- def p_PrimitiveTypeOctet(self, p):
- """
- PrimitiveType : OCTET
- """
- p[0] = IDLBuiltinType.Types.octet
-
- def p_PrimitiveTypeFloat(self, p):
- """
- PrimitiveType : FLOAT
- """
- p[0] = IDLBuiltinType.Types.float
-
- def p_PrimitiveTypeUnrestictedFloat(self, p):
- """
- PrimitiveType : UNRESTRICTED FLOAT
- """
- p[0] = IDLBuiltinType.Types.unrestricted_float
-
- def p_PrimitiveTypeDouble(self, p):
- """
- PrimitiveType : DOUBLE
- """
- p[0] = IDLBuiltinType.Types.double
-
- def p_PrimitiveTypeUnrestictedDouble(self, p):
- """
- PrimitiveType : UNRESTRICTED DOUBLE
- """
- p[0] = IDLBuiltinType.Types.unrestricted_double
-
- def p_StringType(self, p):
- """
- StringType : BuiltinStringType
- """
- p[0] = BuiltinTypes[p[1]]
-
- def p_BuiltinStringTypeDOMString(self, p):
- """
- BuiltinStringType : DOMSTRING
- """
- p[0] = IDLBuiltinType.Types.domstring
-
- def p_BuiltinStringTypeBytestring(self, p):
- """
- BuiltinStringType : BYTESTRING
- """
- p[0] = IDLBuiltinType.Types.bytestring
-
- def p_BuiltinStringTypeUSVString(self, p):
- """
- BuiltinStringType : USVSTRING
- """
- p[0] = IDLBuiltinType.Types.usvstring
-
- def p_BuiltinStringTypeUTF8String(self, p):
- """
- BuiltinStringType : UTF8STRING
- """
- p[0] = IDLBuiltinType.Types.utf8string
-
- def p_BuiltinStringTypeJSString(self, p):
- """
- BuiltinStringType : JSSTRING
- """
- p[0] = IDLBuiltinType.Types.jsstring
-
    def p_UnsignedIntegerTypeUnsigned(self, p):
        """
        UnsignedIntegerType : UNSIGNED IntegerType
        """
        # Adding one to a given signed integer type gets you the unsigned type:
        # the Types enumeration is laid out so each unsigned variant
        # immediately follows its signed counterpart.
        p[0] = p[2] + 1
-
- def p_UnsignedIntegerType(self, p):
- """
- UnsignedIntegerType : IntegerType
- """
- p[0] = p[1]
-
- def p_IntegerTypeShort(self, p):
- """
- IntegerType : SHORT
- """
- p[0] = IDLBuiltinType.Types.short
-
- def p_IntegerTypeLong(self, p):
- """
- IntegerType : LONG OptionalLong
- """
- if p[2]:
- p[0] = IDLBuiltinType.Types.long_long
- else:
- p[0] = IDLBuiltinType.Types.long
-
- def p_OptionalLong(self, p):
- """
- OptionalLong : LONG
- """
- p[0] = True
-
- def p_OptionalLongEmpty(self, p):
- """
- OptionalLong :
- """
- p[0] = False
-
- def p_Null(self, p):
- """
- Null : QUESTIONMARK
- |
- """
- if len(p) > 1:
- p[0] = self.getLocation(p, 1)
- else:
- p[0] = None
-
- def p_ScopedName(self, p):
- """
- ScopedName : AbsoluteScopedName
- | RelativeScopedName
- """
- p[0] = p[1]
-
- def p_AbsoluteScopedName(self, p):
- """
- AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts
- """
- assert False
- pass
-
- def p_RelativeScopedName(self, p):
- """
- RelativeScopedName : IDENTIFIER ScopedNameParts
- """
- assert not p[2] # Not implemented!
-
- p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1])
-
- def p_ScopedNameParts(self, p):
- """
- ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts
- """
- assert False
- pass
-
- def p_ScopedNamePartsEmpty(self, p):
- """
- ScopedNameParts :
- """
- p[0] = None
-
- def p_ExtendedAttributeNoArgs(self, p):
- """
- ExtendedAttributeNoArgs : IDENTIFIER
- """
- p[0] = (p[1],)
-
- def p_ExtendedAttributeArgList(self, p):
- """
- ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN
- """
- p[0] = (p[1], p[3])
-
- def p_ExtendedAttributeIdent(self, p):
- """
- ExtendedAttributeIdent : IDENTIFIER EQUALS STRING
- | IDENTIFIER EQUALS IDENTIFIER
- """
- p[0] = (p[1], p[3])
-
- def p_ExtendedAttributeWildcard(self, p):
- """
- ExtendedAttributeWildcard : IDENTIFIER EQUALS ASTERISK
- """
- p[0] = (p[1], p[3])
-
- def p_ExtendedAttributeNamedArgList(self, p):
- """
- ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN
- """
- p[0] = (p[1], p[3], p[5])
-
- def p_ExtendedAttributeIdentList(self, p):
- """
- ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN
- """
- p[0] = (p[1], p[4])
-
- def p_IdentifierList(self, p):
- """
- IdentifierList : IDENTIFIER Identifiers
- """
- idents = list(p[2])
- # This is only used for identifier-list-valued extended attributes, and if
- # we're going to restrict to IDENTIFIER here we should at least allow
- # escaping with leading '_' as usual for identifiers.
- ident = p[1]
- if ident[0] == "_":
- ident = ident[1:]
- idents.insert(0, ident)
- p[0] = idents
-
- def p_IdentifiersList(self, p):
- """
- Identifiers : COMMA IDENTIFIER Identifiers
- """
- idents = list(p[3])
- # This is only used for identifier-list-valued extended attributes, and if
- # we're going to restrict to IDENTIFIER here we should at least allow
- # escaping with leading '_' as usual for identifiers.
- ident = p[2]
- if ident[0] == "_":
- ident = ident[1:]
- idents.insert(0, ident)
- p[0] = idents
-
- def p_IdentifiersEmpty(self, p):
- """
- Identifiers :
- """
- p[0] = []
-
    def p_error(self, p):
        # PLY error hook: p is the offending token, or None when the parser
        # hit end-of-input unexpectedly. Both cases are reported as
        # WebIDLError so callers get uniform error handling.
        if not p:
            raise WebIDLError(
                "Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both",
                [self._filename],
            )
        else:
            raise WebIDLError(
                "invalid syntax",
                [Location(self.lexer, p.lineno, p.lexpos, self._filename)],
            )
-
    def __init__(self, outputdir="", lexer=None, use_builtin_readable_stream=True):
        # Order matters here: the tokenizer and LALR tables must exist before
        # the builtin prelude below can be parsed.
        Tokenizer.__init__(self, outputdir, lexer, use_builtin_readable_stream)

        # Collect (and then report) grammar conflicts instead of letting PLY
        # print them directly.
        logger = SqueakyCleanLogger()
        try:
            self.parser = yacc.yacc(
                module=self,
                outputdir=outputdir,
                errorlog=logger,
                debug=False,
                write_tables=False,
                # Pickling the grammar is a speedup in
                # some cases (older Python?) but a
                # significant slowdown in others.
                # We're not pickling for now, until it
                # becomes a speedup again.
                # , picklefile='WebIDLGrammar.pkl'
            )
        finally:
            logger.reportGrammarErrors()

        self._globalScope = IDLScope(BuiltinLocation("<Global Scope>"), None, None)
        self._installBuiltins(self._globalScope)
        # Productions accumulated across parse() calls; consumed by finish().
        self._productions = []

        # Parse the builtin IDL prelude under a synthetic filename so its
        # locations are distinguishable from user input.
        self._filename = "<builtin>"
        self.lexer.input(Parser._builtins)
        self._filename = None

        self.parser.parse(lexer=self.lexer, tracking=True)
-
- def _installBuiltins(self, scope):
- assert isinstance(scope, IDLScope)
-
- # range omits the last value.
- for x in range(
- IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1
- ):
- builtin = BuiltinTypes[x]
- name = builtin.name
- typedef = IDLTypedef(
- BuiltinLocation("<builtin type>"), scope, builtin, name
- )
-
- @staticmethod
- def handleNullable(type, questionMarkLocation):
- if questionMarkLocation is not None:
- type = IDLNullableType(questionMarkLocation, type)
-
- return type
-
- def parse(self, t, filename=None):
- self.lexer.input(t)
-
- # for tok in iter(self.lexer.token, None):
- # print tok
-
- self._filename = filename
- self._productions.extend(self.parser.parse(lexer=self.lexer, tracking=True))
- self._filename = None
-
- def finish(self):
- # If we have interfaces that are iterable, create their
- # iterator interfaces and add them to the productions array.
- interfaceStatements = []
- for p in self._productions:
- if isinstance(p, IDLInterface):
- interfaceStatements.append(p)
-
- for iface in interfaceStatements:
- iterable = None
- # We haven't run finish() on the interface yet, so we don't know
- # whether our interface is maplike/setlike/iterable or not. This
- # means we have to loop through the members to see if we have an
- # iterable member.
- for m in iface.members:
- if isinstance(m, (IDLIterable, IDLAsyncIterable)):
- iterable = m
- break
- if iterable and (iterable.isPairIterator() or iterable.isAsyncIterable()):
-
- def simpleExtendedAttr(str):
- return IDLExtendedAttribute(iface.location, (str,))
-
- if isinstance(iterable, IDLAsyncIterable):
- nextReturnType = IDLPromiseType(
- iterable.location, BuiltinTypes[IDLBuiltinType.Types.any]
- )
- else:
- nextReturnType = BuiltinTypes[IDLBuiltinType.Types.object]
- nextMethod = IDLMethod(
- iterable.location,
- IDLUnresolvedIdentifier(iterable.location, "next"),
- nextReturnType,
- [],
- )
- nextMethod.addExtendedAttributes([simpleExtendedAttr("Throws")])
-
- methods = [nextMethod]
-
- if iterable.getExtendedAttribute("GenerateReturnMethod"):
- assert isinstance(iterable, IDLAsyncIterable)
-
- returnMethod = IDLMethod(
- iterable.location,
- IDLUnresolvedIdentifier(iterable.location, "return"),
- IDLPromiseType(
- iterable.location, BuiltinTypes[IDLBuiltinType.Types.any]
- ),
- [
- IDLArgument(
- iterable.location,
- IDLUnresolvedIdentifier(
- BuiltinLocation("<auto-generated-identifier>"),
- "value",
- ),
- BuiltinTypes[IDLBuiltinType.Types.any],
- optional=True,
- ),
- ],
- )
- returnMethod.addExtendedAttributes([simpleExtendedAttr("Throws")])
- methods.append(returnMethod)
-
- if iterable.isIterable():
- itr_suffix = "Iterator"
- else:
- itr_suffix = "AsyncIterator"
- itr_ident = IDLUnresolvedIdentifier(
- iface.location, iface.identifier.name + itr_suffix
- )
- if iterable.isIterable():
- classNameOverride = iface.identifier.name + " Iterator"
- elif iterable.isAsyncIterable():
- classNameOverride = iface.identifier.name + " AsyncIterator"
- itr_iface = IDLInterface(
- iface.location,
- self.globalScope(),
- itr_ident,
- None,
- methods,
- isKnownNonPartial=True,
- classNameOverride=classNameOverride,
- )
- itr_iface.addExtendedAttributes(
- [simpleExtendedAttr("LegacyNoInterfaceObject")]
- )
- # Make sure the exposure set for the iterator interface is the
- # same as the exposure set for the iterable interface, because
- # we're going to generate methods on the iterable that return
- # instances of the iterator.
- itr_iface._exposureGlobalNames = set(iface._exposureGlobalNames)
- # Always append generated iterable interfaces after the
- # interface they're a member of, otherwise nativeType generation
- # won't work correctly.
- if iterable.isIterable():
- itr_iface.iterableInterface = iface
- else:
- itr_iface.asyncIterableInterface = iface
- self._productions.append(itr_iface)
- iterable.iteratorType = IDLWrapperType(iface.location, itr_iface)
-
- # Make sure we finish IDLIncludesStatements before we finish the
- # IDLInterfaces.
- # XXX khuey hates this bit and wants to nuke it from orbit.
- includesStatements = [
- p for p in self._productions if isinstance(p, IDLIncludesStatement)
- ]
- otherStatements = [
- p for p in self._productions if not isinstance(p, IDLIncludesStatement)
- ]
- for production in includesStatements:
- production.finish(self.globalScope())
- for production in otherStatements:
- production.finish(self.globalScope())
-
- # Do any post-finish validation we need to do
- for production in self._productions:
- production.validate()
-
- # De-duplicate self._productions, without modifying its order.
- seen = set()
- result = []
- for p in self._productions:
- if p not in seen:
- seen.add(p)
- result.append(p)
- return result
-
- def reset(self):
- return Parser(lexer=self.lexer)
-
- # Builtin IDL defined by WebIDL
- _builtins = """
- typedef (ArrayBufferView or ArrayBuffer) BufferSource;
- """
-
-
-def main():
- # Parse arguments.
- from optparse import OptionParser
-
- usageString = "usage: %prog [options] files"
- o = OptionParser(usage=usageString)
- o.add_option(
- "--cachedir",
- dest="cachedir",
- default=None,
- help="Directory in which to cache lex/parse tables.",
- )
- o.add_option(
- "--verbose-errors",
- action="store_true",
- default=False,
- help="When an error happens, display the Python traceback.",
- )
- (options, args) = o.parse_args()
-
- if len(args) < 1:
- o.error(usageString)
-
- fileList = args
- baseDir = os.getcwd()
-
- # Parse the WebIDL.
- parser = Parser(options.cachedir)
- try:
- for filename in fileList:
- fullPath = os.path.normpath(os.path.join(baseDir, filename))
- f = open(fullPath, "rb")
- lines = f.readlines()
- f.close()
- print(fullPath)
- parser.parse("".join(lines), fullPath)
- parser.finish()
- except WebIDLError as e:
- if options.verbose_errors:
- traceback.print_exc()
- else:
- print(e)
-
-
-if __name__ == "__main__":
- main()
diff --git a/components/script/dom/bindings/codegen/parser/abstract.patch b/components/script/dom/bindings/codegen/parser/abstract.patch
deleted file mode 100644
index 316ed8ce0a1..00000000000
--- a/components/script/dom/bindings/codegen/parser/abstract.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -1987,6 +1987,7 @@ class IDLInterface(IDLInterfaceOrNamespace):
- or identifier == "RunConstructorInCallerCompartment"
- or identifier == "WantsEventListenerHooks"
- or identifier == "Serializable"
-+ or identifier == "Abstract"
- ):
- # Known extended attributes that do not take values
- if not attr.noArguments():
diff --git a/components/script/dom/bindings/codegen/parser/callback-location.patch b/components/script/dom/bindings/codegen/parser/callback-location.patch
deleted file mode 100644
index b7a308df631..00000000000
--- a/components/script/dom/bindings/codegen/parser/callback-location.patch
+++ /dev/null
@@ -1,20 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -2283,7 +2283,7 @@ class IDLUnresolvedType(IDLType):
- return typedefType.complete(scope).withExtendedAttributes(self.extraTypeAttributes)
- elif obj.isCallback() and not obj.isInterface():
- assert self.name.name == obj.identifier.name
-- return IDLCallbackType(self.location, obj)
-+ return IDLCallbackType(obj.location, obj)
-
- name = self.name.resolve(scope, None)
- return IDLWrapperType(self.location, obj)
-@@ -6854,7 +6854,7 @@ class Parser(Tokenizer):
- type = IDLTypedefType(self.getLocation(p, 1), obj.innerType,
- obj.identifier.name)
- elif obj.isCallback() and not obj.isInterface():
-- type = IDLCallbackType(self.getLocation(p, 1), obj)
-+ type = IDLCallbackType(obj.location, obj)
- else:
- type = IDLWrapperType(self.getLocation(p, 1), p[1])
- p[0] = self.handleNullable(type, p[2])
diff --git a/components/script/dom/bindings/codegen/parser/debug.patch b/components/script/dom/bindings/codegen/parser/debug.patch
deleted file mode 100644
index ffab062d801..00000000000
--- a/components/script/dom/bindings/codegen/parser/debug.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -8827,6 +8827,7 @@ class Parser(Tokenizer):
- module=self,
- outputdir=outputdir,
- errorlog=logger,
-+ debug=False,
- write_tables=False,
- # Pickling the grammar is a speedup in
- # some cases (older Python?) but a
diff --git a/components/script/dom/bindings/codegen/parser/ext-attribute-no-value-error.patch b/components/script/dom/bindings/codegen/parser/ext-attribute-no-value-error.patch
deleted file mode 100644
index 210134d8ca6..00000000000
--- a/components/script/dom/bindings/codegen/parser/ext-attribute-no-value-error.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -3490,7 +3490,7 @@ class IDLBuiltinType(IDLType):
- [self.location, attribute.location])
- assert not self.nullable()
- if not attribute.hasValue():
-- raise WebIDLError("[TreatNullAs] must take an identifier argument"
-+ raise WebIDLError("[TreatNullAs] must take an identifier argument",
- [attribute.location])
- value = attribute.value()
- if value != 'EmptyString':
diff --git a/components/script/dom/bindings/codegen/parser/inline.patch b/components/script/dom/bindings/codegen/parser/inline.patch
deleted file mode 100644
index ad4d0f8f959..00000000000
--- a/components/script/dom/bindings/codegen/parser/inline.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -1988,6 +1988,7 @@ class IDLInterface(IDLInterfaceOrNamespace):
- or identifier == "WantsEventListenerHooks"
- or identifier == "Serializable"
- or identifier == "Abstract"
-+ or identifier == "Inline"
- ):
- # Known extended attributes that do not take values
- if not attr.noArguments():
diff --git a/components/script/dom/bindings/codegen/parser/readable-stream.patch b/components/script/dom/bindings/codegen/parser/readable-stream.patch
deleted file mode 100644
index 4b90067696e..00000000000
--- a/components/script/dom/bindings/codegen/parser/readable-stream.patch
+++ /dev/null
@@ -1,162 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -2498,6 +2498,9 @@ class IDLType(IDLObject):
- def isRecord(self):
- return False
-
-+ def isReadableStream(self):
-+ return False
-+
- def isArrayBuffer(self):
- return False
-
-@@ -2526,7 +2529,7 @@ class IDLType(IDLObject):
- def isSpiderMonkeyInterface(self):
- """Returns a boolean indicating whether this type is an 'interface'
- type that is implemented in SpiderMonkey."""
-- return self.isInterface() and self.isBufferSource()
-+ return self.isInterface() and (self.isBufferSource() or self.isReadableStream())
-
- def isAny(self):
- return self.tag() == IDLType.Tags.any
-@@ -2743,6 +2746,9 @@ class IDLNullableType(IDLParametrizedType):
- def isRecord(self):
- return self.inner.isRecord()
-
-+ def isReadableStream(self):
-+ return self.inner.isReadableStream()
-+
- def isArrayBuffer(self):
- return self.inner.isArrayBuffer()
-
-@@ -3252,6 +3258,9 @@ class IDLTypedefType(IDLType):
- def isRecord(self):
- return self.inner.isRecord()
-
-+ def isReadableStream(self):
-+ return self.inner.isReadableStream()
-+
- def isDictionary(self):
- return self.inner.isDictionary()
-
-@@ -3597,6 +3606,7 @@ class IDLBuiltinType(IDLType):
- "Uint32Array",
- "Float32Array",
- "Float64Array",
-+ "ReadableStream",
- )
-
- TagLookup = {
-@@ -3632,6 +3642,7 @@ class IDLBuiltinType(IDLType):
- Types.Uint32Array: IDLType.Tags.interface,
- Types.Float32Array: IDLType.Tags.interface,
- Types.Float64Array: IDLType.Tags.interface,
-+ Types.ReadableStream: IDLType.Tags.interface,
- }
-
- PrettyNames = {
-@@ -3667,6 +3678,7 @@ class IDLBuiltinType(IDLType):
- Types.Uint32Array: "Uint32Array",
- Types.Float32Array: "Float32Array",
- Types.Float64Array: "Float64Array",
-+ Types.ReadableStream: "ReadableStream",
- }
-
- def __init__(
-@@ -3830,11 +3842,19 @@ class IDLBuiltinType(IDLType):
- and self._typeTag <= IDLBuiltinType.Types.Float64Array
- )
-
-+ def isReadableStream(self):
-+ return self._typeTag == IDLBuiltinType.Types.ReadableStream
-+
- def isInterface(self):
- # TypedArray things are interface types per the TypedArray spec,
- # but we handle them as builtins because SpiderMonkey implements
- # all of it internally.
-- return self.isArrayBuffer() or self.isArrayBufferView() or self.isTypedArray()
-+ return (
-+ self.isArrayBuffer()
-+ or self.isArrayBufferView()
-+ or self.isTypedArray()
-+ or self.isReadableStream()
-+ )
-
- def isNonCallbackInterface(self):
- # All the interfaces we can be are non-callback
-@@ -3928,6 +3948,7 @@ class IDLBuiltinType(IDLType):
- # ArrayBuffer is distinguishable from everything
- # that's not an ArrayBuffer or a callback interface
- (self.isArrayBuffer() and not other.isArrayBuffer())
-+ or (self.isReadableStream() and not other.isReadableStream())
- or
- # ArrayBufferView is distinguishable from everything
- # that's not an ArrayBufferView or typed array.
-@@ -4134,6 +4155,11 @@ BuiltinTypes = {
- "Float64Array",
- IDLBuiltinType.Types.Float64Array,
- ),
-+ IDLBuiltinType.Types.ReadableStream: IDLBuiltinType(
-+ BuiltinLocation("<builtin type>"),
-+ "ReadableStream",
-+ IDLBuiltinType.Types.ReadableStream,
-+ ),
- }
-
-
-@@ -6883,6 +6909,9 @@ class Tokenizer(object):
- def t_IDENTIFIER(self, t):
- r"[_-]?[A-Za-z][0-9A-Z_a-z-]*"
- t.type = self.keywords.get(t.value, "IDENTIFIER")
-+ # If Builtin readable streams are disabled, mark ReadableStream as an identifier.
-+ if t.type == "READABLESTREAM" and not self._use_builtin_readable_streams:
-+ t.type = "IDENTIFIER"
- return t
-
- def t_STRING(self, t):
-@@ -6973,6 +7002,7 @@ class Tokenizer(object):
- "setlike": "SETLIKE",
- "iterable": "ITERABLE",
- "namespace": "NAMESPACE",
-+ "ReadableStream": "READABLESTREAM",
- "constructor": "CONSTRUCTOR",
- "symbol": "SYMBOL",
- "async": "ASYNC",
-@@ -6993,7 +7023,8 @@ class Tokenizer(object):
- ],
- )
-
-- def __init__(self, outputdir, lexer=None):
-+ def __init__(self, outputdir, lexer=None, use_builtin_readable_streams=True):
-+ self._use_builtin_readable_streams = use_builtin_readable_streams
- if lexer:
- self.lexer = lexer
- else:
-@@ -8482,6 +8513,7 @@ class Parser(Tokenizer):
- """
- DistinguishableType : PrimitiveType Null
- | ARRAYBUFFER Null
-+ | READABLESTREAM Null
- | OBJECT Null
- | UNDEFINED Null
- """
-@@ -8489,6 +8521,8 @@ class Parser(Tokenizer):
- type = BuiltinTypes[IDLBuiltinType.Types.object]
- elif p[1] == "ArrayBuffer":
- type = BuiltinTypes[IDLBuiltinType.Types.ArrayBuffer]
-+ elif p[1] == "ReadableStream":
-+ type = BuiltinTypes[IDLBuiltinType.Types.ReadableStream]
- elif p[1] == "undefined":
- type = BuiltinTypes[IDLBuiltinType.Types.undefined]
- else:
-@@ -8827,8 +8861,8 @@ class Parser(Tokenizer):
- [Location(self.lexer, p.lineno, p.lexpos, self._filename)],
- )
-
-- def __init__(self, outputdir="", lexer=None):
-- Tokenizer.__init__(self, outputdir, lexer)
-+ def __init__(self, outputdir="", lexer=None, use_builtin_readable_stream=True):
-+ Tokenizer.__init__(self, outputdir, lexer, use_builtin_readable_stream)
-
- logger = SqueakyCleanLogger()
- try:
diff --git a/components/script/dom/bindings/codegen/parser/runtests.py b/components/script/dom/bindings/codegen/parser/runtests.py
deleted file mode 100644
index 0599bf55fec..00000000000
--- a/components/script/dom/bindings/codegen/parser/runtests.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-import os, sys
-import glob
-import optparse
-import traceback
-import WebIDL
-
-class TestHarness(object):
- def __init__(self, test, verbose):
- self.test = test
- self.verbose = verbose
- self.printed_intro = False
-
- def start(self):
- if self.verbose:
- self.maybe_print_intro()
-
- def finish(self):
- if self.verbose or self.printed_intro:
- print("Finished test %s" % self.test)
-
- def maybe_print_intro(self):
- if not self.printed_intro:
- print("Starting test %s" % self.test)
- self.printed_intro = True
-
- def test_pass(self, msg):
- if self.verbose:
- print("TEST-PASS | %s" % msg)
-
- def test_fail(self, msg):
- self.maybe_print_intro()
- print("TEST-UNEXPECTED-FAIL | %s" % msg)
-
- def ok(self, condition, msg):
- if condition:
- self.test_pass(msg)
- else:
- self.test_fail(msg)
-
- def check(self, a, b, msg):
- if a == b:
- self.test_pass(msg)
- else:
- self.test_fail(msg)
- print("\tGot %s expected %s" % (a, b))
-
-def run_tests(tests, verbose):
- testdir = os.path.join(os.path.dirname(__file__), 'tests')
- if not tests:
- tests = glob.iglob(os.path.join(testdir, "*.py"))
- sys.path.append(testdir)
-
- for test in tests:
- (testpath, ext) = os.path.splitext(os.path.basename(test))
- _test = __import__(testpath, globals(), locals(), ['WebIDLTest'])
-
- harness = TestHarness(test, verbose)
- harness.start()
- try:
- _test.WebIDLTest.__call__(WebIDL.Parser(), harness)
- except Exception as ex:
- print("TEST-UNEXPECTED-FAIL | Unhandled exception in test %s: %s" % (testpath, ex))
- traceback.print_exc()
- finally:
- harness.finish()
-
-if __name__ == '__main__':
- usage = """%prog [OPTIONS] [TESTS]
- Where TESTS are relative to the tests directory."""
- parser = optparse.OptionParser(usage=usage)
- parser.add_option('-q', '--quiet', action='store_false', dest='verbose', default=True,
- help="Don't print passing tests.")
- options, tests = parser.parse_args()
-
- run_tests(tests, verbose=options.verbose)
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_any_null.py b/components/script/dom/bindings/codegen/parser/tests/test_any_null.py
deleted file mode 100644
index f9afdacb02f..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_any_null.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface DoubleNull {
- attribute any? foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py b/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py
deleted file mode 100644
index 3f50cb05158..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_argument_identifier_conflicts.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface ArgumentIdentifierConflict {
- undefined foo(boolean arg1, boolean arg1);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py b/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py
deleted file mode 100644
index bbed33df926..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_argument_keywords.py
+++ /dev/null
@@ -1,22 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface Foo {
- undefined foo(object constructor);
- };
- """
- )
-
- results = parser.finish()
- harness.check(len(results), 1, "Should have an interface")
- iface = results[0]
- harness.check(len(iface.members), 1, "Should have an operation")
- operation = iface.members[0]
- harness.check(len(operation.signatures()), 1, "Should have one signature")
- (retval, args) = operation.signatures()[0]
- harness.check(len(args), 1, "Should have an argument")
- harness.check(
- args[0].identifier.name,
- "constructor",
- "Should have an identifier named 'constructor'",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py b/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py
deleted file mode 100644
index b762d06ac29..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_arraybuffer.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestArrayBuffer {
- attribute ArrayBuffer bufferAttr;
- undefined bufferMethod(ArrayBuffer arg1, ArrayBuffer? arg2, sequence<ArrayBuffer> arg3);
-
- attribute ArrayBufferView viewAttr;
- undefined viewMethod(ArrayBufferView arg1, ArrayBufferView? arg2, sequence<ArrayBufferView> arg3);
-
- attribute Int8Array int8ArrayAttr;
- undefined int8ArrayMethod(Int8Array arg1, Int8Array? arg2, sequence<Int8Array> arg3);
-
- attribute Uint8Array uint8ArrayAttr;
- undefined uint8ArrayMethod(Uint8Array arg1, Uint8Array? arg2, sequence<Uint8Array> arg3);
-
- attribute Uint8ClampedArray uint8ClampedArrayAttr;
- undefined uint8ClampedArrayMethod(Uint8ClampedArray arg1, Uint8ClampedArray? arg2, sequence<Uint8ClampedArray> arg3);
-
- attribute Int16Array int16ArrayAttr;
- undefined int16ArrayMethod(Int16Array arg1, Int16Array? arg2, sequence<Int16Array> arg3);
-
- attribute Uint16Array uint16ArrayAttr;
- undefined uint16ArrayMethod(Uint16Array arg1, Uint16Array? arg2, sequence<Uint16Array> arg3);
-
- attribute Int32Array int32ArrayAttr;
- undefined int32ArrayMethod(Int32Array arg1, Int32Array? arg2, sequence<Int32Array> arg3);
-
- attribute Uint32Array uint32ArrayAttr;
- undefined uint32ArrayMethod(Uint32Array arg1, Uint32Array? arg2, sequence<Uint32Array> arg3);
-
- attribute Float32Array float32ArrayAttr;
- undefined float32ArrayMethod(Float32Array arg1, Float32Array? arg2, sequence<Float32Array> arg3);
-
- attribute Float64Array float64ArrayAttr;
- undefined float64ArrayMethod(Float64Array arg1, Float64Array? arg2, sequence<Float64Array> arg3);
- };
- """
- )
-
- results = parser.finish()
-
- iface = results[0]
-
- harness.ok(True, "TestArrayBuffer interface parsed without error")
- harness.check(len(iface.members), 22, "Interface should have twenty two members")
-
- members = iface.members
-
- def checkStuff(attr, method, t):
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Expect an IDLAttribute")
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Expect an IDLMethod")
-
- harness.check(str(attr.type), t, "Expect an ArrayBuffer type")
- harness.ok(attr.type.isSpiderMonkeyInterface(), "Should test as a js interface")
-
- (retType, arguments) = method.signatures()[0]
- harness.ok(retType.isUndefined(), "Should have an undefined return type")
- harness.check(len(arguments), 3, "Expect 3 arguments")
-
- harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type")
- harness.ok(
- arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface"
- )
-
- harness.check(
- str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type"
- )
- harness.ok(
- arguments[1].type.inner.isSpiderMonkeyInterface(),
- "Should test as a js interface",
- )
-
- harness.check(
- str(arguments[2].type), t + "Sequence", "Expect an ArrayBuffer type"
- )
- harness.ok(
- arguments[2].type.inner.isSpiderMonkeyInterface(),
- "Should test as a js interface",
- )
-
- checkStuff(members[0], members[1], "ArrayBuffer")
- checkStuff(members[2], members[3], "ArrayBufferView")
- checkStuff(members[4], members[5], "Int8Array")
- checkStuff(members[6], members[7], "Uint8Array")
- checkStuff(members[8], members[9], "Uint8ClampedArray")
- checkStuff(members[10], members[11], "Int16Array")
- checkStuff(members[12], members[13], "Uint16Array")
- checkStuff(members[14], members[15], "Int32Array")
- checkStuff(members[16], members[17], "Uint32Array")
- checkStuff(members[18], members[19], "Float32Array")
- checkStuff(members[20], members[21], "Float64Array")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attr.py b/components/script/dom/bindings/codegen/parser/tests/test_attr.py
deleted file mode 100644
index e19689a81a9..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_attr.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- testData = [
- ("::TestAttr%s::b", "b", "Byte%s", False),
- ("::TestAttr%s::rb", "rb", "Byte%s", True),
- ("::TestAttr%s::o", "o", "Octet%s", False),
- ("::TestAttr%s::ro", "ro", "Octet%s", True),
- ("::TestAttr%s::s", "s", "Short%s", False),
- ("::TestAttr%s::rs", "rs", "Short%s", True),
- ("::TestAttr%s::us", "us", "UnsignedShort%s", False),
- ("::TestAttr%s::rus", "rus", "UnsignedShort%s", True),
- ("::TestAttr%s::l", "l", "Long%s", False),
- ("::TestAttr%s::rl", "rl", "Long%s", True),
- ("::TestAttr%s::ul", "ul", "UnsignedLong%s", False),
- ("::TestAttr%s::rul", "rul", "UnsignedLong%s", True),
- ("::TestAttr%s::ll", "ll", "LongLong%s", False),
- ("::TestAttr%s::rll", "rll", "LongLong%s", True),
- ("::TestAttr%s::ull", "ull", "UnsignedLongLong%s", False),
- ("::TestAttr%s::rull", "rull", "UnsignedLongLong%s", True),
- ("::TestAttr%s::str", "str", "String%s", False),
- ("::TestAttr%s::rstr", "rstr", "String%s", True),
- ("::TestAttr%s::obj", "obj", "Object%s", False),
- ("::TestAttr%s::robj", "robj", "Object%s", True),
- ("::TestAttr%s::object", "object", "Object%s", False),
- ("::TestAttr%s::f", "f", "Float%s", False),
- ("::TestAttr%s::rf", "rf", "Float%s", True),
- ]
-
- parser.parse(
- """
- interface TestAttr {
- attribute byte b;
- readonly attribute byte rb;
- attribute octet o;
- readonly attribute octet ro;
- attribute short s;
- readonly attribute short rs;
- attribute unsigned short us;
- readonly attribute unsigned short rus;
- attribute long l;
- readonly attribute long rl;
- attribute unsigned long ul;
- readonly attribute unsigned long rul;
- attribute long long ll;
- readonly attribute long long rll;
- attribute unsigned long long ull;
- readonly attribute unsigned long long rull;
- attribute DOMString str;
- readonly attribute DOMString rstr;
- attribute object obj;
- readonly attribute object robj;
- attribute object _object;
- attribute float f;
- readonly attribute float rf;
- };
-
- interface TestAttrNullable {
- attribute byte? b;
- readonly attribute byte? rb;
- attribute octet? o;
- readonly attribute octet? ro;
- attribute short? s;
- readonly attribute short? rs;
- attribute unsigned short? us;
- readonly attribute unsigned short? rus;
- attribute long? l;
- readonly attribute long? rl;
- attribute unsigned long? ul;
- readonly attribute unsigned long? rul;
- attribute long long? ll;
- readonly attribute long long? rll;
- attribute unsigned long long? ull;
- readonly attribute unsigned long long? rull;
- attribute DOMString? str;
- readonly attribute DOMString? rstr;
- attribute object? obj;
- readonly attribute object? robj;
- attribute object? _object;
- attribute float? f;
- readonly attribute float? rf;
- };
- """
- )
-
- results = parser.finish()
-
- def checkAttr(attr, QName, name, type, readonly):
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.ok(attr.isAttr(), "Attr is an Attr")
- harness.ok(not attr.isMethod(), "Attr is not an method")
- harness.ok(not attr.isConst(), "Attr is not a const")
- harness.check(attr.identifier.QName(), QName, "Attr has the right QName")
- harness.check(attr.identifier.name, name, "Attr has the right name")
- harness.check(str(attr.type), type, "Attr has the right type")
- harness.check(attr.readonly, readonly, "Attr's readonly state is correct")
-
- harness.ok(True, "TestAttr interface parsed without error.")
- harness.check(len(results), 2, "Should be two productions.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestAttr", "Interface has the right QName"
- )
- harness.check(iface.identifier.name, "TestAttr", "Interface has the right name")
- harness.check(
- len(iface.members), len(testData), "Expect %s members" % len(testData)
- )
-
- attrs = iface.members
-
- for i in range(len(attrs)):
- data = testData[i]
- attr = attrs[i]
- (QName, name, type, readonly) = data
- checkAttr(attr, QName % "", name, type % "", readonly)
-
- iface = results[1]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestAttrNullable", "Interface has the right QName"
- )
- harness.check(
- iface.identifier.name, "TestAttrNullable", "Interface has the right name"
- )
- harness.check(
- len(iface.members), len(testData), "Expect %s members" % len(testData)
- )
-
- attrs = iface.members
-
- for i in range(len(attrs)):
- data = testData[i]
- attr = attrs[i]
- (QName, name, type, readonly) = data
- checkAttr(attr, QName % "Nullable", name, type % "OrNull", readonly)
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [SetterThrows] readonly attribute boolean foo;
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should not allow [SetterThrows] on readonly attributes")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throw] readonly attribute boolean foo;
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should spell [Throws] correctly")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [SameObject] readonly attribute boolean foo;
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw, "Should not allow [SameObject] on attributes not of interface type"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [SameObject] readonly attribute A foo;
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(not threw, "Should allow [SameObject] on attributes of interface type")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py b/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py
deleted file mode 100644
index f3249de900a..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_attr_sequence_type.py
+++ /dev/null
@@ -1,77 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface AttrSequenceType {
- attribute sequence<object> foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Attribute type must not be a sequence type")
-
- parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface AttrUnionWithSequenceType {
- attribute (sequence<object> or DOMString) foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Attribute type must not be a union with a sequence member type")
-
- parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface AttrNullableUnionWithSequenceType {
- attribute (sequence<object>? or DOMString) foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Attribute type must not be a union with a nullable sequence " "member type",
- )
-
- parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface AttrUnionWithUnionWithSequenceType {
- attribute ((sequence<object> or DOMString) or AttrUnionWithUnionWithSequenceType) foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Attribute type must not be a union type with a union member "
- "type that has a sequence member type",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py b/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py
deleted file mode 100644
index 97a7f47859a..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_attributes_on_types.py
+++ /dev/null
@@ -1,570 +0,0 @@
-# Import the WebIDL module, so we can do isinstance checks and whatnot
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- # Basic functionality
- threw = False
- try:
- parser.parse(
- """
- typedef [EnforceRange] long Foo;
- typedef [Clamp] long Bar;
- typedef [LegacyNullToEmptyString] DOMString Baz;
- dictionary A {
- required [EnforceRange] long a;
- required [Clamp] long b;
- [ChromeOnly, EnforceRange] long c;
- Foo d;
- };
- interface B {
- attribute Foo typedefFoo;
- attribute [EnforceRange] long foo;
- attribute [Clamp] long bar;
- attribute [LegacyNullToEmptyString] DOMString baz;
- undefined method([EnforceRange] long foo, [Clamp] long bar,
- [LegacyNullToEmptyString] DOMString baz);
- undefined method2(optional [EnforceRange] long foo, optional [Clamp] long bar,
- optional [LegacyNullToEmptyString] DOMString baz);
- undefined method3(optional [LegacyNullToEmptyString] UTF8String foo = "");
- };
- interface C {
- attribute [EnforceRange] long? foo;
- attribute [Clamp] long? bar;
- undefined method([EnforceRange] long? foo, [Clamp] long? bar);
- undefined method2(optional [EnforceRange] long? foo, optional [Clamp] long? bar);
- };
- interface Setlike {
- setlike<[Clamp] long>;
- };
- interface Maplike {
- maplike<[Clamp] long, [EnforceRange] long>;
- };
- interface Iterable {
- iterable<[Clamp] long, [EnforceRange] long>;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(not threw, "Should not have thrown on parsing normal")
- if not threw:
- harness.check(
- results[0].innerType.hasEnforceRange(), True, "Foo is [EnforceRange]"
- )
- harness.check(results[1].innerType.hasClamp(), True, "Bar is [Clamp]")
- harness.check(
- results[2].innerType.legacyNullToEmptyString,
- True,
- "Baz is [LegacyNullToEmptyString]",
- )
- A = results[3]
- harness.check(
- A.members[0].type.hasEnforceRange(), True, "A.a is [EnforceRange]"
- )
- harness.check(A.members[1].type.hasClamp(), True, "A.b is [Clamp]")
- harness.check(
- A.members[2].type.hasEnforceRange(), True, "A.c is [EnforceRange]"
- )
- harness.check(
- A.members[3].type.hasEnforceRange(), True, "A.d is [EnforceRange]"
- )
- B = results[4]
- harness.check(
- B.members[0].type.hasEnforceRange(), True, "B.typedefFoo is [EnforceRange]"
- )
- harness.check(
- B.members[1].type.hasEnforceRange(), True, "B.foo is [EnforceRange]"
- )
- harness.check(B.members[2].type.hasClamp(), True, "B.bar is [Clamp]")
- harness.check(
- B.members[3].type.legacyNullToEmptyString,
- True,
- "B.baz is [LegacyNullToEmptyString]",
- )
- method = B.members[4].signatures()[0][1]
- harness.check(
- method[0].type.hasEnforceRange(),
- True,
- "foo argument of method is [EnforceRange]",
- )
- harness.check(
- method[1].type.hasClamp(), True, "bar argument of method is [Clamp]"
- )
- harness.check(
- method[2].type.legacyNullToEmptyString,
- True,
- "baz argument of method is [LegacyNullToEmptyString]",
- )
- method2 = B.members[5].signatures()[0][1]
- harness.check(
- method2[0].type.hasEnforceRange(),
- True,
- "foo argument of method2 is [EnforceRange]",
- )
- harness.check(
- method2[1].type.hasClamp(), True, "bar argument of method2 is [Clamp]"
- )
- harness.check(
- method2[2].type.legacyNullToEmptyString,
- True,
- "baz argument of method2 is [LegacyNullToEmptyString]",
- )
-
- method3 = B.members[6].signatures()[0][1]
- harness.check(
- method3[0].type.legacyNullToEmptyString,
- True,
- "bar argument of method2 is [LegacyNullToEmptyString]",
- )
- harness.check(
- method3[0].defaultValue.type.isUTF8String(),
- True,
- "default value of bar argument of method2 is correctly coerced to UTF8String",
- )
-
- C = results[5]
- harness.ok(C.members[0].type.nullable(), "C.foo is nullable")
- harness.ok(C.members[0].type.hasEnforceRange(), "C.foo has [EnforceRange]")
- harness.ok(C.members[1].type.nullable(), "C.bar is nullable")
- harness.ok(C.members[1].type.hasClamp(), "C.bar has [Clamp]")
- method = C.members[2].signatures()[0][1]
- harness.ok(method[0].type.nullable(), "foo argument of method is nullable")
- harness.ok(
- method[0].type.hasEnforceRange(),
- "foo argument of method has [EnforceRange]",
- )
- harness.ok(method[1].type.nullable(), "bar argument of method is nullable")
- harness.ok(method[1].type.hasClamp(), "bar argument of method has [Clamp]")
- method2 = C.members[3].signatures()[0][1]
- harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable")
- harness.ok(
- method2[0].type.hasEnforceRange(),
- "foo argument of method2 has [EnforceRange]",
- )
- harness.ok(method2[1].type.nullable(), "bar argument of method2 is nullable")
- harness.ok(method2[1].type.hasClamp(), "bar argument of method2 has [Clamp]")
-
- # Test [AllowShared]
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [AllowShared] ArrayBufferView Foo;
- dictionary A {
- required [AllowShared] ArrayBufferView a;
- [ChromeOnly, AllowShared] ArrayBufferView b;
- Foo c;
- };
- interface B {
- attribute Foo typedefFoo;
- attribute [AllowShared] ArrayBufferView foo;
- undefined method([AllowShared] ArrayBufferView foo);
- undefined method2(optional [AllowShared] ArrayBufferView foo);
- };
- interface C {
- attribute [AllowShared] ArrayBufferView? foo;
- undefined method([AllowShared] ArrayBufferView? foo);
- undefined method2(optional [AllowShared] ArrayBufferView? foo);
- };
- interface Setlike {
- setlike<[AllowShared] ArrayBufferView>;
- };
- interface Maplike {
- maplike<[Clamp] long, [AllowShared] ArrayBufferView>;
- };
- interface Iterable {
- iterable<[Clamp] long, [AllowShared] ArrayBufferView>;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(not threw, "Should not have thrown on parsing normal")
- if not threw:
- harness.ok(results[0].innerType.hasAllowShared(), "Foo is [AllowShared]")
- A = results[1]
- harness.ok(A.members[0].type.hasAllowShared(), "A.a is [AllowShared]")
- harness.ok(A.members[1].type.hasAllowShared(), "A.b is [AllowShared]")
- harness.ok(A.members[2].type.hasAllowShared(), "A.c is [AllowShared]")
- B = results[2]
- harness.ok(B.members[0].type.hasAllowShared(), "B.typedefFoo is [AllowShared]")
- harness.ok(B.members[1].type.hasAllowShared(), "B.foo is [AllowShared]")
- method = B.members[2].signatures()[0][1]
- harness.ok(
- method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]"
- )
- method2 = B.members[3].signatures()[0][1]
- harness.ok(
- method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]"
- )
- C = results[3]
- harness.ok(C.members[0].type.nullable(), "C.foo is nullable")
- harness.ok(C.members[0].type.hasAllowShared(), "C.foo is [AllowShared]")
- method = C.members[1].signatures()[0][1]
- harness.ok(method[0].type.nullable(), "foo argument of method is nullable")
- harness.ok(
- method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]"
- )
- method2 = C.members[2].signatures()[0][1]
- harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable")
- harness.ok(
- method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]"
- )
-
- ATTRIBUTES = [
- ("[Clamp]", "long"),
- ("[EnforceRange]", "long"),
- ("[LegacyNullToEmptyString]", "DOMString"),
- ("[AllowShared]", "ArrayBufferView"),
- ]
- TEMPLATES = [
- (
- "required dictionary members",
- """
- dictionary Foo {
- %s required %s foo;
- };
- """,
- ),
- (
- "optional arguments",
- """
- interface Foo {
- undefined foo(%s optional %s foo);
- };
- """,
- ),
- (
- "typedefs",
- """
- %s typedef %s foo;
- """,
- ),
- (
- "attributes",
- """
- interface Foo {
- %s attribute %s foo;
- };
- """,
- ),
- (
- "readonly attributes",
- """
- interface Foo {
- readonly attribute %s %s foo;
- };
- """,
- ),
- (
- "readonly unresolved attributes",
- """
- interface Foo {
- readonly attribute Bar baz;
- };
- typedef %s %s Bar;
- """,
- ),
- (
- "method",
- """
- interface Foo {
- %s %s foo();
- };
- """,
- ),
- (
- "interface",
- """
- %s
- interface Foo {
- attribute %s foo;
- };
- """,
- ),
- (
- "partial interface",
- """
- interface Foo {
- undefined foo();
- };
- %s
- partial interface Foo {
- attribute %s bar;
- };
- """,
- ),
- (
- "interface mixin",
- """
- %s
- interface mixin Foo {
- attribute %s foo;
- };
- """,
- ),
- (
- "namespace",
- """
- %s
- namespace Foo {
- attribute %s foo;
- };
- """,
- ),
- (
- "partial namespace",
- """
- namespace Foo {
- undefined foo();
- };
- %s
- partial namespace Foo {
- attribute %s bar;
- };
- """,
- ),
- (
- "dictionary",
- """
- %s
- dictionary Foo {
- %s foo;
- };
- """,
- ),
- ]
-
- for (name, template) in TEMPLATES:
- parser = parser.reset()
- threw = False
- try:
- parser.parse(template % ("", "long"))
- parser.finish()
- except:
- threw = True
- harness.ok(not threw, "Template for %s parses without attributes" % name)
- for (attribute, type) in ATTRIBUTES:
- parser = parser.reset()
- threw = False
- try:
- parser.parse(template % (attribute, type))
- parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow %s on %s" % (attribute, name))
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [Clamp, EnforceRange] long Foo;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange]")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [EnforceRange, Clamp] long Foo;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange]")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [Clamp] long Foo;
- typedef [EnforceRange] Foo bar;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange] via typedefs")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [EnforceRange] long Foo;
- typedef [Clamp] Foo bar;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange] via typedefs")
-
- TYPES = [
- "DOMString",
- "unrestricted float",
- "float",
- "unrestricted double",
- "double",
- ]
-
- for type in TYPES:
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [Clamp] %s Foo;
- """
- % type
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow [Clamp] on %s" % type)
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [EnforceRange] %s Foo;
- """
- % type
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow [EnforceRange] on %s" % type)
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [LegacyNullToEmptyString] long Foo;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow [LegacyNullToEmptyString] on long")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [LegacyNullToEmptyString] JSString Foo;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow [LegacyNullToEmptyString] on JSString")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [LegacyNullToEmptyString] DOMString? Foo;
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not allow [LegacyNullToEmptyString] on nullable DOMString"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [AllowShared] DOMString Foo;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[AllowShared] only allowed on buffer source types")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef [AllowShared=something] ArrayBufferView Foo;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[AllowShared] must take no arguments")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- undefined foo([Clamp] Bar arg);
- };
- typedef long Bar;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(not threw, "Should allow type attributes on unresolved types")
- harness.check(
- results[0].members[0].signatures()[0][1][0].type.hasClamp(),
- True,
- "Unresolved types with type attributes should correctly resolve with attributes",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- undefined foo(Bar arg);
- };
- typedef [Clamp] long Bar;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(not threw, "Should allow type attributes on typedefs")
- harness.check(
- results[0].members[0].signatures()[0][1][0].type.hasClamp(),
- True,
- "Unresolved types that resolve to typedefs with attributes should correctly resolve with attributes",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py b/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py
deleted file mode 100644
index 6c913bba822..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_builtin_filename.py
+++ /dev/null
@@ -1,14 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface Test {
- attribute long b;
- };
- """
- )
-
- attr = parser.finish()[0].members[0]
- harness.check(attr.type.filename(), "<builtin>", "Filename on builtin type")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_builtins.py b/components/script/dom/bindings/codegen/parser/tests/test_builtins.py
deleted file mode 100644
index a75a12e8143..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_builtins.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestBuiltins {
- attribute boolean b;
- attribute byte s8;
- attribute octet u8;
- attribute short s16;
- attribute unsigned short u16;
- attribute long s32;
- attribute unsigned long u32;
- attribute long long s64;
- attribute unsigned long long u64;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestBuiltins interface parsed without error.")
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
- iface = results[0]
- harness.check(
- iface.identifier.QName(), "::TestBuiltins", "Interface has the right QName"
- )
- harness.check(iface.identifier.name, "TestBuiltins", "Interface has the right name")
- harness.check(iface.parent, None, "Interface has no parent")
-
- members = iface.members
- harness.check(len(members), 9, "Should be one production")
-
- names = ["b", "s8", "u8", "s16", "u16", "s32", "u32", "s64", "u64", "ts"]
- types = [
- "Boolean",
- "Byte",
- "Octet",
- "Short",
- "UnsignedShort",
- "Long",
- "UnsignedLong",
- "LongLong",
- "UnsignedLongLong",
- "UnsignedLongLong",
- ]
- for i in range(9):
- attr = members[i]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.check(
- attr.identifier.QName(),
- "::TestBuiltins::" + names[i],
- "Attr has correct QName",
- )
- harness.check(attr.identifier.name, names[i], "Attr has correct name")
- harness.check(str(attr.type), types[i], "Attr type is the correct name")
- harness.ok(attr.type.isPrimitive(), "Should be a primitive type")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py b/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py
deleted file mode 100644
index a6f9f6ab9cb..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_bytestring.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# -*- coding: UTF-8 -*-
-
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestByteString {
- attribute ByteString bs;
- attribute DOMString ds;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestByteString interface parsed without error.")
-
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
- iface = results[0]
- harness.check(
- iface.identifier.QName(), "::TestByteString", "Interface has the right QName"
- )
- harness.check(
- iface.identifier.name, "TestByteString", "Interface has the right name"
- )
- harness.check(iface.parent, None, "Interface has no parent")
-
- members = iface.members
- harness.check(len(members), 2, "Should be two productions")
-
- attr = members[0]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.check(
- attr.identifier.QName(), "::TestByteString::bs", "Attr has correct QName"
- )
- harness.check(attr.identifier.name, "bs", "Attr has correct name")
- harness.check(str(attr.type), "ByteString", "Attr type is the correct name")
- harness.ok(attr.type.isByteString(), "Should be ByteString type")
- harness.ok(attr.type.isString(), "Should be String collective type")
- harness.ok(not attr.type.isDOMString(), "Should be not be DOMString type")
-
- # now check we haven't broken DOMStrings in the process.
- attr = members[1]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.check(
- attr.identifier.QName(), "::TestByteString::ds", "Attr has correct QName"
- )
- harness.check(attr.identifier.name, "ds", "Attr has correct name")
- harness.check(str(attr.type), "String", "Attr type is the correct name")
- harness.ok(attr.type.isDOMString(), "Should be DOMString type")
- harness.ok(attr.type.isString(), "Should be String collective type")
- harness.ok(not attr.type.isByteString(), "Should be not be ByteString type")
-
- # Cannot represent constant ByteString in IDL.
- threw = False
- try:
- parser.parse(
- """
- interface ConstByteString {
- const ByteString foo = "hello"
- };
- """
- )
- except WebIDL.WebIDLError:
- threw = True
- harness.ok(
- threw, "Should have thrown a WebIDL error for ByteString default in interface"
- )
-
- # Can have optional ByteStrings with default values
- try:
- parser.parse(
- """
- interface OptionalByteString {
- undefined passByteString(optional ByteString arg = "hello");
- };
- """
- )
- results2 = parser.finish()
- except WebIDL.WebIDLError as e:
- harness.ok(
- False,
- "Should not have thrown a WebIDL error for ByteString "
- "default in dictionary. " + str(e),
- )
-
- # Can have a default ByteString value in a dictionary
- try:
- parser.parse(
- """
- dictionary OptionalByteStringDict {
- ByteString item = "some string";
- };
- """
- )
- results3 = parser.finish()
- except WebIDL.WebIDLError as e:
- harness.ok(
- False,
- "Should not have thrown a WebIDL error for ByteString "
- "default in dictionary. " + str(e),
- )
-
- # Don't allow control characters in ByteString literals
- threw = False
- try:
- parser.parse(
- """
- dictionary OptionalByteStringDict2 {
- ByteString item = "\x03";
- };
- """
- )
- results4 = parser.finish()
- except WebIDL.WebIDLError as e:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown a WebIDL error for invalid ByteString "
- "default in dictionary",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback.py b/components/script/dom/bindings/codegen/parser/tests/test_callback.py
deleted file mode 100644
index 407644a6a8d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_callback.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestCallback {
- attribute CallbackType? listener;
- };
-
- callback CallbackType = boolean (unsigned long arg);
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestCallback interface parsed without error.")
- harness.check(len(results), 2, "Should be two productions.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestCallback", "Interface has the right QName"
- )
- harness.check(iface.identifier.name, "TestCallback", "Interface has the right name")
- harness.check(len(iface.members), 1, "Expect %s members" % 1)
-
- attr = iface.members[0]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.ok(attr.isAttr(), "Should be an attribute")
- harness.ok(not attr.isMethod(), "Attr is not an method")
- harness.ok(not attr.isConst(), "Attr is not a const")
- harness.check(
- attr.identifier.QName(), "::TestCallback::listener", "Attr has the right QName"
- )
- harness.check(attr.identifier.name, "listener", "Attr has the right name")
- t = attr.type
- harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type")
- harness.ok(isinstance(t, WebIDL.IDLNullableType), "Attr has the right type")
- harness.ok(t.isCallback(), "Attr has the right type")
-
- callback = results[1]
- harness.ok(not callback.isConstructor(), "callback is not constructor")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py b/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py
deleted file mode 100644
index 832a92bb147..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_callback_constructor.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestCallbackConstructor {
- attribute CallbackConstructorType? constructorAttribute;
- };
-
- callback constructor CallbackConstructorType = TestCallbackConstructor (unsigned long arg);
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestCallbackConstructor interface parsed without error.")
- harness.check(len(results), 2, "Should be two productions.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(),
- "::TestCallbackConstructor",
- "Interface has the right QName",
- )
- harness.check(
- iface.identifier.name, "TestCallbackConstructor", "Interface has the right name"
- )
- harness.check(len(iface.members), 1, "Expect %s members" % 1)
-
- attr = iface.members[0]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.ok(attr.isAttr(), "Should be an attribute")
- harness.ok(not attr.isMethod(), "Attr is not an method")
- harness.ok(not attr.isConst(), "Attr is not a const")
- harness.check(
- attr.identifier.QName(),
- "::TestCallbackConstructor::constructorAttribute",
- "Attr has the right QName",
- )
- harness.check(
- attr.identifier.name, "constructorAttribute", "Attr has the right name"
- )
- t = attr.type
- harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type")
- harness.ok(isinstance(t, WebIDL.IDLNullableType), "Attr has the right type")
- harness.ok(t.isCallback(), "Attr has the right type")
-
- callback = results[1]
- harness.ok(callback.isConstructor(), "Callback is constructor")
-
- parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyTreatNonObjectAsNull]
- callback constructor CallbackConstructorType = object ();
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should throw on LegacyTreatNonObjectAsNull callback constructors"
- )
-
- parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [MOZ_CAN_RUN_SCRIPT_BOUNDARY]
- callback constructor CallbackConstructorType = object ();
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not permit MOZ_CAN_RUN_SCRIPT_BOUNDARY callback constructors"
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py b/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py
deleted file mode 100644
index 0d657f48032..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_callback_interface.py
+++ /dev/null
@@ -1,106 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- callback interface TestCallbackInterface {
- attribute boolean bool;
- };
- """
- )
-
- results = parser.finish()
-
- iface = results[0]
-
- harness.ok(iface.isCallback(), "Interface should be a callback")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestInterface {
- };
- callback interface TestCallbackInterface : TestInterface {
- attribute boolean bool;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow non-callback parent of callback interface")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestInterface : TestCallbackInterface {
- };
- callback interface TestCallbackInterface {
- attribute boolean bool;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow callback parent of non-callback interface")
-
- parser = parser.reset()
- parser.parse(
- """
- callback interface TestCallbackInterface1 {
- undefined foo();
- };
- callback interface TestCallbackInterface2 {
- undefined foo(DOMString arg);
- undefined foo(TestCallbackInterface1 arg);
- };
- callback interface TestCallbackInterface3 {
- undefined foo(DOMString arg);
- undefined foo(TestCallbackInterface1 arg);
- static undefined bar();
- };
- callback interface TestCallbackInterface4 {
- undefined foo(DOMString arg);
- undefined foo(TestCallbackInterface1 arg);
- static undefined bar();
- const long baz = 5;
- };
- callback interface TestCallbackInterface5 {
- static attribute boolean bool;
- undefined foo();
- };
- callback interface TestCallbackInterface6 {
- undefined foo(DOMString arg);
- undefined foo(TestCallbackInterface1 arg);
- undefined bar();
- };
- callback interface TestCallbackInterface7 {
- static attribute boolean bool;
- };
- callback interface TestCallbackInterface8 {
- attribute boolean bool;
- };
- callback interface TestCallbackInterface9 : TestCallbackInterface1 {
- undefined foo();
- };
- callback interface TestCallbackInterface10 : TestCallbackInterface1 {
- undefined bar();
- };
- """
- )
- results = parser.finish()
- for (i, iface) in enumerate(results):
- harness.check(
- iface.isSingleOperationInterface(),
- i < 4,
- "Interface %s should be a single operation interface"
- % iface.identifier.name,
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py b/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py
deleted file mode 100644
index c56c3dbde10..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_cereactions.py
+++ /dev/null
@@ -1,157 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions(DOMString a)] undefined foo(boolean arg2);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] with an argument")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions(DOMString b)] readonly attribute boolean bar;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] with an argument")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] attribute boolean bar;
- };
- """
- )
-
- results = parser.finish()
- except Exception as e:
- harness.ok(
- False,
- "Shouldn't have thrown for [CEReactions] used on writable attribute. %s"
- % e,
- )
- threw = True
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] undefined foo(boolean arg2);
- };
- """
- )
-
- results = parser.finish()
- except Exception as e:
- harness.ok(
- False,
- "Shouldn't have thrown for [CEReactions] used on regular operations. %s"
- % e,
- )
- threw = True
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] readonly attribute boolean A;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should have thrown for [CEReactions] used on a readonly attribute"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [CEReactions]
- interface Foo {
- }
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] used on a interface")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] getter any(DOMString name);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] used on a named getter")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] legacycaller double compute(double x);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] used on a legacycaller")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- [CEReactions] stringifier DOMString ();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for [CEReactions] used on a stringifier")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py b/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py
deleted file mode 100644
index 2aef8ebe8ff..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_conditional_dictionary_member.py
+++ /dev/null
@@ -1,128 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- dictionary Dict {
- any foo;
- [ChromeOnly] any bar;
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should have a dictionary")
- members = results[0].members
- harness.check(len(members), 2, "Should have two members")
- # Note that members are ordered lexicographically, so "bar" comes
- # before "foo".
- harness.ok(
- members[0].getExtendedAttribute("ChromeOnly"), "First member is not ChromeOnly"
- )
- harness.ok(
- not members[1].getExtendedAttribute("ChromeOnly"), "Second member is ChromeOnly"
- )
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary Dict {
- any foo;
- any bar;
- };
-
- interface Iface {
- [Constant, Cached] readonly attribute Dict dict;
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 2, "Should have a dictionary and an interface")
-
- parser = parser.reset()
- exception = None
- try:
- parser.parse(
- """
- dictionary Dict {
- any foo;
- [ChromeOnly] any bar;
- };
-
- interface Iface {
- [Constant, Cached] readonly attribute Dict dict;
- };
- """
- )
- results = parser.finish()
- except Exception as e:
- exception = e
-
- harness.ok(exception, "Should have thrown.")
- harness.check(
- exception.message,
- "[Cached] and [StoreInSlot] must not be used on an attribute "
- "whose type contains a [ChromeOnly] dictionary member",
- "Should have thrown the right exception",
- )
-
- parser = parser.reset()
- exception = None
- try:
- parser.parse(
- """
- dictionary ParentDict {
- [ChromeOnly] any bar;
- };
-
- dictionary Dict : ParentDict {
- any foo;
- };
-
- interface Iface {
- [Constant, Cached] readonly attribute Dict dict;
- };
- """
- )
- results = parser.finish()
- except Exception as e:
- exception = e
-
- harness.ok(exception, "Should have thrown (2).")
- harness.check(
- exception.message,
- "[Cached] and [StoreInSlot] must not be used on an attribute "
- "whose type contains a [ChromeOnly] dictionary member",
- "Should have thrown the right exception (2)",
- )
-
- parser = parser.reset()
- exception = None
- try:
- parser.parse(
- """
- dictionary GrandParentDict {
- [ChromeOnly] any baz;
- };
-
- dictionary ParentDict : GrandParentDict {
- any bar;
- };
-
- dictionary Dict : ParentDict {
- any foo;
- };
-
- interface Iface {
- [Constant, Cached] readonly attribute Dict dict;
- };
- """
- )
- results = parser.finish()
- except Exception as e:
- exception = e
-
- harness.ok(exception, "Should have thrown (3).")
- harness.check(
- exception.message,
- "[Cached] and [StoreInSlot] must not be used on an attribute "
- "whose type contains a [ChromeOnly] dictionary member",
- "Should have thrown the right exception (3)",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_const.py b/components/script/dom/bindings/codegen/parser/tests/test_const.py
deleted file mode 100644
index f2d4b79d467..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_const.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import WebIDL
-
-expected = [
- ("::TestConsts::zero", "zero", "Byte", 0),
- ("::TestConsts::b", "b", "Byte", -1),
- ("::TestConsts::o", "o", "Octet", 2),
- ("::TestConsts::s", "s", "Short", -3),
- ("::TestConsts::us", "us", "UnsignedShort", 4),
- ("::TestConsts::l", "l", "Long", -5),
- ("::TestConsts::ul", "ul", "UnsignedLong", 6),
- ("::TestConsts::ull", "ull", "UnsignedLongLong", 7),
- ("::TestConsts::ll", "ll", "LongLong", -8),
- ("::TestConsts::t", "t", "Boolean", True),
- ("::TestConsts::f", "f", "Boolean", False),
- ("::TestConsts::fl", "fl", "Float", 0.2),
- ("::TestConsts::db", "db", "Double", 0.2),
- ("::TestConsts::ufl", "ufl", "UnrestrictedFloat", 0.2),
- ("::TestConsts::udb", "udb", "UnrestrictedDouble", 0.2),
- ("::TestConsts::fli", "fli", "Float", 2),
- ("::TestConsts::dbi", "dbi", "Double", 2),
- ("::TestConsts::ufli", "ufli", "UnrestrictedFloat", 2),
- ("::TestConsts::udbi", "udbi", "UnrestrictedDouble", 2),
-]
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestConsts {
- const byte zero = 0;
- const byte b = -1;
- const octet o = 2;
- const short s = -3;
- const unsigned short us = 0x4;
- const long l = -0X5;
- const unsigned long ul = 6;
- const unsigned long long ull = 7;
- const long long ll = -010;
- const boolean t = true;
- const boolean f = false;
- const float fl = 0.2;
- const double db = 0.2;
- const unrestricted float ufl = 0.2;
- const unrestricted double udb = 0.2;
- const float fli = 2;
- const double dbi = 2;
- const unrestricted float ufli = 2;
- const unrestricted double udbi = 2;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestConsts interface parsed without error.")
- harness.check(len(results), 1, "Should be one production.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestConsts", "Interface has the right QName"
- )
- harness.check(iface.identifier.name, "TestConsts", "Interface has the right name")
- harness.check(
- len(iface.members), len(expected), "Expect %s members" % len(expected)
- )
-
- for (const, (QName, name, type, value)) in zip(iface.members, expected):
- harness.ok(isinstance(const, WebIDL.IDLConst), "Should be an IDLConst")
- harness.ok(const.isConst(), "Const is a const")
- harness.ok(not const.isAttr(), "Const is not an attr")
- harness.ok(not const.isMethod(), "Const is not a method")
- harness.check(const.identifier.QName(), QName, "Const has the right QName")
- harness.check(const.identifier.name, name, "Const has the right name")
- harness.check(str(const.type), type, "Const has the right type")
- harness.ok(const.type.isPrimitive(), "All consts should be primitive")
- harness.check(
- str(const.value.type),
- str(const.type),
- "Const's value has the same type as the type",
- )
- harness.check(const.value.value, value, "Const value has the right value.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestConsts {
- const boolean? zero = 0;
- };
- """
- )
- parser.finish()
- except:
- threw = True
- harness.ok(threw, "Nullable types are not allowed for consts.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor.py
deleted file mode 100644
index de5d52f1412..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_constructor.py
+++ /dev/null
@@ -1,594 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- def checkArgument(argument, QName, name, type, optional, variadic):
- harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument")
- harness.check(
- argument.identifier.QName(), QName, "Argument has the right QName"
- )
- harness.check(argument.identifier.name, name, "Argument has the right name")
- harness.check(str(argument.type), type, "Argument has the right return type")
- harness.check(
- argument.optional, optional, "Argument has the right optional value"
- )
- harness.check(
- argument.variadic, variadic, "Argument has the right variadic value"
- )
-
- def checkMethod(
- method,
- QName,
- name,
- signatures,
- static=True,
- getter=False,
- setter=False,
- deleter=False,
- legacycaller=False,
- stringifier=False,
- chromeOnly=False,
- htmlConstructor=False,
- secureContext=False,
- pref=None,
- func=None,
- ):
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod")
- harness.ok(method.isMethod(), "Method is a method")
- harness.ok(not method.isAttr(), "Method is not an attr")
- harness.ok(not method.isConst(), "Method is not a const")
- harness.check(method.identifier.QName(), QName, "Method has the right QName")
- harness.check(method.identifier.name, name, "Method has the right name")
- harness.check(method.isStatic(), static, "Method has the correct static value")
- harness.check(method.isGetter(), getter, "Method has the correct getter value")
- harness.check(method.isSetter(), setter, "Method has the correct setter value")
- harness.check(
- method.isDeleter(), deleter, "Method has the correct deleter value"
- )
- harness.check(
- method.isLegacycaller(),
- legacycaller,
- "Method has the correct legacycaller value",
- )
- harness.check(
- method.isStringifier(),
- stringifier,
- "Method has the correct stringifier value",
- )
- harness.check(
- method.getExtendedAttribute("ChromeOnly") is not None,
- chromeOnly,
- "Method has the correct value for ChromeOnly",
- )
- harness.check(
- method.isHTMLConstructor(),
- htmlConstructor,
- "Method has the correct htmlConstructor value",
- )
- harness.check(
- len(method.signatures()),
- len(signatures),
- "Method has the correct number of signatures",
- )
- harness.check(
- method.getExtendedAttribute("Pref"),
- pref,
- "Method has the correct pref value",
- )
- harness.check(
- method.getExtendedAttribute("Func"),
- func,
- "Method has the correct func value",
- )
- harness.check(
- method.getExtendedAttribute("SecureContext") is not None,
- secureContext,
- "Method has the correct SecureContext value",
- )
-
- sigpairs = zip(method.signatures(), signatures)
- for (gotSignature, expectedSignature) in sigpairs:
- (gotRetType, gotArgs) = gotSignature
- (expectedRetType, expectedArgs) = expectedSignature
-
- harness.check(
- str(gotRetType), expectedRetType, "Method has the expected return type."
- )
-
- for i in range(0, len(gotArgs)):
- (QName, name, type, optional, variadic) = expectedArgs[i]
- checkArgument(gotArgs[i], QName, name, type, optional, variadic)
-
- def checkResults(results):
- harness.check(len(results), 3, "Should be three productions")
- harness.ok(
- isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface"
- )
- harness.ok(
- isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface"
- )
- harness.ok(
- isinstance(results[2], WebIDL.IDLInterface), "Should be an IDLInterface"
- )
-
- checkMethod(
- results[0].ctor(),
- "::TestConstructorNoArgs::constructor",
- "constructor",
- [("TestConstructorNoArgs (Wrapper)", [])],
- )
- harness.check(
- len(results[0].members), 0, "TestConstructorNoArgs should not have members"
- )
- checkMethod(
- results[1].ctor(),
- "::TestConstructorWithArgs::constructor",
- "constructor",
- [
- (
- "TestConstructorWithArgs (Wrapper)",
- [
- (
- "::TestConstructorWithArgs::constructor::name",
- "name",
- "String",
- False,
- False,
- )
- ],
- )
- ],
- )
- harness.check(
- len(results[1].members),
- 0,
- "TestConstructorWithArgs should not have members",
- )
- checkMethod(
- results[2].ctor(),
- "::TestConstructorOverloads::constructor",
- "constructor",
- [
- (
- "TestConstructorOverloads (Wrapper)",
- [
- (
- "::TestConstructorOverloads::constructor::foo",
- "foo",
- "Object",
- False,
- False,
- )
- ],
- ),
- (
- "TestConstructorOverloads (Wrapper)",
- [
- (
- "::TestConstructorOverloads::constructor::bar",
- "bar",
- "Boolean",
- False,
- False,
- )
- ],
- ),
- ],
- )
- harness.check(
- len(results[2].members),
- 0,
- "TestConstructorOverloads should not have members",
- )
-
- parser.parse(
- """
- interface TestConstructorNoArgs {
- constructor();
- };
-
- interface TestConstructorWithArgs {
- constructor(DOMString name);
- };
-
- interface TestConstructorOverloads {
- constructor(object foo);
- constructor(boolean bar);
- };
- """
- )
- results = parser.finish()
- checkResults(results)
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestPrefConstructor {
- [Pref="dom.webidl.test1"] constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestPrefConstructor::constructor",
- "constructor",
- [("TestPrefConstructor (Wrapper)", [])],
- pref=["dom.webidl.test1"],
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestChromeOnlyConstructor {
- [ChromeOnly] constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestChromeOnlyConstructor::constructor",
- "constructor",
- [("TestChromeOnlyConstructor (Wrapper)", [])],
- chromeOnly=True,
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestSCConstructor {
- [SecureContext] constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestSCConstructor::constructor",
- "constructor",
- [("TestSCConstructor (Wrapper)", [])],
- secureContext=True,
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestFuncConstructor {
- [Func="Document::IsWebAnimationsEnabled"] constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestFuncConstructor::constructor",
- "constructor",
- [("TestFuncConstructor (Wrapper)", [])],
- func=["Document::IsWebAnimationsEnabled"],
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestPrefChromeOnlySCFuncConstructor {
- [ChromeOnly, Pref="dom.webidl.test1", SecureContext, Func="Document::IsWebAnimationsEnabled"]
- constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestPrefChromeOnlySCFuncConstructor::constructor",
- "constructor",
- [("TestPrefChromeOnlySCFuncConstructor (Wrapper)", [])],
- func=["Document::IsWebAnimationsEnabled"],
- pref=["dom.webidl.test1"],
- chromeOnly=True,
- secureContext=True,
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestHTMLConstructor {
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- checkMethod(
- results[0].ctor(),
- "::TestHTMLConstructor::constructor",
- "constructor",
- [("TestHTMLConstructor (Wrapper)", [])],
- htmlConstructor=True,
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestChromeOnlyConstructor {
- constructor()
- [ChromeOnly] constructor(DOMString a);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have both a constructor and a ChromeOnly constructor")
-
- # Test HTMLConstructor with argument
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorWithArgs {
- [HTMLConstructor] constructor(DOMString a);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "HTMLConstructor should take no argument")
-
- # Test HTMLConstructor on a callback interface
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- callback interface TestHTMLConstructorOnCallbackInterface {
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "HTMLConstructor can't be used on a callback interface")
-
- # Test HTMLConstructor and constructor operation
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- constructor();
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have both a constructor and a HTMLConstructor")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [Throws]
- constructor();
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have both a throwing constructor and a HTMLConstructor")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- constructor(DOMString a);
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have both a HTMLConstructor and a constructor operation")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [Throws]
- constructor(DOMString a);
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Can't have both a HTMLConstructor and a throwing constructor " "operation",
- )
-
- # Test HTMLConstructor and [ChromeOnly] constructor operation
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [ChromeOnly]
- constructor();
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have both a ChromeOnly constructor and a HTMLConstructor")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [Throws, ChromeOnly]
- constructor();
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Can't have both a throwing chromeonly constructor and a " "HTMLConstructor",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [ChromeOnly]
- constructor(DOMString a);
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Can't have both a HTMLConstructor and a chromeonly constructor " "operation",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestHTMLConstructorAndConstructor {
- [Throws, ChromeOnly]
- constructor(DOMString a);
- [HTMLConstructor] constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Can't have both a HTMLConstructor and a throwing chromeonly "
- "constructor operation",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyNoInterfaceObject]
- interface InterfaceWithoutInterfaceObject {
- constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Can't have a constructor operation on a [LegacyNoInterfaceObject] "
- "interface",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface InterfaceWithPartial {
- };
-
- partial interface InterfaceWithPartial {
- constructor();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have a constructor operation on a partial interface")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface InterfaceWithMixin {
- };
-
- interface mixin Mixin {
- constructor();
- };
-
- InterfaceWithMixin includes Mixin
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Can't have a constructor operation on a mixin")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py
deleted file mode 100644
index 5f3663602e4..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_constructor_global.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import traceback
-
-
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=TestConstructorGlobal]
- interface TestConstructorGlobal {
- constructor();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=TestLegacyFactoryFunctionGlobal,
- LegacyFactoryFunction=FooBar]
- interface TestLegacyFactoryFunctionGlobal {
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyFactoryFunction=FooBar, Global,
- Exposed=TestLegacyFactoryFunctionGlobal]
- interface TestLegacyFactoryFunctionGlobal {
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=TestHTMLConstructorGlobal]
- interface TestHTMLConstructorGlobal {
- [HTMLConstructor] constructor();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py b/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py
deleted file mode 100644
index 9855352a9d4..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_constructor_no_interface_object.py
+++ /dev/null
@@ -1,47 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- [LegacyNoInterfaceObject]
- interface TestConstructorLegacyNoInterfaceObject {
- constructor();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
-
- parser.parse(
- """
- [LegacyNoInterfaceObject, LegacyFactoryFunction=FooBar]
- interface TestLegacyFactoryFunctionLegacyNoInterfaceObject {
- };
- """
- )
-
- # Test HTMLConstructor and LegacyNoInterfaceObject
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- [LegacyNoInterfaceObject]
- interface TestHTMLConstructorLegacyNoInterfaceObject {
- [HTMLConstructor] constructor();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py b/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py
deleted file mode 100644
index 6649f4ec05d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_deduplicate.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface Foo;
- interface Bar;
- interface Foo;
- """
- )
-
- results = parser.finish()
-
- # There should be no duplicate interfaces in the result.
- expectedNames = sorted(["Foo", "Bar"])
- actualNames = sorted(map(lambda iface: iface.identifier.name, results))
- harness.check(
- actualNames, expectedNames, "Parser shouldn't output duplicate names."
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py b/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py
deleted file mode 100644
index e7d04f995a9..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_dictionary.py
+++ /dev/null
@@ -1,875 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- dictionary Dict2 : Dict1 {
- long child = 5;
- Dict1 aaandAnother;
- };
- dictionary Dict1 {
- long parent;
- double otherParent;
- };
- """
- )
- results = parser.finish()
-
- dict1 = results[1]
- dict2 = results[0]
-
- harness.check(len(dict1.members), 2, "Dict1 has two members")
- harness.check(len(dict2.members), 2, "Dict2 has four members")
-
- harness.check(
- dict1.members[0].identifier.name, "otherParent", "'o' comes before 'p'"
- )
- harness.check(
- dict1.members[1].identifier.name, "parent", "'o' really comes before 'p'"
- )
- harness.check(
- dict2.members[0].identifier.name, "aaandAnother", "'a' comes before 'c'"
- )
- harness.check(
- dict2.members[1].identifier.name, "child", "'a' really comes before 'c'"
- )
-
- # Test partial dictionary.
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- long c;
- long g;
- };
- partial dictionary A {
- long h;
- long d;
- };
- """
- )
- results = parser.finish()
-
- dict1 = results[0]
- harness.check(len(dict1.members), 4, "Dict1 has four members")
- harness.check(dict1.members[0].identifier.name, "c", "c should be first")
- harness.check(dict1.members[1].identifier.name, "d", "d should come after c")
- harness.check(dict1.members[2].identifier.name, "g", "g should come after d")
- harness.check(dict1.members[3].identifier.name, "h", "h should be last")
-
- # Now reset our parser
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Dict {
- long prop = 5;
- long prop;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow name duplication in a dictionary")
-
- # Test no name duplication across normal and partial dictionary.
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- long prop = 5;
- };
- partial dictionary A {
- long prop;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not allow name duplication across normal and partial dictionary"
- )
-
- # Now reset our parser again
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Dict1 : Dict2 {
- long prop = 5;
- };
- dictionary Dict2 : Dict3 {
- long prop2;
- };
- dictionary Dict3 {
- double prop;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not allow name duplication in a dictionary and " "its ancestor"
- )
-
- # More reset
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Iface {};
- dictionary Dict : Iface {
- long prop;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow non-dictionary parents for dictionaries")
-
- # Even more reset
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A : B {};
- dictionary B : A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow cycles in dictionary inheritance chains")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- [LegacyNullToEmptyString] DOMString foo;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not allow [LegacyNullToEmptyString] on dictionary members"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(A arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Trailing dictionary arg must be optional")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional A arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Trailing dictionary arg must have a default value")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo((A or DOMString) arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Trailing union arg containing a dictionary must be optional")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or DOMString) arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Trailing union arg containing a dictionary must have a default value"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(A arg1, optional long arg2);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Dictionary arg followed by optional arg must be optional")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional A arg1, optional long arg2);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Dictionary arg followed by optional arg must have default value")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(A arg1, optional long arg2, long arg3);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- not threw,
- "Dictionary arg followed by non-optional arg doesn't have to be optional",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo((A or DOMString) arg1, optional long arg2);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Union arg containing dictionary followed by optional arg must " "be optional",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or DOMString) arg1, optional long arg2);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Union arg containing dictionary followed by optional arg must "
- "have a default value",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(A arg1, long arg2);
- };
- """
- )
- results = parser.finish()
- harness.ok(True, "Dictionary arg followed by required arg can be required")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional A? arg1 = {});
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = x
-
- harness.ok(threw, "Optional dictionary arg must not be nullable")
- harness.ok(
- "nullable" in str(threw),
- "Must have the expected exception for optional nullable dictionary arg",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- required long x;
- };
- interface X {
- undefined doFoo(A? arg1);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = x
-
- harness.ok(threw, "Required dictionary arg must not be nullable")
- harness.ok(
- "nullable" in str(threw),
- "Must have the expected exception for required nullable " "dictionary arg",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or long)? arg1 = {});
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = x
-
- harness.ok(threw, "Dictionary arg must not be in an optional nullable union")
- harness.ok(
- "nullable" in str(threw),
- "Must have the expected exception for optional nullable union "
- "arg containing dictionary",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- required long x;
- };
- interface X {
- undefined doFoo((A or long)? arg1);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = x
-
- harness.ok(threw, "Dictionary arg must not be in a required nullable union")
- harness.ok(
- "nullable" in str(threw),
- "Must have the expected exception for required nullable union "
- "arg containing dictionary",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(sequence<A?> arg1);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(not threw, "Nullable union should be allowed in a sequence argument")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or long?) arg1);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Dictionary must not be in a union with a nullable type")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (long? or A) arg1);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "A nullable type must not be in a union with a dictionary")
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- A? doFoo();
- };
- """
- )
- results = parser.finish()
- harness.ok(True, "Dictionary return value can be nullable")
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional A arg = {});
- };
- """
- )
- results = parser.finish()
- harness.ok(True, "Dictionary arg should actually parse")
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or DOMString) arg = {});
- };
- """
- )
- results = parser.finish()
- harness.ok(True, "Union arg containing a dictionary should actually parse")
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary A {
- };
- interface X {
- undefined doFoo(optional (A or DOMString) arg = "abc");
- };
- """
- )
- results = parser.finish()
- harness.ok(
- True,
- "Union arg containing a dictionary with string default should actually parse",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- Foo foo;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Member type must not be its Dictionary.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo3 : Foo {
- short d;
- };
-
- dictionary Foo2 : Foo3 {
- boolean c;
- };
-
- dictionary Foo1 : Foo2 {
- long a;
- };
-
- dictionary Foo {
- Foo1 b;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be a Dictionary that " "inherits from its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- (Foo or DOMString)[]? b;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be a Nullable type "
- "whose inner type includes its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- (DOMString or Foo) b;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be a Union type, one of "
- "whose member types includes its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- sequence<sequence<sequence<Foo>>> c;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be a Sequence type "
- "whose element type includes its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- (DOMString or Foo)[] d;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be an Array type "
- "whose element type includes its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- Foo1 b;
- };
-
- dictionary Foo3 {
- Foo d;
- };
-
- dictionary Foo2 : Foo3 {
- short c;
- };
-
- dictionary Foo1 : Foo2 {
- long a;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Member type must not be a Dictionary, one of whose "
- "members or inherited members has a type that includes "
- "its Dictionary.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- };
-
- dictionary Bar {
- Foo? d;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Member type must not be a nullable dictionary")
-
- parser = parser.reset()
- parser.parse(
- """
- dictionary Foo {
- unrestricted float urFloat = 0;
- unrestricted float urFloat2 = 1.1;
- unrestricted float urFloat3 = -1.1;
- unrestricted float? urFloat4 = null;
- unrestricted float infUrFloat = Infinity;
- unrestricted float negativeInfUrFloat = -Infinity;
- unrestricted float nanUrFloat = NaN;
-
- unrestricted double urDouble = 0;
- unrestricted double urDouble2 = 1.1;
- unrestricted double urDouble3 = -1.1;
- unrestricted double? urDouble4 = null;
- unrestricted double infUrDouble = Infinity;
- unrestricted double negativeInfUrDouble = -Infinity;
- unrestricted double nanUrDouble = NaN;
- };
- """
- )
- results = parser.finish()
- harness.ok(True, "Parsing default values for unrestricted types succeeded.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- double f = Infinity;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to Infinity")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- double f = -Infinity;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to -Infinity")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- double f = NaN;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to NaN")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- float f = Infinity;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to Infinity")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- float f = -Infinity;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to -Infinity")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- float f = NaN;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Only unrestricted values can be initialized to NaN")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Foo {
- long module;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(not threw, "Should be able to use 'module' as a dictionary member name")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py b/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py
deleted file mode 100644
index e96026c2a09..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_distinguishability.py
+++ /dev/null
@@ -1,425 +0,0 @@
-import traceback
-
-
-def firstArgType(method):
- return method.signatures()[0][1][0].type
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- // Give our dictionary a required member so we don't need to
- // mess with optional and default values.
- dictionary Dict {
- required long member;
- };
- callback interface Foo {
- };
- interface Bar {
- // Bit of a pain to get things that have dictionary types
- undefined passDict(Dict arg);
- undefined passFoo(Foo arg);
- undefined passNullableUnion((object? or DOMString) arg);
- undefined passNullable(Foo? arg);
- };
- """
- )
- results = parser.finish()
-
- iface = results[2]
- harness.ok(iface.isInterface(), "Should have interface")
- dictMethod = iface.members[0]
- ifaceMethod = iface.members[1]
- nullableUnionMethod = iface.members[2]
- nullableIfaceMethod = iface.members[3]
-
- dictType = firstArgType(dictMethod)
- ifaceType = firstArgType(ifaceMethod)
-
- harness.ok(dictType.isDictionary(), "Should have dictionary type")
- harness.ok(ifaceType.isInterface(), "Should have interface type")
- harness.ok(ifaceType.isCallbackInterface(), "Should have callback interface type")
-
- harness.ok(
- not dictType.isDistinguishableFrom(ifaceType),
- "Dictionary not distinguishable from callback interface",
- )
- harness.ok(
- not ifaceType.isDistinguishableFrom(dictType),
- "Callback interface not distinguishable from dictionary",
- )
-
- nullableUnionType = firstArgType(nullableUnionMethod)
- nullableIfaceType = firstArgType(nullableIfaceMethod)
-
- harness.ok(nullableUnionType.isUnion(), "Should have union type")
- harness.ok(nullableIfaceType.isInterface(), "Should have interface type")
- harness.ok(nullableIfaceType.nullable(), "Should have nullable type")
-
- harness.ok(
- not nullableUnionType.isDistinguishableFrom(nullableIfaceType),
- "Nullable type not distinguishable from union with nullable " "member type",
- )
- harness.ok(
- not nullableIfaceType.isDistinguishableFrom(nullableUnionType),
- "Union with nullable member type not distinguishable from " "nullable type",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestIface {
- undefined passKid(Kid arg);
- undefined passParent(Parent arg);
- undefined passGrandparent(Grandparent arg);
- undefined passUnrelated1(Unrelated1 arg);
- undefined passUnrelated2(Unrelated2 arg);
- undefined passArrayBuffer(ArrayBuffer arg);
- undefined passArrayBuffer(ArrayBufferView arg);
- };
-
- interface Kid : Parent {};
- interface Parent : Grandparent {};
- interface Grandparent {};
- interface Unrelated1 {};
- interface Unrelated2 {};
- """
- )
- results = parser.finish()
-
- iface = results[0]
- harness.ok(iface.isInterface(), "Should have interface")
- argTypes = [firstArgType(method) for method in iface.members]
- unrelatedTypes = [firstArgType(method) for method in iface.members[-3:]]
-
- for type1 in argTypes:
- for type2 in argTypes:
- distinguishable = type1 is not type2 and (
- type1 in unrelatedTypes or type2 in unrelatedTypes
- )
-
- harness.check(
- type1.isDistinguishableFrom(type2),
- distinguishable,
- "Type %s should %sbe distinguishable from type %s"
- % (type1, "" if distinguishable else "not ", type2),
- )
- harness.check(
- type2.isDistinguishableFrom(type1),
- distinguishable,
- "Type %s should %sbe distinguishable from type %s"
- % (type2, "" if distinguishable else "not ", type1),
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Dummy {};
- interface TestIface {
- undefined method(long arg1, TestIface arg2);
- undefined method(long arg1, long arg2);
- undefined method(long arg1, Dummy arg2);
- undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
- };
- """
- )
- results = parser.finish()
- harness.check(len(results[1].members), 1, "Should look like we have one method")
- harness.check(
- len(results[1].members[0].signatures()), 4, "Should have four signatures"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Dummy {};
- interface TestIface {
- undefined method(long arg1, TestIface arg2);
- undefined method(long arg1, long arg2);
- undefined method(any arg1, Dummy arg2);
- undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should throw when args before the distinguishing arg are not "
- "all the same type",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Dummy {};
- interface TestIface {
- undefined method(long arg1, TestIface arg2);
- undefined method(long arg1, long arg2);
- undefined method(any arg1, DOMString arg2);
- undefined method(DOMString arg1, DOMString arg2, DOMString arg3);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should throw when there is no distinguishing index")
-
- # Now let's test our whole distinguishability table
- argTypes = [
- "long",
- "short",
- "long?",
- "short?",
- "boolean",
- "boolean?",
- "undefined",
- "undefined?",
- "DOMString",
- "ByteString",
- "UTF8String",
- "Enum",
- "Enum2",
- "Interface",
- "Interface?",
- "AncestorInterface",
- "UnrelatedInterface",
- "CallbackInterface",
- "CallbackInterface?",
- "CallbackInterface2",
- "object",
- "Callback",
- "Callback2",
- "Dict",
- "Dict2",
- "sequence<long>",
- "sequence<short>",
- "record<DOMString, object>",
- "record<USVString, Dict>",
- "record<ByteString, long>",
- "record<UTF8String, long>",
- "any",
- "Promise<any>",
- "Promise<any>?",
- "USVString",
- "JSString",
- "ArrayBuffer",
- "ArrayBufferView",
- "Uint8Array",
- "Uint16Array",
- "(long or Callback)",
- "(long or Dict)",
- ]
-
- # Try to categorize things a bit to keep list lengths down
- def allBut(list1, list2):
- return [
- a
- for a in list1
- if a not in list2
- and (a != "any" and a != "Promise<any>" and a != "Promise<any>?")
- ]
-
- unions = ["(long or Callback)", "(long or Dict)"]
- numerics = ["long", "short", "long?", "short?"]
- booleans = ["boolean", "boolean?"]
- undefineds = ["undefined", "undefined?"]
- primitives = numerics + booleans
- nonNumerics = allBut(argTypes, numerics + unions)
- nonBooleans = allBut(argTypes, booleans)
- strings = [
- "DOMString",
- "ByteString",
- "Enum",
- "Enum2",
- "USVString",
- "JSString",
- "UTF8String",
- ]
- nonStrings = allBut(argTypes, strings)
- nonObjects = undefineds + primitives + strings
- objects = allBut(argTypes, nonObjects)
- bufferSourceTypes = ["ArrayBuffer", "ArrayBufferView", "Uint8Array", "Uint16Array"]
- interfaces = [
- "Interface",
- "Interface?",
- "AncestorInterface",
- "UnrelatedInterface",
- ] + bufferSourceTypes
- nullables = [
- "long?",
- "short?",
- "boolean?",
- "undefined?",
- "Interface?",
- "CallbackInterface?",
- "Dict",
- "Dict2",
- "Date?",
- "any",
- "Promise<any>?",
- ] + allBut(unions, ["(long or Callback)"])
- sequences = ["sequence<long>", "sequence<short>"]
- nonUserObjects = nonObjects + interfaces + sequences
- otherObjects = allBut(argTypes, nonUserObjects + ["object"])
- notRelatedInterfaces = (
- nonObjects
- + ["UnrelatedInterface"]
- + otherObjects
- + sequences
- + bufferSourceTypes
- )
- records = [
- "record<DOMString, object>",
- "record<USVString, Dict>",
- "record<ByteString, long>",
- "record<UTF8String, long>",
- ] # JSString not supported in records
- dictionaryLike = (
- [
- "Dict",
- "Dict2",
- "CallbackInterface",
- "CallbackInterface?",
- "CallbackInterface2",
- ]
- + records
- + allBut(unions, ["(long or Callback)"])
- )
-
- # Build a representation of the distinguishability table as a dict
- # of dicts, holding True values where needed, holes elsewhere.
- data = dict()
- for type in argTypes:
- data[type] = dict()
-
- def setDistinguishable(type, types):
- for other in types:
- data[type][other] = True
-
- setDistinguishable("long", nonNumerics)
- setDistinguishable("short", nonNumerics)
- setDistinguishable("long?", allBut(nonNumerics, nullables))
- setDistinguishable("short?", allBut(nonNumerics, nullables))
- setDistinguishable("boolean", nonBooleans)
- setDistinguishable("boolean?", allBut(nonBooleans, nullables))
- setDistinguishable("undefined", allBut(argTypes, undefineds + dictionaryLike))
- setDistinguishable(
- "undefined?", allBut(argTypes, undefineds + dictionaryLike + nullables)
- )
- setDistinguishable("DOMString", nonStrings)
- setDistinguishable("ByteString", nonStrings)
- setDistinguishable("UTF8String", nonStrings)
- setDistinguishable("USVString", nonStrings)
- setDistinguishable("JSString", nonStrings)
- setDistinguishable("Enum", nonStrings)
- setDistinguishable("Enum2", nonStrings)
- setDistinguishable("Interface", notRelatedInterfaces)
- setDistinguishable("Interface?", allBut(notRelatedInterfaces, nullables))
- setDistinguishable("AncestorInterface", notRelatedInterfaces)
- setDistinguishable(
- "UnrelatedInterface", allBut(argTypes, ["object", "UnrelatedInterface"])
- )
- setDistinguishable("CallbackInterface", allBut(nonUserObjects, undefineds))
- setDistinguishable(
- "CallbackInterface?", allBut(nonUserObjects, nullables + undefineds)
- )
- setDistinguishable("CallbackInterface2", allBut(nonUserObjects, undefineds))
- setDistinguishable("object", nonObjects)
- setDistinguishable("Callback", nonUserObjects)
- setDistinguishable("Callback2", nonUserObjects)
- setDistinguishable("Dict", allBut(nonUserObjects, nullables + undefineds))
- setDistinguishable("Dict2", allBut(nonUserObjects, nullables + undefineds))
- setDistinguishable("sequence<long>", allBut(argTypes, sequences + ["object"]))
- setDistinguishable("sequence<short>", allBut(argTypes, sequences + ["object"]))
- setDistinguishable("record<DOMString, object>", allBut(nonUserObjects, undefineds))
- setDistinguishable("record<USVString, Dict>", allBut(nonUserObjects, undefineds))
- # JSString not supported in records
- setDistinguishable("record<ByteString, long>", allBut(nonUserObjects, undefineds))
- setDistinguishable("record<UTF8String, long>", allBut(nonUserObjects, undefineds))
- setDistinguishable("any", [])
- setDistinguishable("Promise<any>", [])
- setDistinguishable("Promise<any>?", [])
- setDistinguishable("ArrayBuffer", allBut(argTypes, ["ArrayBuffer", "object"]))
- setDistinguishable(
- "ArrayBufferView",
- allBut(argTypes, ["ArrayBufferView", "Uint8Array", "Uint16Array", "object"]),
- )
- setDistinguishable(
- "Uint8Array", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "object"])
- )
- setDistinguishable(
- "Uint16Array", allBut(argTypes, ["ArrayBufferView", "Uint16Array", "object"])
- )
- setDistinguishable("(long or Callback)", allBut(nonUserObjects, numerics))
- setDistinguishable(
- "(long or Dict)", allBut(nonUserObjects, numerics + nullables + undefineds)
- )
-
- def areDistinguishable(type1, type2):
- return data[type1].get(type2, False)
-
- def checkDistinguishability(parser, type1, type2):
- idlTemplate = """
- enum Enum { "a", "b" };
- enum Enum2 { "c", "d" };
- interface Interface : AncestorInterface {};
- interface AncestorInterface {};
- interface UnrelatedInterface {};
- callback interface CallbackInterface {};
- callback interface CallbackInterface2 {};
- callback Callback = any();
- callback Callback2 = long(short arg);
- // Give our dictionaries required members so we don't need to
- // mess with optional and default values.
- dictionary Dict { required long member; };
- dictionary Dict2 { required long member; };
- interface TestInterface {%s
- };
- """
- if type1 in undefineds or type2 in undefineds:
- methods = """
- (%s or %s) myMethod();""" % (
- type1,
- type2,
- )
- else:
- methodTemplate = """
- undefined myMethod(%s arg);"""
- methods = (methodTemplate % type1) + (methodTemplate % type2)
- idl = idlTemplate % methods
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(idl)
- results = parser.finish()
- except:
- threw = True
-
- if areDistinguishable(type1, type2):
- harness.ok(
- not threw,
- "Should not throw for '%s' and '%s' because they are distinguishable"
- % (type1, type2),
- )
- else:
- harness.ok(
- threw,
- "Should throw for '%s' and '%s' because they are not distinguishable"
- % (type1, type2),
- )
-
- # Enumerate over everything in both orders, since order matters in
- # terms of our implementation of distinguishability checks
- for type1 in argTypes:
- for type2 in argTypes:
- checkDistinguishability(parser, type1, type2)
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_double_null.py b/components/script/dom/bindings/codegen/parser/tests/test_double_null.py
deleted file mode 100644
index a8876a7fd2d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_double_null.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface DoubleNull {
- attribute byte?? foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py b/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py
deleted file mode 100644
index 89a4e1acf0b..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_duplicate_qualifiers.py
+++ /dev/null
@@ -1,64 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface DuplicateQualifiers1 {
- getter getter byte foo(unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface DuplicateQualifiers2 {
- setter setter byte foo(unsigned long index, byte value);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface DuplicateQualifiers4 {
- deleter deleter byte foo(unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface DuplicateQualifiers5 {
- getter deleter getter byte foo(unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py b/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py
deleted file mode 100644
index 09333a659cd..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_empty_enum.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- try:
- parser.parse(
- """
- enum TestEmptyEnum {
- };
- """
- )
-
- harness.ok(False, "Should have thrown!")
- except:
- harness.ok(True, "Parsing TestEmptyEnum enum should fail")
-
- results = parser.finish()
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py b/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py
deleted file mode 100644
index 21837743523..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_empty_sequence_default_value.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface X {
- const sequence<long> foo = [];
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Constant cannot have [] as a default value")
-
- parser = parser.reset()
-
- parser.parse(
- """
- interface X {
- undefined foo(optional sequence<long> arg = []);
- };
- """
- )
- results = parser.finish()
-
- harness.ok(
- isinstance(
- results[0].members[0].signatures()[0][1][0].defaultValue,
- WebIDL.IDLEmptySequenceValue,
- ),
- "Should have IDLEmptySequenceValue as default value of argument",
- )
-
- parser = parser.reset()
-
- parser.parse(
- """
- dictionary X {
- sequence<long> foo = [];
- };
- """
- )
- results = parser.finish()
-
- harness.ok(
- isinstance(results[0].members[0].defaultValue, WebIDL.IDLEmptySequenceValue),
- "Should have IDLEmptySequenceValue as default value of " "dictionary member",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_enum.py b/components/script/dom/bindings/codegen/parser/tests/test_enum.py
deleted file mode 100644
index 56c6b3f64aa..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_enum.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- enum TestEnum {
- "",
- "foo",
- "bar"
- };
-
- interface TestEnumInterface {
- TestEnum doFoo(boolean arg);
- readonly attribute TestEnum foo;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestEnumInterfaces interface parsed without error.")
- harness.check(len(results), 2, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLEnum), "Should be an IDLEnum")
- harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface")
-
- enum = results[0]
- harness.check(enum.identifier.QName(), "::TestEnum", "Enum has the right QName")
- harness.check(enum.identifier.name, "TestEnum", "Enum has the right name")
- harness.check(enum.values(), ["", "foo", "bar"], "Enum has the right values")
-
- iface = results[1]
-
- harness.check(
- iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName"
- )
- harness.check(
- iface.identifier.name, "TestEnumInterface", "Interface has the right name"
- )
- harness.check(iface.parent, None, "Interface has no parent")
-
- members = iface.members
- harness.check(len(members), 2, "Should be one production")
- harness.ok(isinstance(members[0], WebIDL.IDLMethod), "Should be an IDLMethod")
- method = members[0]
- harness.check(
- method.identifier.QName(),
- "::TestEnumInterface::doFoo",
- "Method has correct QName",
- )
- harness.check(method.identifier.name, "doFoo", "Method has correct name")
-
- signatures = method.signatures()
- harness.check(len(signatures), 1, "Expect one signature")
-
- (returnType, arguments) = signatures[0]
- harness.check(
- str(returnType), "TestEnum (Wrapper)", "Method type is the correct name"
- )
- harness.check(len(arguments), 1, "Method has the right number of arguments")
- arg = arguments[0]
- harness.ok(isinstance(arg, WebIDL.IDLArgument), "Should be an IDLArgument")
- harness.check(str(arg.type), "Boolean", "Argument has the right type")
-
- attr = members[1]
- harness.check(
- attr.identifier.QName(), "::TestEnumInterface::foo", "Attr has correct QName"
- )
- harness.check(attr.identifier.name, "foo", "Attr has correct name")
-
- harness.check(str(attr.type), "TestEnum (Wrapper)", "Attr type is the correct name")
-
- # Now reset our parser
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- enum Enum {
- "a",
- "b",
- "c"
- };
- interface TestInterface {
- undefined foo(optional Enum e = "d");
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow a bogus default value for an enum")
-
- # Now reset our parser
- parser = parser.reset()
- parser.parse(
- """
- enum Enum {
- "a",
- "b",
- "c",
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should allow trailing comma in enum")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py b/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py
deleted file mode 100644
index 8969281e1c7..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_enum_duplicate_values.py
+++ /dev/null
@@ -1,16 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- try:
- parser.parse(
- """
- enum TestEnumDuplicateValue {
- "",
- ""
- };
- """
- )
- harness.ok(False, "Should have thrown!")
- except:
- harness.ok(True, "Enum TestEnumDuplicateValue should throw")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py b/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py
deleted file mode 100644
index 1c9bb065580..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_error_colno.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- # Check that error messages put the '^' in the right place.
-
- threw = False
- input = "interface ?"
- try:
- parser.parse(input)
- results = parser.finish()
- except WebIDL.WebIDLError as e:
- threw = True
- lines = str(e).split("\n")
-
- harness.check(len(lines), 3, "Expected number of lines in error message")
- harness.check(lines[1], input, "Second line shows error")
- harness.check(
- lines[2],
- " " * (len(input) - 1) + "^",
- "Correct column pointer in error message",
- )
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py b/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py
deleted file mode 100644
index 0d10e006787..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_error_lineno.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- # Check that error messages put the '^' in the right place.
-
- threw = False
- input = """\
-// This is a comment.
-interface Foo {
-};
-
-/* This is also a comment. */
-interface ?"""
- try:
- parser.parse(input)
- results = parser.finish()
- except WebIDL.WebIDLError as e:
- threw = True
- lines = str(e).split("\n")
-
- harness.check(len(lines), 3, "Expected number of lines in error message")
- harness.ok(
- lines[0].endswith("line 6:10"),
- 'First line of error should end with "line 6:10", but was "%s".' % lines[0],
- )
- harness.check(
- lines[1],
- "interface ?",
- "Second line of error message is the line which caused the error.",
- )
- harness.check(
- lines[2],
- " " * (len("interface ?") - 1) + "^",
- "Correct column pointer in error message.",
- )
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py b/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py
deleted file mode 100644
index c5ea8e4b88b..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_exposed_extended_attribute.py
+++ /dev/null
@@ -1,383 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global=(Bar, Bar1,Bar2), Exposed=Bar] interface Bar {};
- [Global=(Baz, Baz2), Exposed=Baz] interface Baz {};
-
- [Exposed=(Foo,Bar1)]
- interface Iface {
- undefined method1();
-
- [Exposed=Bar1]
- readonly attribute any attr;
- };
-
- [Exposed=Foo]
- partial interface Iface {
- undefined method2();
- };
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 5, "Should know about five things")
- iface = results[3]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here")
- members = iface.members
- harness.check(len(members), 3, "Should have three members")
-
- harness.ok(
- members[0].exposureSet == set(["Foo", "Bar"]),
- "method1 should have the right exposure set",
- )
- harness.ok(
- members[0]._exposureGlobalNames == set(["Foo", "Bar1"]),
- "method1 should have the right exposure global names",
- )
-
- harness.ok(
- members[1].exposureSet == set(["Bar"]),
- "attr should have the right exposure set",
- )
- harness.ok(
- members[1]._exposureGlobalNames == set(["Bar1"]),
- "attr should have the right exposure global names",
- )
-
- harness.ok(
- members[2].exposureSet == set(["Foo"]),
- "method2 should have the right exposure set",
- )
- harness.ok(
- members[2]._exposureGlobalNames == set(["Foo"]),
- "method2 should have the right exposure global names",
- )
-
- harness.ok(
- iface.exposureSet == set(["Foo", "Bar"]),
- "Iface should have the right exposure set",
- )
- harness.ok(
- iface._exposureGlobalNames == set(["Foo", "Bar1"]),
- "Iface should have the right exposure global names",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {};
- [Global=(Baz, Baz2), Exposed=Baz] interface Baz {};
-
- [Exposed=Foo]
- interface Iface2 {
- undefined method3();
- };
- """
- )
- results = parser.finish()
-
- harness.check(len(results), 4, "Should know about four things")
- iface = results[3]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here")
- members = iface.members
- harness.check(len(members), 1, "Should have one member")
-
- harness.ok(
- members[0].exposureSet == set(["Foo"]),
- "method3 should have the right exposure set",
- )
- harness.ok(
- members[0]._exposureGlobalNames == set(["Foo"]),
- "method3 should have the right exposure global names",
- )
-
- harness.ok(
- iface.exposureSet == set(["Foo"]), "Iface2 should have the right exposure set"
- )
- harness.ok(
- iface._exposureGlobalNames == set(["Foo"]),
- "Iface2 should have the right exposure global names",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {};
- [Global=(Baz, Baz2), Exposed=Baz] interface Baz {};
-
- [Exposed=Foo]
- interface Iface3 {
- undefined method4();
- };
-
- [Exposed=(Foo,Bar1)]
- interface mixin Mixin {
- undefined method5();
- };
-
- Iface3 includes Mixin;
- """
- )
- results = parser.finish()
- harness.check(len(results), 6, "Should know about six things")
- iface = results[3]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here")
- members = iface.members
- harness.check(len(members), 2, "Should have two members")
-
- harness.ok(
- members[0].exposureSet == set(["Foo"]),
- "method4 should have the right exposure set",
- )
- harness.ok(
- members[0]._exposureGlobalNames == set(["Foo"]),
- "method4 should have the right exposure global names",
- )
-
- harness.ok(
- members[1].exposureSet == set(["Foo", "Bar"]),
- "method5 should have the right exposure set",
- )
- harness.ok(
- members[1]._exposureGlobalNames == set(["Foo", "Bar1"]),
- "method5 should have the right exposure global names",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Exposed=Foo]
- interface Bar {
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Should have thrown on invalid Exposed value on interface.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Bar {
- [Exposed=Foo]
- readonly attribute bool attr;
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Should have thrown on invalid Exposed value on attribute.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Bar {
- [Exposed=Foo]
- undefined operation();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Should have thrown on invalid Exposed value on operation.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Bar {
- [Exposed=Foo]
- const long constant = 5;
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Should have thrown on invalid Exposed value on constant.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global, Exposed=Bar] interface Bar {};
-
- [Exposed=Foo]
- interface Baz {
- [Exposed=Bar]
- undefined method();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(
- threw, "Should have thrown on member exposed where its interface is not."
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global, Exposed=Bar] interface Bar {};
-
- [Exposed=Foo]
- interface Baz {
- undefined method();
- };
-
- [Exposed=Bar]
- interface mixin Mixin {
- undefined otherMethod();
- };
-
- Baz includes Mixin;
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 5, "Should know about five things")
- iface = results[2]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here")
- members = iface.members
- harness.check(len(members), 2, "Should have two members")
-
- harness.ok(
- members[0].exposureSet == set(["Foo"]),
- "method should have the right exposure set",
- )
- harness.ok(
- members[0]._exposureGlobalNames == set(["Foo"]),
- "method should have the right exposure global names",
- )
-
- harness.ok(
- members[1].exposureSet == set(["Bar"]),
- "otherMethod should have the right exposure set",
- )
- harness.ok(
- members[1]._exposureGlobalNames == set(["Bar"]),
- "otherMethod should have the right exposure global names",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global, Exposed=Bar] interface Bar {};
-
- [Exposed=*]
- interface Baz {
- undefined methodWild();
- };
-
- [Exposed=Bar]
- interface mixin Mixin {
- undefined methodNotWild();
- };
-
- Baz includes Mixin;
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 5, "Should know about five things")
- iface = results[2]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here")
- members = iface.members
- harness.check(len(members), 2, "Should have two members")
-
- harness.ok(
- members[0].exposureSet == set(["Foo", "Bar"]),
- "methodWild should have the right exposure set",
- )
- harness.ok(
- members[0]._exposureGlobalNames == set(["Foo", "Bar"]),
- "methodWild should have the right exposure global names",
- )
-
- harness.ok(
- members[1].exposureSet == set(["Bar"]),
- "methodNotWild should have the right exposure set",
- )
- harness.ok(
- members[1]._exposureGlobalNames == set(["Bar"]),
- "methodNotWild should have the right exposure global names",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global, Exposed=Bar] interface Bar {};
-
- [Exposed=Foo]
- interface Baz {
- [Exposed=*]
- undefined method();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(
- threw, "Should have thrown on member exposed where its interface is not."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo] interface Foo {};
- [Global, Exposed=Bar] interface Bar {};
-
- [Exposed=(Foo,*)]
- interface Baz {
- undefined method();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
-
- harness.ok(threw, "Should have thrown on a wildcard in an identifier list.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py b/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py
deleted file mode 100644
index 423a67540c7..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_extended_attributes.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- [LegacyNoInterfaceObject]
- interface TestExtendedAttr {
- [LegacyUnforgeable] readonly attribute byte b;
- };
- """
- )
-
- results = parser.finish()
-
- parser = parser.reset()
- parser.parse(
- """
- [Pref="foo.bar",Pref=flop]
- interface TestExtendedAttr {
- [Pref="foo.bar"] attribute byte b;
- };
- """
- )
-
- results = parser.finish()
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestLegacyLenientThis {
- [LegacyLenientThis] attribute byte b;
- };
- """
- )
-
- results = parser.finish()
- harness.ok(
- results[0].members[0].hasLegacyLenientThis(), "Should have a lenient this"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestLegacyLenientThis2 {
- [LegacyLenientThis=something] attribute byte b;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "[LegacyLenientThis] must take no arguments")
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestClamp {
- undefined testClamp([Clamp] long foo);
- undefined testNotClamp(long foo);
- };
- """
- )
-
- results = parser.finish()
- # Pull out the first argument out of the arglist of the first (and
- # only) signature.
- harness.ok(
- results[0].members[0].signatures()[0][1][0].type.hasClamp(), "Should be clamped"
- )
- harness.ok(
- not results[0].members[1].signatures()[0][1][0].type.hasClamp(),
- "Should not be clamped",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestClamp2 {
- undefined testClamp([Clamp=something] long foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "[Clamp] must take no arguments")
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestEnforceRange {
- undefined testEnforceRange([EnforceRange] long foo);
- undefined testNotEnforceRange(long foo);
- };
- """
- )
-
- results = parser.finish()
- # Pull out the first argument out of the arglist of the first (and
- # only) signature.
- harness.ok(
- results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(),
- "Should be enforceRange",
- )
- harness.ok(
- not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(),
- "Should not be enforceRange",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestEnforceRange2 {
- undefined testEnforceRange([EnforceRange=something] long foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "[EnforceRange] must take no arguments")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_float_types.py b/components/script/dom/bindings/codegen/parser/tests/test_float_types.py
deleted file mode 100644
index d37443819d8..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_float_types.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- typedef float myFloat;
- typedef unrestricted float myUnrestrictedFloat;
- interface FloatTypes {
- attribute float f;
- attribute unrestricted float uf;
- attribute double d;
- attribute unrestricted double ud;
- [LenientFloat]
- attribute float lf;
- [LenientFloat]
- attribute double ld;
-
- undefined m1(float arg1, double arg2, float? arg3, double? arg4,
- myFloat arg5, unrestricted float arg6,
- unrestricted double arg7, unrestricted float? arg8,
- unrestricted double? arg9, myUnrestrictedFloat arg10);
- [LenientFloat]
- undefined m2(float arg1, double arg2, float? arg3, double? arg4,
- myFloat arg5, unrestricted float arg6,
- unrestricted double arg7, unrestricted float? arg8,
- unrestricted double? arg9, myUnrestrictedFloat arg10);
- [LenientFloat]
- undefined m3(float arg);
- [LenientFloat]
- undefined m4(double arg);
- [LenientFloat]
- undefined m5((float or FloatTypes) arg);
- [LenientFloat]
- undefined m6(sequence<float> arg);
- };
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 3, "Should be two typedefs and one interface.")
- iface = results[2]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- types = [a.type for a in iface.members if a.isAttr()]
- harness.ok(types[0].isFloat(), "'float' is a float")
- harness.ok(not types[0].isUnrestricted(), "'float' is not unrestricted")
- harness.ok(types[1].isFloat(), "'unrestricted float' is a float")
- harness.ok(types[1].isUnrestricted(), "'unrestricted float' is unrestricted")
- harness.ok(types[2].isFloat(), "'double' is a float")
- harness.ok(not types[2].isUnrestricted(), "'double' is not unrestricted")
- harness.ok(types[3].isFloat(), "'unrestricted double' is a float")
- harness.ok(types[3].isUnrestricted(), "'unrestricted double' is unrestricted")
-
- method = iface.members[6]
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod")
- argtypes = [a.type for a in method.signatures()[0][1]]
- for (idx, type) in enumerate(argtypes):
- harness.ok(type.isFloat(), "Type %d should be float" % idx)
- harness.check(
- type.isUnrestricted(),
- idx >= 5,
- "Type %d should %sbe unrestricted" % (idx, "" if idx >= 4 else "not "),
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface FloatTypes {
- [LenientFloat]
- long m(float arg);
- };
- """
- )
- except Exception as x:
- threw = True
- harness.ok(threw, "[LenientFloat] only allowed on methods returning undefined")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface FloatTypes {
- [LenientFloat]
- undefined m(unrestricted float arg);
- };
- """
- )
- except Exception as x:
- threw = True
- harness.ok(
- threw, "[LenientFloat] only allowed on methods with unrestricted float args"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface FloatTypes {
- [LenientFloat]
- undefined m(sequence<unrestricted float> arg);
- };
- """
- )
- except Exception as x:
- threw = True
- harness.ok(
- threw, "[LenientFloat] only allowed on methods with unrestricted float args (2)"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface FloatTypes {
- [LenientFloat]
- undefined m((unrestricted float or FloatTypes) arg);
- };
- """
- )
- except Exception as x:
- threw = True
- harness.ok(
- threw, "[LenientFloat] only allowed on methods with unrestricted float args (3)"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface FloatTypes {
- [LenientFloat]
- readonly attribute float foo;
- };
- """
- )
- except Exception as x:
- threw = True
- harness.ok(threw, "[LenientFloat] only allowed on writable attributes")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py b/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py
deleted file mode 100644
index 1c81718400a..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_forward_decl.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface ForwardDeclared;
- interface ForwardDeclared;
-
- interface TestForwardDecl {
- attribute ForwardDeclared foo;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestForwardDeclared interface parsed without error.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py b/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py
deleted file mode 100644
index 9ee27efbc8d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_global_extended_attr.py
+++ /dev/null
@@ -1,129 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- [Global, Exposed=Foo]
- interface Foo : Bar {
- getter any(DOMString name);
- };
- [Exposed=Foo]
- interface Bar {};
- """
- )
-
- results = parser.finish()
-
- harness.ok(
- results[0].isOnGlobalProtoChain(),
- "[Global] interface should be on global's proto chain",
- )
- harness.ok(
- results[1].isOnGlobalProtoChain(),
- "[Global] interface should be on global's proto chain",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo]
- interface Foo {
- getter any(DOMString name);
- setter undefined(DOMString name, any arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown for [Global] used on an interface with a " "named setter",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo]
- interface Foo {
- getter any(DOMString name);
- deleter undefined(DOMString name);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown for [Global] used on an interface with a " "named deleter",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, LegacyOverrideBuiltIns, Exposed=Foo]
- interface Foo {
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown for [Global] used on an interface with a "
- "[LegacyOverrideBuiltIns]",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo]
- interface Foo : Bar {
- };
- [LegacyOverrideBuiltIns, Exposed=Foo]
- interface Bar {
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown for [Global] used on an interface with an "
- "[LegacyOverrideBuiltIns] ancestor",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Foo]
- interface Foo {
- };
- [Exposed=Foo]
- interface Bar : Foo {
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown for [Global] used on an interface with a " "descendant",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py b/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py
deleted file mode 100644
index 7404c86f944..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_identifier_conflict.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Import the WebIDL module, so we can do isinstance checks and whatnot
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- try:
- parser.parse(
- """
- enum Foo { "a" };
- interface Foo;
- """
- )
- results = parser.finish()
- harness.ok(False, "Should fail to parse")
- except Exception as e:
- harness.ok(
- "Name collision" in str(e), "Should have name collision for interface"
- )
-
- parser = parser.reset()
- try:
- parser.parse(
- """
- dictionary Foo { long x; };
- enum Foo { "a" };
- """
- )
- results = parser.finish()
- harness.ok(False, "Should fail to parse")
- except Exception as e:
- harness.ok(
- "Name collision" in str(e), "Should have name collision for dictionary"
- )
-
- parser = parser.reset()
- try:
- parser.parse(
- """
- enum Foo { "a" };
- enum Foo { "b" };
- """
- )
- results = parser.finish()
- harness.ok(False, "Should fail to parse")
- except Exception as e:
- harness.ok(
- "Multiple unresolvable definitions" in str(e),
- "Should have name collision for dictionary",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py
deleted file mode 100644
index ed476b8ed4c..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_parent.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestIncompleteParent : NotYetDefined {
- undefined foo();
- };
-
- interface NotYetDefined : EvenHigherOnTheChain {
- };
-
- interface EvenHigherOnTheChain {
- };
- """
- )
-
- parser.finish()
-
- harness.ok(True, "TestIncompleteParent interface parsed without error.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py b/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py
deleted file mode 100644
index 0d54f708bba..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestIncompleteTypes {
- attribute FooInterface attr1;
-
- FooInterface method1(FooInterface arg);
- };
-
- interface FooInterface {
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestIncompleteTypes interface parsed without error.")
- harness.check(len(results), 2, "Should be two productions.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(),
- "::TestIncompleteTypes",
- "Interface has the right QName",
- )
- harness.check(
- iface.identifier.name, "TestIncompleteTypes", "Interface has the right name"
- )
- harness.check(len(iface.members), 2, "Expect 2 members")
-
- attr = iface.members[0]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- method = iface.members[1]
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod")
-
- harness.check(
- attr.identifier.QName(),
- "::TestIncompleteTypes::attr1",
- "Attribute has the right QName",
- )
- harness.check(
- attr.type.name, "FooInterface", "Previously unresolved type has the right name"
- )
-
- harness.check(
- method.identifier.QName(),
- "::TestIncompleteTypes::method1",
- "Attribute has the right QName",
- )
- (returnType, args) = method.signatures()[0]
- harness.check(
- returnType.name, "FooInterface", "Previously unresolved type has the right name"
- )
- harness.check(
- args[0].type.name,
- "FooInterface",
- "Previously unresolved type has the right name",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface.py b/components/script/dom/bindings/codegen/parser/tests/test_interface.py
deleted file mode 100644
index 85748848e1b..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_interface.py
+++ /dev/null
@@ -1,459 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse("interface Foo { };")
- results = parser.finish()
- harness.ok(True, "Empty interface parsed without error.")
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
- iface = results[0]
- harness.check(iface.identifier.QName(), "::Foo", "Interface has the right QName")
- harness.check(iface.identifier.name, "Foo", "Interface has the right name")
- harness.check(iface.parent, None, "Interface has no parent")
-
- parser.parse("interface Bar : Foo { };")
- results = parser.finish()
- harness.ok(True, "Empty interface parsed without error.")
- harness.check(len(results), 2, "Should be two productions")
- harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface")
- iface = results[1]
- harness.check(iface.identifier.QName(), "::Bar", "Interface has the right QName")
- harness.check(iface.identifier.name, "Bar", "Interface has the right name")
- harness.ok(isinstance(iface.parent, WebIDL.IDLInterface), "Interface has a parent")
-
- parser = parser.reset()
- parser.parse(
- """
- interface QNameBase {
- attribute long foo;
- };
-
- interface QNameDerived : QNameBase {
- attribute long long foo;
- attribute byte bar;
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 2, "Should be two productions")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(results[1].parent, results[0], "Inheritance chain is right")
- harness.check(len(results[0].members), 1, "Expect 1 productions")
- harness.check(len(results[1].members), 2, "Expect 2 productions")
- base = results[0]
- derived = results[1]
- harness.check(
- base.members[0].identifier.QName(),
- "::QNameBase::foo",
- "Member has the right QName",
- )
- harness.check(
- derived.members[0].identifier.QName(),
- "::QNameDerived::foo",
- "Member has the right QName",
- )
- harness.check(
- derived.members[1].identifier.QName(),
- "::QNameDerived::bar",
- "Member has the right QName",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A : B {};
- interface B : A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow cycles in interface inheritance chains")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A : C {};
- interface C : B {};
- interface B : A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw, "Should not allow indirect cycles in interface inheritance chains"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A;
- interface B : A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should not allow inheriting from an interface that is only forward declared",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface A {
- constructor();
- constructor(long arg);
- readonly attribute boolean x;
- undefined foo();
- };
- partial interface A {
- readonly attribute boolean y;
- undefined foo(long arg);
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 2, "Should have two results with partial interface")
- iface = results[0]
- harness.check(
- len(iface.members), 3, "Should have three members with partial interface"
- )
- harness.check(
- iface.members[0].identifier.name,
- "x",
- "First member should be x with partial interface",
- )
- harness.check(
- iface.members[1].identifier.name,
- "foo",
- "Second member should be foo with partial interface",
- )
- harness.check(
- len(iface.members[1].signatures()),
- 2,
- "Should have two foo signatures with partial interface",
- )
- harness.check(
- iface.members[2].identifier.name,
- "y",
- "Third member should be y with partial interface",
- )
- harness.check(
- len(iface.ctor().signatures()),
- 2,
- "Should have two constructors with partial interface",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- partial interface A {
- readonly attribute boolean y;
- undefined foo(long arg);
- };
- interface A {
- constructor();
- constructor(long arg);
- readonly attribute boolean x;
- undefined foo();
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results), 2, "Should have two results with reversed partial interface"
- )
- iface = results[1]
- harness.check(
- len(iface.members),
- 3,
- "Should have three members with reversed partial interface",
- )
- harness.check(
- iface.members[0].identifier.name,
- "x",
- "First member should be x with reversed partial interface",
- )
- harness.check(
- iface.members[1].identifier.name,
- "foo",
- "Second member should be foo with reversed partial interface",
- )
- harness.check(
- len(iface.members[1].signatures()),
- 2,
- "Should have two foo signatures with reversed partial interface",
- )
- harness.check(
- iface.members[2].identifier.name,
- "y",
- "Third member should be y with reversed partial interface",
- )
- harness.check(
- len(iface.ctor().signatures()),
- 2,
- "Should have two constructors with reversed partial interface",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- readonly attribute boolean x;
- };
- interface A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow two non-partial interfaces with the same name")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- partial interface A {
- readonly attribute boolean x;
- };
- partial interface A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Must have a non-partial interface for a given name")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- boolean x;
- };
- partial interface A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between partial interface "
- "and other object",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- boolean x;
- };
- interface A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow a name collision between interface " "and other object"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- boolean x;
- };
- interface A;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between external interface "
- "and other object",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- readonly attribute boolean x;
- };
- interface A;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between external interface " "and interface",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface A;
- interface A;
- """
- )
- results = parser.finish()
- harness.ok(
- len(results) == 1 and isinstance(results[0], WebIDL.IDLExternalInterface),
- "Should allow name collisions between external interface " "declarations",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SomeRandomAnnotation]
- interface A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow unknown extended attributes on interfaces")
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Exposed=Window, LegacyWindowAlias=A]
- interface B {};
- [Exposed=Window, LegacyWindowAlias=(C, D)]
- interface E {};
- """
- )
- results = parser.finish()
- harness.check(
- results[1].legacyWindowAliases, ["A"], "Should support a single identifier"
- )
- harness.check(
- results[2].legacyWindowAliases, ["C", "D"], "Should support an identifier list"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyWindowAlias]
- interface A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow [LegacyWindowAlias] with no value")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Exposed=Worker, LegacyWindowAlias=B]
- interface A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow [LegacyWindowAlias] without Window exposure")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Exposed=Window]
- interface A {};
- [Exposed=Window, LegacyWindowAlias=A]
- interface B {};
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Exposed=Window, LegacyWindowAlias=A]
- interface B {};
- [Exposed=Window]
- interface A {};
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Exposed=Window, LegacyWindowAlias=A]
- interface B {};
- [Exposed=Window, LegacyWindowAlias=A]
- interface C {};
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers"
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py
deleted file mode 100644
index 5750f87a6fc..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_interface_const_identifier_conflicts.py
+++ /dev/null
@@ -1,17 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface IdentifierConflict {
- const byte thing1 = 1;
- const unsigned long thing1 = 1;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py
deleted file mode 100644
index c1a544ce718..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_interface_identifier_conflicts_across_members.py
+++ /dev/null
@@ -1,68 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface IdentifierConflictAcrossMembers1 {
- const byte thing1 = 1;
- readonly attribute long thing1;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface IdentifierConflictAcrossMembers2 {
- readonly attribute long thing1;
- const byte thing1 = 1;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface IdentifierConflictAcrossMembers3 {
- getter boolean thing1(DOMString name);
- readonly attribute long thing1;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface IdentifierConflictAcrossMembers1 {
- const byte thing1 = 1;
- long thing1();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py b/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py
deleted file mode 100644
index 18c6023dd3b..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_interface_maplikesetlikeiterable.py
+++ /dev/null
@@ -1,912 +0,0 @@
-import WebIDL
-import traceback
-
-
-def WebIDLTest(parser, harness):
- def shouldPass(prefix, iface, expectedMembers, numProductions=1):
- p = parser.reset()
- p.parse(iface)
- results = p.finish()
- harness.check(
- len(results),
- numProductions,
- "%s - Should have production count %d" % (prefix, numProductions),
- )
- harness.ok(
- isinstance(results[0], WebIDL.IDLInterface),
- "%s - Should be an IDLInterface" % (prefix),
- )
- # Make a copy, since we plan to modify it
- expectedMembers = list(expectedMembers)
- for m in results[0].members:
- name = m.identifier.name
- if (name, type(m)) in expectedMembers:
- harness.ok(True, "%s - %s - Should be a %s" % (prefix, name, type(m)))
- expectedMembers.remove((name, type(m)))
- else:
- harness.ok(
- False,
- "%s - %s - Unknown symbol of type %s" % (prefix, name, type(m)),
- )
- # A bit of a hoop because we can't generate the error string if we pass
- if len(expectedMembers) == 0:
- harness.ok(True, "Found all the members")
- else:
- harness.ok(
- False,
- "Expected member not found: %s of type %s"
- % (expectedMembers[0][0], expectedMembers[0][1]),
- )
- return results
-
- def shouldFail(prefix, iface):
- try:
- p = parser.reset()
- p.parse(iface)
- p.finish()
- harness.ok(False, prefix + " - Interface passed when should've failed")
- except WebIDL.WebIDLError as e:
- harness.ok(True, prefix + " - Interface failed as expected")
- except Exception as e:
- harness.ok(
- False,
- prefix
- + " - Interface failed but not as a WebIDLError exception: %s" % e,
- )
-
- iterableMembers = [
- (x, WebIDL.IDLMethod) for x in ["entries", "keys", "values", "forEach"]
- ]
- setROMembers = (
- [(x, WebIDL.IDLMethod) for x in ["has"]]
- + [("__setlike", WebIDL.IDLMaplikeOrSetlike)]
- + iterableMembers
- )
- setROMembers.extend([("size", WebIDL.IDLAttribute)])
- setRWMembers = [
- (x, WebIDL.IDLMethod) for x in ["add", "clear", "delete"]
- ] + setROMembers
- setROChromeMembers = [
- (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"]
- ] + setROMembers
- setRWChromeMembers = [
- (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"]
- ] + setRWMembers
- mapROMembers = (
- [(x, WebIDL.IDLMethod) for x in ["get", "has"]]
- + [("__maplike", WebIDL.IDLMaplikeOrSetlike)]
- + iterableMembers
- )
- mapROMembers.extend([("size", WebIDL.IDLAttribute)])
- mapRWMembers = [
- (x, WebIDL.IDLMethod) for x in ["set", "clear", "delete"]
- ] + mapROMembers
- mapRWChromeMembers = [
- (x, WebIDL.IDLMethod) for x in ["__set", "__clear", "__delete"]
- ] + mapRWMembers
-
- # OK, now that we've used iterableMembers to set up the above, append
- # __iterable to it for the iterable<> case.
- iterableMembers.append(("__iterable", WebIDL.IDLIterable))
-
- asyncIterableMembers = [
- (x, WebIDL.IDLMethod) for x in ["entries", "keys", "values"]
- ]
- asyncIterableMembers.append(("__iterable", WebIDL.IDLAsyncIterable))
-
- valueIterableMembers = [("__iterable", WebIDL.IDLIterable)]
- valueIterableMembers.append(("__indexedgetter", WebIDL.IDLMethod))
- valueIterableMembers.append(("length", WebIDL.IDLAttribute))
-
- valueAsyncIterableMembers = [("__iterable", WebIDL.IDLAsyncIterable)]
- valueAsyncIterableMembers.append(("values", WebIDL.IDLMethod))
-
- disallowedIterableNames = ["keys", "entries", "values"]
- disallowedMemberNames = ["forEach", "has", "size"] + disallowedIterableNames
- mapDisallowedMemberNames = ["get"] + disallowedMemberNames
- disallowedNonMethodNames = ["clear", "delete"]
- mapDisallowedNonMethodNames = ["set"] + disallowedNonMethodNames
- setDisallowedNonMethodNames = ["add"] + disallowedNonMethodNames
- unrelatedMembers = [
- ("unrelatedAttribute", WebIDL.IDLAttribute),
- ("unrelatedMethod", WebIDL.IDLMethod),
- ]
-
- #
- # Simple Usage Tests
- #
-
- shouldPass(
- "Iterable (key only)",
- """
- interface Foo1 {
- iterable<long>;
- readonly attribute unsigned long length;
- getter long(unsigned long index);
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- valueIterableMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Iterable (key only) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- iterable<long>;
- readonly attribute unsigned long length;
- getter long(unsigned long index);
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- valueIterableMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Iterable (key and value)",
- """
- interface Foo1 {
- iterable<long, long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- iterableMembers + unrelatedMembers,
- # numProductions == 2 because of the generated iterator iface,
- numProductions=2,
- )
-
- shouldPass(
- "Iterable (key and value) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- iterable<long, long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- iterableMembers,
- # numProductions == 3 because of the generated iterator iface,
- numProductions=3,
- )
-
- shouldPass(
- "Async iterable (key only)",
- """
- interface Foo1 {
- async iterable<long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- valueAsyncIterableMembers + unrelatedMembers,
- # numProductions == 2 because of the generated iterator iface,
- numProductions=2,
- )
-
- shouldPass(
- "Async iterable (key only) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- async iterable<long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- valueAsyncIterableMembers,
- # numProductions == 3 because of the generated iterator iface,
- numProductions=3,
- )
-
- shouldPass(
- "Async iterable with argument (key only)",
- """
- interface Foo1 {
- async iterable<long>(optional long foo);
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- valueAsyncIterableMembers + unrelatedMembers,
- # numProductions == 2 because of the generated iterator iface,
- numProductions=2,
- )
-
- shouldPass(
- "Async iterable (key and value)",
- """
- interface Foo1 {
- async iterable<long, long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- asyncIterableMembers + unrelatedMembers,
- # numProductions == 2 because of the generated iterator iface,
- numProductions=2,
- )
-
- shouldPass(
- "Async iterable (key and value) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- async iterable<long, long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- asyncIterableMembers,
- # numProductions == 3 because of the generated iterator iface,
- numProductions=3,
- )
-
- shouldPass(
- "Async iterable with argument (key and value)",
- """
- interface Foo1 {
- async iterable<long, long>(optional long foo);
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- asyncIterableMembers + unrelatedMembers,
- # numProductions == 2 because of the generated iterator iface,
- numProductions=2,
- )
-
- shouldPass(
- "Maplike (readwrite)",
- """
- interface Foo1 {
- maplike<long, long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapRWMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Maplike (readwrite) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- maplike<long, long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Maplike (readwrite)",
- """
- interface Foo1 {
- maplike<long, long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapRWMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Maplike (readwrite) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- maplike<long, long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Maplike (readonly)",
- """
- interface Foo1 {
- readonly maplike<long, long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapROMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Maplike (readonly) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- readonly maplike<long, long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- mapROMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Setlike (readwrite)",
- """
- interface Foo1 {
- setlike<long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- setRWMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Setlike (readwrite) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- setlike<long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- setRWMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Setlike (readonly)",
- """
- interface Foo1 {
- readonly setlike<long>;
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- setROMembers + unrelatedMembers,
- )
-
- shouldPass(
- "Setlike (readonly) inheriting from parent",
- """
- interface Foo1 : Foo2 {
- readonly setlike<long>;
- };
- interface Foo2 {
- attribute long unrelatedAttribute;
- long unrelatedMethod();
- };
- """,
- setROMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Inheritance of maplike/setlike",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldFail(
- "JS Implemented maplike interface",
- """
- [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"]
- interface Foo1 {
- constructor();
- setlike<long>;
- };
- """,
- )
-
- shouldFail(
- "JS Implemented maplike interface",
- """
- [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"]
- interface Foo1 {
- constructor();
- maplike<long, long>;
- };
- """,
- )
-
- #
- # Multiple maplike/setlike tests
- #
-
- shouldFail(
- "Two maplike/setlikes on same interface",
- """
- interface Foo1 {
- setlike<long>;
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Two iterable/setlikes on same interface",
- """
- interface Foo1 {
- iterable<long>;
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Two iterables on same interface",
- """
- interface Foo1 {
- iterable<long>;
- iterable<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Two iterables on same interface",
- """
- interface Foo1 {
- iterable<long>;
- async iterable<long>;
- };
- """,
- )
-
- shouldFail(
- "Two iterables on same interface",
- """
- interface Foo1 {
- async iterable<long>;
- async iterable<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Async iterable with non-optional arguments",
- """
- interface Foo1 {
- async iterable<long>(long foo);
- };
- """,
- )
-
- shouldFail(
- "Async iterable with non-optional arguments",
- """
- interface Foo1 {
- async iterable<long>(optional long foo, long bar);
- };
- """,
- )
-
- shouldFail(
- "Async iterable with non-optional arguments",
- """
- interface Foo1 {
- async iterable<long, long>(long foo);
- };
- """,
- )
-
- shouldFail(
- "Two maplike/setlikes in partials",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- partial interface Foo1 {
- setlike<long>;
- };
- """,
- )
-
- shouldFail(
- "Conflicting maplike/setlikes across inheritance",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- setlike<long>;
- };
- """,
- )
-
- shouldFail(
- "Conflicting maplike/iterable across inheritance",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- iterable<long>;
- };
- """,
- )
-
- shouldFail(
- "Conflicting maplike/setlikes across multistep inheritance",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- setlike<long>;
- };
- """,
- )
-
- #
- # Member name collision tests
- #
-
- def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPasses):
- """
- Tests for maplike/setlike member generation against conflicting member
- names. If methodPasses is True, this means we expect the interface to
- pass in the case of method shadowing, and expectedMembers should be the
- list of interface members to check against on the passing interface.
-
- """
- if methodPasses:
- shouldPass(
- "Conflicting method: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s;
- [Throws]
- undefined %s(long test1, double test2, double test3);
- };
- """
- % (likeMember, conflictName),
- expectedMembers,
- )
- else:
- shouldFail(
- "Conflicting method: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s;
- [Throws]
- undefined %s(long test1, double test2, double test3);
- };
- """
- % (likeMember, conflictName),
- )
- # Inherited conflicting methods should ALWAYS fail
- shouldFail(
- "Conflicting inherited method: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- undefined %s(long test1, double test2, double test3);
- };
- interface Foo2 : Foo1 {
- %s;
- };
- """
- % (conflictName, likeMember),
- )
- shouldFail(
- "Conflicting static method: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s;
- static undefined %s(long test1, double test2, double test3);
- };
- """
- % (likeMember, conflictName),
- )
- shouldFail(
- "Conflicting attribute: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s
- attribute double %s;
- };
- """
- % (likeMember, conflictName),
- )
- shouldFail(
- "Conflicting const: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s;
- const double %s = 0;
- };
- """
- % (likeMember, conflictName),
- )
- shouldFail(
- "Conflicting static attribute: %s and %s" % (likeMember, conflictName),
- """
- interface Foo1 {
- %s;
- static attribute long %s;
- };
- """
- % (likeMember, conflictName),
- )
-
- for member in disallowedIterableNames:
- testConflictingMembers("iterable<long, long>", member, iterableMembers, False)
- for member in mapDisallowedMemberNames:
- testConflictingMembers("maplike<long, long>", member, mapRWMembers, False)
- for member in disallowedMemberNames:
- testConflictingMembers("setlike<long>", member, setRWMembers, False)
- for member in mapDisallowedNonMethodNames:
- testConflictingMembers("maplike<long, long>", member, mapRWMembers, True)
- for member in setDisallowedNonMethodNames:
- testConflictingMembers("setlike<long>", member, setRWMembers, True)
-
- shouldPass(
- "Inheritance of maplike/setlike with child member collision",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- undefined entries();
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Inheritance of multi-level maplike/setlike with child member collision",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- undefined entries();
- };
- """,
- mapRWMembers,
- numProductions=3,
- )
-
- shouldFail(
- "Maplike interface with mixin member collision",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface mixin Foo2 {
- undefined entries();
- };
- Foo1 includes Foo2;
- """,
- )
-
- shouldPass(
- "Inherited Maplike interface with consequential interface member collision",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface mixin Foo2 {
- undefined entries();
- };
- interface Foo3 : Foo1 {
- };
- Foo3 includes Foo2;
- """,
- mapRWMembers,
- numProductions=4,
- )
-
- shouldFail(
- "Inheritance of name collision with child maplike/setlike",
- """
- interface Foo1 {
- undefined entries();
- };
- interface Foo2 : Foo1 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Inheritance of multi-level name collision with child maplike/setlike",
- """
- interface Foo1 {
- undefined entries();
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldPass(
- "Inheritance of attribute collision with parent maplike/setlike",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- attribute double size;
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldPass(
- "Inheritance of multi-level attribute collision with parent maplike/setlike",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- attribute double size;
- };
- """,
- mapRWMembers,
- numProductions=3,
- )
-
- shouldFail(
- "Inheritance of attribute collision with child maplike/setlike",
- """
- interface Foo1 {
- attribute double size;
- };
- interface Foo2 : Foo1 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Inheritance of multi-level attribute collision with child maplike/setlike",
- """
- interface Foo1 {
- attribute double size;
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Inheritance of attribute/rw function collision with child maplike/setlike",
- """
- interface Foo1 {
- attribute double set;
- };
- interface Foo2 : Foo1 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Inheritance of const/rw function collision with child maplike/setlike",
- """
- interface Foo1 {
- const double set = 0;
- };
- interface Foo2 : Foo1 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldPass(
- "Inheritance of rw function with same name in child maplike/setlike",
- """
- interface Foo1 {
- maplike<long, long>;
- };
- interface Foo2 : Foo1 {
- undefined clear();
- };
- """,
- mapRWMembers,
- numProductions=2,
- )
-
- shouldFail(
- "Inheritance of unforgeable attribute collision with child maplike/setlike",
- """
- interface Foo1 {
- [LegacyUnforgeable]
- attribute double size;
- };
- interface Foo2 : Foo1 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldFail(
- "Inheritance of multi-level unforgeable attribute collision with child maplike/setlike",
- """
- interface Foo1 {
- [LegacyUnforgeable]
- attribute double size;
- };
- interface Foo2 : Foo1 {
- };
- interface Foo3 : Foo2 {
- maplike<long, long>;
- };
- """,
- )
-
- shouldPass(
- "Interface with readonly allowable overrides",
- """
- interface Foo1 {
- readonly setlike<long>;
- readonly attribute boolean clear;
- };
- """,
- setROMembers + [("clear", WebIDL.IDLAttribute)],
- )
-
- r = shouldPass(
- "Check proper override of clear/delete/set",
- """
- interface Foo1 {
- maplike<long, long>;
- long clear(long a, long b, double c, double d);
- long set(long a, long b, double c, double d);
- long delete(long a, long b, double c, double d);
- };
- """,
- mapRWMembers,
- )
-
- for m in r[0].members:
- if m.identifier.name in ["clear", "set", "delete"]:
- harness.ok(m.isMethod(), "%s should be a method" % m.identifier.name)
- harness.check(
- m.maxArgCount, 4, "%s should have 4 arguments" % m.identifier.name
- )
- harness.ok(
- not m.isMaplikeOrSetlikeOrIterableMethod(),
- "%s should not be a maplike/setlike function" % m.identifier.name,
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py b/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py
deleted file mode 100644
index b3c8573fa59..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_interfacemixin.py
+++ /dev/null
@@ -1,534 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse("interface mixin Foo { };")
- results = parser.finish()
- harness.ok(True, "Empty interface mixin parsed without error.")
- harness.check(len(results), 1, "Should be one production")
- harness.ok(
- isinstance(results[0], WebIDL.IDLInterfaceMixin),
- "Should be an IDLInterfaceMixin",
- )
- mixin = results[0]
- harness.check(
- mixin.identifier.QName(), "::Foo", "Interface mixin has the right QName"
- )
- harness.check(mixin.identifier.name, "Foo", "Interface mixin has the right name")
-
- parser = parser.reset()
- parser.parse(
- """
- interface mixin QNameBase {
- const long foo = 3;
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 1, "Should be one productions")
- harness.ok(
- isinstance(results[0], WebIDL.IDLInterfaceMixin),
- "Should be an IDLInterfaceMixin",
- )
- harness.check(len(results[0].members), 1, "Expect 1 productions")
- mixin = results[0]
- harness.check(
- mixin.members[0].identifier.QName(),
- "::QNameBase::foo",
- "Member has the right QName",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface mixin A {
- readonly attribute boolean x;
- undefined foo();
- };
- partial interface mixin A {
- readonly attribute boolean y;
- undefined foo(long arg);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results), 2, "Should have two results with partial interface mixin"
- )
- mixin = results[0]
- harness.check(
- len(mixin.members), 3, "Should have three members with partial interface mixin"
- )
- harness.check(
- mixin.members[0].identifier.name,
- "x",
- "First member should be x with partial interface mixin",
- )
- harness.check(
- mixin.members[1].identifier.name,
- "foo",
- "Second member should be foo with partial interface mixin",
- )
- harness.check(
- len(mixin.members[1].signatures()),
- 2,
- "Should have two foo signatures with partial interface mixin",
- )
- harness.check(
- mixin.members[2].identifier.name,
- "y",
- "Third member should be y with partial interface mixin",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- partial interface mixin A {
- readonly attribute boolean y;
- undefined foo(long arg);
- };
- interface mixin A {
- readonly attribute boolean x;
- undefined foo();
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results), 2, "Should have two results with reversed partial interface mixin"
- )
- mixin = results[1]
- harness.check(
- len(mixin.members),
- 3,
- "Should have three members with reversed partial interface mixin",
- )
- harness.check(
- mixin.members[0].identifier.name,
- "x",
- "First member should be x with reversed partial interface mixin",
- )
- harness.check(
- mixin.members[1].identifier.name,
- "foo",
- "Second member should be foo with reversed partial interface mixin",
- )
- harness.check(
- len(mixin.members[1].signatures()),
- 2,
- "Should have two foo signatures with reversed partial interface mixin",
- )
- harness.check(
- mixin.members[2].identifier.name,
- "y",
- "Third member should be y with reversed partial interface mixin",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Interface {};
- interface mixin Mixin {
- attribute short x;
- };
- Interface includes Mixin;
- """
- )
- results = parser.finish()
- iface = results[0]
- harness.check(len(iface.members), 1, "Should merge members from mixins")
- harness.check(
- iface.members[0].identifier.name, "x", "Should merge members from mixins"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- readonly attribute boolean x;
- };
- interface mixin A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow two non-partial interface mixins with the same name"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- partial interface mixin A {
- readonly attribute boolean x;
- };
- partial interface mixin A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Must have a non-partial interface mixin for a given name")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- boolean x;
- };
- partial interface mixin A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between partial interface "
- "mixin and other object",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary A {
- boolean x;
- };
- interface mixin A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between interface mixin " "and other object",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- readonly attribute boolean x;
- };
- interface A;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a name collision between external interface "
- "and interface mixin",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SomeRandomAnnotation]
- interface mixin A {
- readonly attribute boolean y;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow unknown extended attributes on interface mixins"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- getter double (DOMString propertyName);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow getters on interface mixins")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- setter undefined (DOMString propertyName, double propertyValue);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow setters on interface mixins")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- deleter undefined (DOMString propertyName);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow deleters on interface mixins")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- legacycaller double compute(double x);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow legacycallers on interface mixins")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin A {
- inherit attribute x;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow inherited attribute on interface mixins")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Interface {};
- interface NotMixin {
- attribute short x;
- };
- Interface includes NotMixin;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should fail if the right side does not point an interface mixin")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin NotInterface {};
- interface mixin Mixin {
- attribute short x;
- };
- NotInterface includes Mixin;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should fail if the left side does not point an interface")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin Mixin {
- iterable<DOMString>;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should fail if an interface mixin includes iterable")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin Mixin {
- setlike<DOMString>;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should fail if an interface mixin includes setlike")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface mixin Mixin {
- maplike<DOMString, DOMString>;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should fail if an interface mixin includes maplike")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Interface {
- attribute short attr;
- };
- interface mixin Mixin {
- attribute short attr;
- };
- Interface includes Mixin;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should fail if the included mixin interface has duplicated member"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Interface {};
- interface mixin Mixin1 {
- attribute short attr;
- };
- interface mixin Mixin2 {
- attribute short attr;
- };
- Interface includes Mixin1;
- Interface includes Mixin2;
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should fail if the included mixin interfaces have duplicated member"
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Global, Exposed=Worker] interface Worker {};
- [Exposed=Window]
- interface Base {};
- interface mixin Mixin {
- Base returnSelf();
- };
- Base includes Mixin;
- """
- )
- results = parser.finish()
- base = results[2]
- attr = base.members[0]
- harness.check(
- attr.exposureSet,
- set(["Window"]),
- "Should expose on globals where the base interfaces are exposed",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Global, Exposed=Worker] interface Worker {};
- [Exposed=Window]
- interface Base {};
- [Exposed=Window]
- interface mixin Mixin {
- attribute short a;
- };
- Base includes Mixin;
- """
- )
- results = parser.finish()
- base = results[2]
- attr = base.members[0]
- harness.check(
- attr.exposureSet, set(["Window"]), "Should follow [Exposed] on interface mixin"
- )
-
- parser = parser.reset()
- parser.parse(
- """
- [Global, Exposed=Window] interface Window {};
- [Global, Exposed=Worker] interface Worker {};
- [Exposed=Window]
- interface Base1 {};
- [Exposed=Worker]
- interface Base2 {};
- interface mixin Mixin {
- attribute short a;
- };
- Base1 includes Mixin;
- Base2 includes Mixin;
- """
- )
- results = parser.finish()
- base = results[2]
- attr = base.members[0]
- harness.check(
- attr.exposureSet,
- set(["Window", "Worker"]),
- "Should expose on all globals where including interfaces are " "exposed",
- )
- base = results[3]
- attr = base.members[0]
- harness.check(
- attr.exposureSet,
- set(["Window", "Worker"]),
- "Should expose on all globals where including interfaces are " "exposed",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py b/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py
deleted file mode 100644
index 9d2230c3bec..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_lenientSetter.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-def should_throw(parser, harness, message, code):
- parser = parser.reset()
- threw = False
- try:
- parser.parse(code)
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown: %s" % message)
-
-
-def WebIDLTest(parser, harness):
- # The [LegacyLenientSetter] extended attribute MUST take no arguments.
- should_throw(
- parser,
- harness,
- "no arguments",
- """
- interface I {
- [LegacyLenientSetter=X] readonly attribute long A;
- };
- """,
- )
-
- # An attribute with the [LegacyLenientSetter] extended attribute MUST NOT
- # also be declared with the [PutForwards] extended attribute.
- should_throw(
- parser,
- harness,
- "PutForwards",
- """
- interface I {
- [PutForwards=B, LegacyLenientSetter] readonly attribute J A;
- };
- interface J {
- attribute long B;
- };
- """,
- )
-
- # An attribute with the [LegacyLenientSetter] extended attribute MUST NOT
- # also be declared with the [Replaceable] extended attribute.
- should_throw(
- parser,
- harness,
- "Replaceable",
- """
- interface I {
- [Replaceable, LegacyLenientSetter] readonly attribute J A;
- };
- """,
- )
-
- # The [LegacyLenientSetter] extended attribute MUST NOT be used on an
- # attribute that is not read only.
- should_throw(
- parser,
- harness,
- "writable attribute",
- """
- interface I {
- [LegacyLenientSetter] attribute long A;
- };
- """,
- )
-
- # The [LegacyLenientSetter] extended attribute MUST NOT be used on a
- # static attribute.
- should_throw(
- parser,
- harness,
- "static attribute",
- """
- interface I {
- [LegacyLenientSetter] static readonly attribute long A;
- };
- """,
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_method.py b/components/script/dom/bindings/codegen/parser/tests/test_method.py
deleted file mode 100644
index 0ddfada28ac..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_method.py
+++ /dev/null
@@ -1,430 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestMethods {
- undefined basic();
- static undefined basicStatic();
- undefined basicWithSimpleArgs(boolean arg1, byte arg2, unsigned long arg3);
- boolean basicBoolean();
- static boolean basicStaticBoolean();
- boolean basicBooleanWithSimpleArgs(boolean arg1, byte arg2, unsigned long arg3);
- undefined optionalArg(optional byte? arg1, optional sequence<byte> arg2);
- undefined variadicArg(byte?... arg1);
- object getObject();
- undefined setObject(object arg1);
- undefined setAny(any arg1);
- float doFloats(float arg1);
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestMethods interface parsed without error.")
- harness.check(len(results), 1, "Should be one production.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestMethods", "Interface has the right QName"
- )
- harness.check(iface.identifier.name, "TestMethods", "Interface has the right name")
- harness.check(len(iface.members), 12, "Expect 12 members")
-
- methods = iface.members
-
- def checkArgument(argument, QName, name, type, optional, variadic):
- harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument")
- harness.check(
- argument.identifier.QName(), QName, "Argument has the right QName"
- )
- harness.check(argument.identifier.name, name, "Argument has the right name")
- harness.check(str(argument.type), type, "Argument has the right return type")
- harness.check(
- argument.optional, optional, "Argument has the right optional value"
- )
- harness.check(
- argument.variadic, variadic, "Argument has the right variadic value"
- )
-
- def checkMethod(
- method,
- QName,
- name,
- signatures,
- static=False,
- getter=False,
- setter=False,
- deleter=False,
- legacycaller=False,
- stringifier=False,
- ):
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod")
- harness.ok(method.isMethod(), "Method is a method")
- harness.ok(not method.isAttr(), "Method is not an attr")
- harness.ok(not method.isConst(), "Method is not a const")
- harness.check(method.identifier.QName(), QName, "Method has the right QName")
- harness.check(method.identifier.name, name, "Method has the right name")
- harness.check(method.isStatic(), static, "Method has the correct static value")
- harness.check(method.isGetter(), getter, "Method has the correct getter value")
- harness.check(method.isSetter(), setter, "Method has the correct setter value")
- harness.check(
- method.isDeleter(), deleter, "Method has the correct deleter value"
- )
- harness.check(
- method.isLegacycaller(),
- legacycaller,
- "Method has the correct legacycaller value",
- )
- harness.check(
- method.isStringifier(),
- stringifier,
- "Method has the correct stringifier value",
- )
- harness.check(
- len(method.signatures()),
- len(signatures),
- "Method has the correct number of signatures",
- )
-
- sigpairs = zip(method.signatures(), signatures)
- for (gotSignature, expectedSignature) in sigpairs:
- (gotRetType, gotArgs) = gotSignature
- (expectedRetType, expectedArgs) = expectedSignature
-
- harness.check(
- str(gotRetType), expectedRetType, "Method has the expected return type."
- )
-
- for i in range(0, len(gotArgs)):
- (QName, name, type, optional, variadic) = expectedArgs[i]
- checkArgument(gotArgs[i], QName, name, type, optional, variadic)
-
- checkMethod(methods[0], "::TestMethods::basic", "basic", [("Undefined", [])])
- checkMethod(
- methods[1],
- "::TestMethods::basicStatic",
- "basicStatic",
- [("Undefined", [])],
- static=True,
- )
- checkMethod(
- methods[2],
- "::TestMethods::basicWithSimpleArgs",
- "basicWithSimpleArgs",
- [
- (
- "Undefined",
- [
- (
- "::TestMethods::basicWithSimpleArgs::arg1",
- "arg1",
- "Boolean",
- False,
- False,
- ),
- (
- "::TestMethods::basicWithSimpleArgs::arg2",
- "arg2",
- "Byte",
- False,
- False,
- ),
- (
- "::TestMethods::basicWithSimpleArgs::arg3",
- "arg3",
- "UnsignedLong",
- False,
- False,
- ),
- ],
- )
- ],
- )
- checkMethod(
- methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])]
- )
- checkMethod(
- methods[4],
- "::TestMethods::basicStaticBoolean",
- "basicStaticBoolean",
- [("Boolean", [])],
- static=True,
- )
- checkMethod(
- methods[5],
- "::TestMethods::basicBooleanWithSimpleArgs",
- "basicBooleanWithSimpleArgs",
- [
- (
- "Boolean",
- [
- (
- "::TestMethods::basicBooleanWithSimpleArgs::arg1",
- "arg1",
- "Boolean",
- False,
- False,
- ),
- (
- "::TestMethods::basicBooleanWithSimpleArgs::arg2",
- "arg2",
- "Byte",
- False,
- False,
- ),
- (
- "::TestMethods::basicBooleanWithSimpleArgs::arg3",
- "arg3",
- "UnsignedLong",
- False,
- False,
- ),
- ],
- )
- ],
- )
- checkMethod(
- methods[6],
- "::TestMethods::optionalArg",
- "optionalArg",
- [
- (
- "Undefined",
- [
- (
- "::TestMethods::optionalArg::arg1",
- "arg1",
- "ByteOrNull",
- True,
- False,
- ),
- (
- "::TestMethods::optionalArg::arg2",
- "arg2",
- "ByteSequence",
- True,
- False,
- ),
- ],
- )
- ],
- )
- checkMethod(
- methods[7],
- "::TestMethods::variadicArg",
- "variadicArg",
- [
- (
- "Undefined",
- [
- (
- "::TestMethods::variadicArg::arg1",
- "arg1",
- "ByteOrNull",
- True,
- True,
- )
- ],
- )
- ],
- )
- checkMethod(methods[8], "::TestMethods::getObject", "getObject", [("Object", [])])
- checkMethod(
- methods[9],
- "::TestMethods::setObject",
- "setObject",
- [
- (
- "Undefined",
- [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)],
- )
- ],
- )
- checkMethod(
- methods[10],
- "::TestMethods::setAny",
- "setAny",
- [("Undefined", [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])],
- )
- checkMethod(
- methods[11],
- "::TestMethods::doFloats",
- "doFloats",
- [("Float", [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])],
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- undefined foo(optional float bar = 1);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(not threw, "Should allow integer to float type corecion")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [GetterThrows] undefined foo();
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should not allow [GetterThrows] on methods")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [SetterThrows] undefined foo();
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should not allow [SetterThrows] on methods")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throw] undefined foo();
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should spell [Throws] correctly on methods")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- undefined __noSuchMethod__();
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should not allow __noSuchMethod__ methods")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throws, LenientFloat]
- undefined foo(float myFloat);
- [Throws]
- undefined foo();
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(not threw, "Should allow LenientFloat to be only in a specific overload")
-
- parser = parser.reset()
- parser.parse(
- """
- interface A {
- [Throws]
- undefined foo();
- [Throws, LenientFloat]
- undefined foo(float myFloat);
- };
- """
- )
- results = parser.finish()
- iface = results[0]
- methods = iface.members
- lenientFloat = methods[0].getExtendedAttribute("LenientFloat")
- harness.ok(
- lenientFloat is not None,
- "LenientFloat in overloads must be added to the method",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throws, LenientFloat]
- undefined foo(float myFloat);
- [Throws]
- undefined foo(float myFloat, float yourFloat);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw,
- "Should prevent overloads from getting different restricted float behavior",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throws]
- undefined foo(float myFloat, float yourFloat);
- [Throws, LenientFloat]
- undefined foo(float myFloat);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw,
- "Should prevent overloads from getting different restricted float behavior (2)",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Throws, LenientFloat]
- undefined foo(float myFloat);
- [Throws, LenientFloat]
- undefined foo(short myShort);
- };
- """
- )
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should prevent overloads from getting redundant [LenientFloat]")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_namespace.py b/components/script/dom/bindings/codegen/parser/tests/test_namespace.py
deleted file mode 100644
index 247c5b22232..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_namespace.py
+++ /dev/null
@@ -1,232 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- namespace MyNamespace {
- attribute any foo;
- any bar();
- };
- """
- )
-
- results = parser.finish()
- harness.check(len(results), 1, "Should have a thing.")
- harness.ok(results[0].isNamespace(), "Our thing should be a namespace")
- harness.check(len(results[0].members), 2, "Should have two things in our namespace")
- harness.ok(results[0].members[0].isAttr(), "First member is attribute")
- harness.ok(results[0].members[0].isStatic(), "Attribute should be static")
- harness.ok(results[0].members[1].isMethod(), "Second member is method")
- harness.ok(results[0].members[1].isStatic(), "Operation should be static")
-
- parser = parser.reset()
- parser.parse(
- """
- namespace MyNamespace {
- attribute any foo;
- };
- partial namespace MyNamespace {
- any bar();
- };
- """
- )
-
- results = parser.finish()
- harness.check(len(results), 2, "Should have things.")
- harness.ok(results[0].isNamespace(), "Our thing should be a namespace")
- harness.check(len(results[0].members), 2, "Should have two things in our namespace")
- harness.ok(results[0].members[0].isAttr(), "First member is attribute")
- harness.ok(results[0].members[0].isStatic(), "Attribute should be static")
- harness.ok(results[0].members[1].isMethod(), "Second member is method")
- harness.ok(results[0].members[1].isStatic(), "Operation should be static")
-
- parser = parser.reset()
- parser.parse(
- """
- partial namespace MyNamespace {
- any bar();
- };
- namespace MyNamespace {
- attribute any foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(len(results), 2, "Should have things.")
- harness.ok(results[1].isNamespace(), "Our thing should be a namespace")
- harness.check(len(results[1].members), 2, "Should have two things in our namespace")
- harness.ok(results[1].members[0].isAttr(), "First member is attribute")
- harness.ok(results[1].members[0].isStatic(), "Attribute should be static")
- harness.ok(results[1].members[1].isMethod(), "Second member is method")
- harness.ok(results[1].members[1].isStatic(), "Operation should be static")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- namespace MyNamespace {
- static attribute any foo;
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- namespace MyNamespace {
- static any bar();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- namespace MyNamespace {
- any bar();
- };
-
- interface MyNamespace {
- any baz();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface MyNamespace {
- any baz();
- };
-
- namespace MyNamespace {
- any bar();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- namespace MyNamespace {
- any baz();
- };
-
- namespace MyNamespace {
- any bar();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- partial namespace MyNamespace {
- any baz();
- };
-
- interface MyNamespace {
- any bar();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- namespace MyNamespace {
- any bar();
- };
-
- partial interface MyNamespace {
- any baz();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- partial interface MyNamespace {
- any baz();
- };
-
- namespace MyNamespace {
- any bar();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface MyNamespace {
- any bar();
- };
-
- partial namespace MyNamespace {
- any baz();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_newobject.py b/components/script/dom/bindings/codegen/parser/tests/test_newobject.py
deleted file mode 100644
index c12995a0e86..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_newobject.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# Import the WebIDL module, so we can do isinstance checks and whatnot
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- # Basic functionality
- parser.parse(
- """
- interface Iface {
- [NewObject] readonly attribute Iface attr;
- [NewObject] Iface method();
- };
- """
- )
- results = parser.finish()
- harness.ok(results, "Should not have thrown on basic [NewObject] usage")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Iface {
- [Pure, NewObject] readonly attribute Iface attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[NewObject] attributes must depend on something")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Iface {
- [Pure, NewObject] Iface method();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[NewObject] methods must depend on something")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Iface {
- [Cached, NewObject, Affects=Nothing] readonly attribute Iface attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[NewObject] attributes must not be [Cached]")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Iface {
- [StoreInSlot, NewObject, Affects=Nothing] readonly attribute Iface attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[NewObject] attributes must not be [StoreInSlot]")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py b/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py
deleted file mode 100644
index 012c5fcff7c..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_nullable_equivalency.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestNullableEquivalency1 {
- attribute long a;
- attribute long? b;
- };
-
- interface TestNullableEquivalency2 {
- attribute ArrayBuffer a;
- attribute ArrayBuffer? b;
- };
-
- /* Can't have dictionary-valued attributes, so can't test that here */
-
- enum TestNullableEquivalency4Enum {
- "Foo",
- "Bar"
- };
-
- interface TestNullableEquivalency4 {
- attribute TestNullableEquivalency4Enum a;
- attribute TestNullableEquivalency4Enum? b;
- };
-
- interface TestNullableEquivalency5 {
- attribute TestNullableEquivalency4 a;
- attribute TestNullableEquivalency4? b;
- };
-
- interface TestNullableEquivalency6 {
- attribute boolean a;
- attribute boolean? b;
- };
-
- interface TestNullableEquivalency7 {
- attribute DOMString a;
- attribute DOMString? b;
- };
-
- interface TestNullableEquivalency8 {
- attribute float a;
- attribute float? b;
- };
-
- interface TestNullableEquivalency9 {
- attribute double a;
- attribute double? b;
- };
-
- interface TestNullableEquivalency10 {
- attribute object a;
- attribute object? b;
- };
- """
- )
-
- for decl in parser.finish():
- if decl.isInterface():
- checkEquivalent(decl, harness)
-
-
-def checkEquivalent(iface, harness):
- type1 = iface.members[0].type
- type2 = iface.members[1].type
-
- harness.check(type1.nullable(), False, "attr1 should not be nullable")
- harness.check(type2.nullable(), True, "attr2 should be nullable")
-
- # We don't know about type1, but type2, the nullable type, definitely
- # shouldn't be builtin.
- harness.check(type2.builtin, False, "attr2 should not be builtin")
-
- # Ensure that all attributes of type2 match those in type1, except for:
- # - names on an ignore list,
- # - names beginning with '_',
- # - functions which throw when called with no args, and
- # - class-level non-callables ("static variables").
- #
- # Yes, this is an ugly, fragile hack. But it finds bugs...
- for attr in dir(type1):
- if (
- attr.startswith("_")
- or attr
- in [
- "nullable",
- "builtin",
- "filename",
- "location",
- "inner",
- "QName",
- "getDeps",
- "name",
- "prettyName",
- ]
- or (hasattr(type(type1), attr) and not callable(getattr(type1, attr)))
- ):
- continue
-
- a1 = getattr(type1, attr)
-
- if callable(a1):
- try:
- v1 = a1()
- except:
- # Can't call a1 with no args, so skip this attriute.
- continue
-
- try:
- a2 = getattr(type2, attr)
- except:
- harness.ok(
- False,
- "Missing %s attribute on type %s in %s" % (attr, type2, iface),
- )
- continue
-
- if not callable(a2):
- harness.ok(
- False,
- "%s attribute on type %s in %s wasn't callable"
- % (attr, type2, iface),
- )
- continue
-
- v2 = a2()
- harness.check(v2, v1, "%s method return value" % attr)
- else:
- try:
- a2 = getattr(type2, attr)
- except:
- harness.ok(
- False,
- "Missing %s attribute on type %s in %s" % (attr, type2, iface),
- )
- continue
-
- harness.check(a2, a1, "%s attribute should match" % attr)
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py b/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py
deleted file mode 100644
index 601f626bcf4..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_observableArray.py
+++ /dev/null
@@ -1,288 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-def WebIDLTest(parser, harness):
-
- # Test dictionary as inner type
- harness.should_throw(
- parser,
- """
- dictionary A {
- boolean member;
- };
- interface B {
- attribute ObservableArray<A> foo;
- };
- """,
- "use dictionary as inner type",
- )
-
- # Test sequence as inner type
- harness.should_throw(
- parser,
- """
- interface A {
- attribute ObservableArray<sequence<boolean>> foo;
- };
- """,
- "use sequence as inner type",
- )
-
- # Test sequence<dictionary> as inner type
- harness.should_throw(
- parser,
- """
- dictionary A {
- boolean member;
- };
- interface B {
- attribute ObservableArray<sequence<A>> foo;
- };
- """,
- "use sequence<dictionary> as inner type",
- )
-
- # Test record as inner type
- harness.should_throw(
- parser,
- """
- interface A {
- attribute ObservableArray<record<DOMString, boolean>> foo;
- };
- """,
- "use record as inner type",
- )
-
- # Test record<dictionary> as inner type
- harness.should_throw(
- parser,
- """
- dictionary A {
- boolean member;
- };
- interface B {
- attribute ObservableArray<record<DOMString, A>> foo;
- };
- """,
- "use record<dictionary> as inner type",
- )
-
- # Test observable array as inner type
- harness.should_throw(
- parser,
- """
- interface A {
- attribute ObservableArray<ObservableArray<boolean>> foo;
- };
- """,
- "use ObservableArray as inner type",
- )
-
- # Test nullable attribute
- harness.should_throw(
- parser,
- """
- interface A {
- attribute ObservableArray<boolean>? foo;
- };
- """,
- "nullable",
- )
-
- # Test sequence
- harness.should_throw(
- parser,
- """
- interface A {
- undefined foo(sequence<ObservableArray<boolean>> foo);
- };
- """,
- "used in sequence",
- )
-
- # Test record
- harness.should_throw(
- parser,
- """
- interface A {
- undefined foo(record<DOMString, ObservableArray<boolean>> foo);
- };
- """,
- "used in record",
- )
-
- # Test promise
- harness.should_throw(
- parser,
- """
- interface A {
- Promise<ObservableArray<boolean>> foo();
- };
- """,
- "used in promise",
- )
-
- # Test union
- harness.should_throw(
- parser,
- """
- interface A {
- attribute (DOMString or ObservableArray<boolean>>) foo;
- };
- """,
- "used in union",
- )
-
- # Test dictionary member
- harness.should_throw(
- parser,
- """
- dictionary A {
- ObservableArray<boolean> foo;
- };
- """,
- "used on dictionary member type",
- )
-
- # Test argument
- harness.should_throw(
- parser,
- """
- interface A {
- undefined foo(ObservableArray<boolean> foo);
- };
- """,
- "used on argument",
- )
-
- # Test static attribute
- harness.should_throw(
- parser,
- """
- interface A {
- static attribute ObservableArray<boolean> foo;
- };
- """,
- "used on static attribute type",
- )
-
- # Test iterable
- harness.should_throw(
- parser,
- """
- interface A {
- iterable<ObservableArray<boolean>>;
- };
- """,
- "used in iterable",
- )
-
- # Test maplike
- harness.should_throw(
- parser,
- """
- interface A {
- maplike<long, ObservableArray<boolean>>;
- };
- """,
- "used in maplike",
- )
-
- # Test setlike
- harness.should_throw(
- parser,
- """
- interface A {
- setlike<ObservableArray<boolean>>;
- };
- """,
- "used in setlike",
- )
-
- # Test JS implemented interface
- harness.should_throw(
- parser,
- """
- [JSImplementation="@mozilla.org/dom/test-interface-js;1"]
- interface A {
- readonly attribute ObservableArray<boolean> foo;
- };
- """,
- "used in JS implemented interface",
- )
-
- # Test namespace
- harness.should_throw(
- parser,
- """
- namespace A {
- readonly attribute ObservableArray<boolean> foo;
- };
- """,
- "used in namespaces",
- )
-
- # Test [Cached] extended attribute
- harness.should_throw(
- parser,
- """
- interface A {
- [Cached, Pure]
- readonly attribute ObservableArray<boolean> foo;
- };
- """,
- "have Cached extended attribute",
- )
-
- # Test [StoreInSlot] extended attribute
- harness.should_throw(
- parser,
- """
- interface A {
- [StoreInSlot, Pure]
- readonly attribute ObservableArray<boolean> foo;
- };
- """,
- "have StoreInSlot extended attribute",
- )
-
- # Test regular attribute
- parser = parser.reset()
- parser.parse(
- """
- interface A {
- readonly attribute ObservableArray<boolean> foo;
- attribute ObservableArray<[Clamp] octet> bar;
- attribute ObservableArray<long?> baz;
- attribute ObservableArray<(boolean or long)> qux;
- };
- """
- )
- results = parser.finish()
- A = results[0]
- foo = A.members[0]
- harness.ok(foo.readonly, "A.foo is readonly attribute")
- harness.ok(foo.type.isObservableArray(), "A.foo is ObservableArray type")
- harness.check(
- foo.slotIndices[A.identifier.name], 0, "A.foo should be stored in slot"
- )
- bar = A.members[1]
- harness.ok(bar.type.isObservableArray(), "A.bar is ObservableArray type")
- harness.check(
- bar.slotIndices[A.identifier.name], 1, "A.bar should be stored in slot"
- )
- harness.ok(bar.type.inner.hasClamp(), "A.bar's inner type should be clamped")
- baz = A.members[2]
- harness.ok(baz.type.isObservableArray(), "A.baz is ObservableArray type")
- harness.check(
- baz.slotIndices[A.identifier.name], 2, "A.baz should be stored in slot"
- )
- harness.ok(baz.type.inner.nullable(), "A.baz's inner type should be nullable")
- qux = A.members[3]
- harness.ok(qux.type.isObservableArray(), "A.qux is ObservableArray type")
- harness.check(
- qux.slotIndices[A.identifier.name], 3, "A.qux should be stored in slot"
- )
- harness.ok(qux.type.inner.isUnion(), "A.qux's inner type should be union")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py b/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py
deleted file mode 100644
index 2044c6362c3..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_optional_constraints.py
+++ /dev/null
@@ -1,35 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface OptionalConstraints1 {
- undefined foo(optional byte arg1, byte arg2);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- not threw,
- "Should not have thrown on non-optional argument following "
- "optional argument.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface OptionalConstraints2 {
- undefined foo(optional byte arg1 = 1, optional byte arg2 = 2,
- optional byte arg3, optional byte arg4 = 4,
- optional byte arg5, optional byte arg6 = 9);
- };
- """
- )
- results = parser.finish()
- args = results[0].members[0].signatures()[0][1]
- harness.check(len(args), 6, "Should have 6 arguments")
- harness.check(args[5].defaultValue.value, 9, "Should have correct default value")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_overload.py b/components/script/dom/bindings/codegen/parser/tests/test_overload.py
deleted file mode 100644
index 7816276aa6d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_overload.py
+++ /dev/null
@@ -1,74 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestOverloads {
- undefined basic();
- undefined basic(long arg1);
- boolean abitharder(TestOverloads foo);
- boolean abitharder(boolean foo);
- undefined abitharder(ArrayBuffer? foo);
- undefined withVariadics(long... numbers);
- undefined withVariadics(TestOverloads iface);
- undefined withVariadics(long num, TestOverloads iface);
- undefined optionalTest();
- undefined optionalTest(optional long num1, long num2);
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestOverloads interface parsed without error.")
- harness.check(len(results), 1, "Should be one production.")
- iface = results[0]
- harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface")
- harness.check(
- iface.identifier.QName(), "::TestOverloads", "Interface has the right QName"
- )
- harness.check(
- iface.identifier.name, "TestOverloads", "Interface has the right name"
- )
- harness.check(len(iface.members), 4, "Expect %s members" % 4)
-
- member = iface.members[0]
- harness.check(
- member.identifier.QName(),
- "::TestOverloads::basic",
- "Method has the right QName",
- )
- harness.check(member.identifier.name, "basic", "Method has the right name")
- harness.check(member.hasOverloads(), True, "Method has overloads")
-
- signatures = member.signatures()
- harness.check(len(signatures), 2, "Method should have 2 signatures")
-
- (retval, argumentSet) = signatures[0]
-
- harness.check(str(retval), "Undefined", "Expect an undefined retval")
- harness.check(len(argumentSet), 0, "Expect an empty argument set")
-
- (retval, argumentSet) = signatures[1]
- harness.check(str(retval), "Undefined", "Expect an undefined retval")
- harness.check(len(argumentSet), 1, "Expect an argument set with one argument")
-
- argument = argumentSet[0]
- harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument")
- harness.check(
- argument.identifier.QName(),
- "::TestOverloads::basic::arg1",
- "Argument has the right QName",
- )
- harness.check(argument.identifier.name, "arg1", "Argument has the right name")
- harness.check(str(argument.type), "Long", "Argument has the right type")
-
- member = iface.members[3]
- harness.check(
- len(member.overloadsForArgCount(0)), 1, "Only one overload for no args"
- )
- harness.check(len(member.overloadsForArgCount(1)), 0, "No overloads for one arg")
- harness.check(
- len(member.overloadsForArgCount(2)), 1, "Only one overload for two args"
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_promise.py b/components/script/dom/bindings/codegen/parser/tests/test_promise.py
deleted file mode 100644
index 9b418d51afe..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_promise.py
+++ /dev/null
@@ -1,177 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface A {
- legacycaller Promise<any> foo();
- };
- """
- )
- results = parser.finish()
-
- except:
- threw = True
- harness.ok(threw, "Should not allow Promise return values for legacycaller.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- Promise<any> foo();
- long foo(long arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow overloads which have both Promise and "
- "non-Promise return types.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- long foo(long arg);
- Promise<any> foo();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow overloads which have both Promise and "
- "non-Promise return types.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- Promise<any>? foo();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow nullable Promise return values.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- undefined foo(Promise<any>? arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow nullable Promise arguments.")
-
- parser = parser.reset()
- parser.parse(
- """
- interface A {
- Promise<any> foo();
- Promise<any> foo(long arg);
- };
- """
- )
- results = parser.finish()
-
- harness.ok(
- True, "Should allow overloads which only have Promise and return " "types."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- attribute Promise<any> attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow writable Promise-typed attributes.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [LegacyLenientSetter] readonly attribute Promise<any> attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "Should not allow [LegacyLenientSetter] Promise-typed attributes."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [PutForwards=bar] readonly attribute Promise<any> attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow [PutForwards] Promise-typed attributes.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [Replaceable] readonly attribute Promise<any> attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow [Replaceable] Promise-typed attributes.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface A {
- [SameObject] readonly attribute Promise<any> attr;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow [SameObject] Promise-typed attributes.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py b/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py
deleted file mode 100644
index 5a806bf2a2d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_prototype_ident.py
+++ /dev/null
@@ -1,107 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface TestIface {
- static attribute boolean prototype;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "The identifier of a static attribute must not be 'prototype'")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestIface {
- static boolean prototype();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "The identifier of a static operation must not be 'prototype'")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestIface {
- const boolean prototype = true;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "The identifier of a constant must not be 'prototype'")
-
- # Make sure that we can parse non-static attributes with 'prototype' as identifier.
- parser = parser.reset()
- parser.parse(
- """
- interface TestIface {
- attribute boolean prototype;
- };
- """
- )
- results = parser.finish()
-
- testIface = results[0]
- harness.check(
- testIface.members[0].isStatic(), False, "Attribute should not be static"
- )
- harness.check(
- testIface.members[0].identifier.name,
- "prototype",
- "Attribute identifier should be 'prototype'",
- )
-
- # Make sure that we can parse non-static operations with 'prototype' as identifier.
- parser = parser.reset()
- parser.parse(
- """
- interface TestIface {
- boolean prototype();
- };
- """
- )
- results = parser.finish()
-
- testIface = results[0]
- harness.check(
- testIface.members[0].isStatic(), False, "Operation should not be static"
- )
- harness.check(
- testIface.members[0].identifier.name,
- "prototype",
- "Operation identifier should be 'prototype'",
- )
-
- # Make sure that we can parse dictionary members with 'prototype' as identifier.
- parser = parser.reset()
- parser.parse(
- """
- dictionary TestDict {
- boolean prototype;
- };
- """
- )
- results = parser.finish()
-
- testDict = results[0]
- harness.check(
- testDict.members[0].identifier.name,
- "prototype",
- "Dictionary member should be 'prototype'",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py b/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py
deleted file mode 100644
index 5ec4dde280e..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_putForwards.py
+++ /dev/null
@@ -1,119 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface I {
- [PutForwards=B] readonly attribute long A;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface I {
- [PutForwards=B] readonly attribute J A;
- };
- interface J {
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface I {
- [PutForwards=B] attribute J A;
- };
- interface J {
- attribute long B;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface I {
- [PutForwards=B] static readonly attribute J A;
- };
- interface J {
- attribute long B;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- callback interface I {
- [PutForwards=B] readonly attribute J A;
- };
- interface J {
- attribute long B;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface I {
- [PutForwards=C] readonly attribute J A;
- [PutForwards=C] readonly attribute J B;
- };
- interface J {
- [PutForwards=D] readonly attribute K C;
- };
- interface K {
- [PutForwards=A] readonly attribute I D;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_record.py b/components/script/dom/bindings/codegen/parser/tests/test_record.py
deleted file mode 100644
index 3a31d721b27..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_record.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- dictionary Dict {};
- interface RecordArg {
- undefined foo(record<DOMString, Dict> arg);
- };
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 2, "Should know about two things")
- harness.ok(
- isinstance(results[1], WebIDL.IDLInterface), "Should have an interface here"
- )
- members = results[1].members
- harness.check(len(members), 1, "Should have one member")
- harness.ok(members[0].isMethod(), "Should have method")
- signature = members[0].signatures()[0]
- args = signature[1]
- harness.check(len(args), 1, "Should have one arg")
- harness.ok(args[0].type.isRecord(), "Should have a record type here")
- harness.ok(args[0].type.inner.isDictionary(), "Should have a dictionary inner type")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface RecordUndefinedArg {
- undefined foo(record<DOMString, undefined> arg);
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw, "Should have thrown because record can't have undefined as value type."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- dictionary Dict {
- record<DOMString, Dict> val;
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown on dictionary containing itself via record.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py b/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
deleted file mode 100644
index 06ea6a47239..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_replaceable.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-
-def should_throw(parser, harness, message, code):
- parser = parser.reset()
- threw = False
- try:
- parser.parse(code)
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown: %s" % message)
-
-
-def WebIDLTest(parser, harness):
- # The [Replaceable] extended attribute MUST take no arguments.
- should_throw(
- parser,
- harness,
- "no arguments",
- """
- interface I {
- [Replaceable=X] readonly attribute long A;
- };
- """,
- )
-
- # An attribute with the [Replaceable] extended attribute MUST NOT also be
- # declared with the [PutForwards] extended attribute.
- should_throw(
- parser,
- harness,
- "PutForwards",
- """
- interface I {
- [PutForwards=B, Replaceable] readonly attribute J A;
- };
- interface J {
- attribute long B;
- };
- """,
- )
-
- # The [Replaceable] extended attribute MUST NOT be used on an attribute
- # that is not read only.
- should_throw(
- parser,
- harness,
- "writable attribute",
- """
- interface I {
- [Replaceable] attribute long A;
- };
- """,
- )
-
- # The [Replaceable] extended attribute MUST NOT be used on a static
- # attribute.
- should_throw(
- parser,
- harness,
- "static attribute",
- """
- interface I {
- [Replaceable] static readonly attribute long A;
- };
- """,
- )
-
- # The [Replaceable] extended attribute MUST NOT be used on an attribute
- # declared on a callback interface.
- should_throw(
- parser,
- harness,
- "callback interface",
- """
- callback interface I {
- [Replaceable] readonly attribute long A;
- };
- """,
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_sanity.py b/components/script/dom/bindings/codegen/parser/tests/test_sanity.py
deleted file mode 100644
index d3184c00731..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_sanity.py
+++ /dev/null
@@ -1,7 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse("")
- parser.finish()
- harness.ok(True, "Parsing nothing doesn't throw.")
- parser.parse("interface Foo {};")
- parser.finish()
- harness.ok(True, "Parsing a silly interface doesn't throw.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py b/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py
deleted file mode 100644
index e0e967dd420..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_securecontext_extended_attribute.py
+++ /dev/null
@@ -1,499 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- [SecureContext]
- interface TestSecureContextOnInterface {
- const octet TEST_CONSTANT = 0;
- readonly attribute byte testAttribute;
- undefined testMethod(byte foo);
- };
- partial interface TestSecureContextOnInterface {
- const octet TEST_CONSTANT_2 = 0;
- readonly attribute byte testAttribute2;
- undefined testMethod2(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 6,
- "TestSecureContextOnInterface should have six members",
- )
- harness.ok(
- results[0].getExtendedAttribute("SecureContext"),
- "Interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to constant members",
- )
- harness.ok(
- results[0].members[1].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to attribute members",
- )
- harness.ok(
- results[0].members[2].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to method members",
- )
- harness.ok(
- results[0].members[3].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to constant members from partial interface",
- )
- harness.ok(
- results[0].members[4].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to attribute members from partial interface",
- )
- harness.ok(
- results[0].members[5].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to method members from partial interface",
- )
-
- # Same thing, but with the partial interface specified first:
- parser = parser.reset()
- parser.parse(
- """
- partial interface TestSecureContextOnInterfaceAfterPartialInterface {
- const octet TEST_CONSTANT_2 = 0;
- readonly attribute byte testAttribute2;
- undefined testMethod2(byte foo);
- };
- [SecureContext]
- interface TestSecureContextOnInterfaceAfterPartialInterface {
- const octet TEST_CONSTANT = 0;
- readonly attribute byte testAttribute;
- undefined testMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[1].members),
- 6,
- "TestSecureContextOnInterfaceAfterPartialInterface should have six members",
- )
- harness.ok(
- results[1].getExtendedAttribute("SecureContext"),
- "Interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[1].members[0].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to constant members",
- )
- harness.ok(
- results[1].members[1].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to attribute members",
- )
- harness.ok(
- results[1].members[2].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to method members",
- )
- harness.ok(
- results[1].members[3].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to constant members from partial interface",
- )
- harness.ok(
- results[1].members[4].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to attribute members from partial interface",
- )
- harness.ok(
- results[1].members[5].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to method members from partial interface",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestSecureContextOnPartialInterface {
- const octet TEST_CONSTANT = 0;
- readonly attribute byte testAttribute;
- undefined testMethod(byte foo);
- };
- [SecureContext]
- partial interface TestSecureContextOnPartialInterface {
- const octet TEST_CONSTANT_2 = 0;
- readonly attribute byte testAttribute2;
- undefined testMethod2(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 6,
- "TestSecureContextOnPartialInterface should have six members",
- )
- harness.ok(
- results[0].getExtendedAttribute("SecureContext") is None,
- "[SecureContext] should not propagate from a partial interface to the interface",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext") is None,
- "[SecureContext] should not propagate from a partial interface to the interface's constant members",
- )
- harness.ok(
- results[0].members[1].getExtendedAttribute("SecureContext") is None,
- "[SecureContext] should not propagate from a partial interface to the interface's attribute members",
- )
- harness.ok(
- results[0].members[2].getExtendedAttribute("SecureContext") is None,
- "[SecureContext] should not propagate from a partial interface to the interface's method members",
- )
- harness.ok(
- results[0].members[3].getExtendedAttribute("SecureContext"),
- "Constant members from [SecureContext] partial interface should be [SecureContext]",
- )
- harness.ok(
- results[0].members[4].getExtendedAttribute("SecureContext"),
- "Attribute members from [SecureContext] partial interface should be [SecureContext]",
- )
- harness.ok(
- results[0].members[5].getExtendedAttribute("SecureContext"),
- "Method members from [SecureContext] partial interface should be [SecureContext]",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestSecureContextOnInterfaceMembers {
- const octet TEST_NON_SECURE_CONSTANT_1 = 0;
- [SecureContext]
- const octet TEST_SECURE_CONSTANT = 1;
- const octet TEST_NON_SECURE_CONSTANT_2 = 2;
- readonly attribute byte testNonSecureAttribute1;
- [SecureContext]
- readonly attribute byte testSecureAttribute;
- readonly attribute byte testNonSecureAttribute2;
- undefined testNonSecureMethod1(byte foo);
- [SecureContext]
- undefined testSecureMethod(byte foo);
- undefined testNonSecureMethod2(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 9,
- "TestSecureContextOnInterfaceMembers should have nine members",
- )
- harness.ok(
- results[0].getExtendedAttribute("SecureContext") is None,
- "[SecureContext] on members should not propagate up to the interface",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext") is None,
- "Constant should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[1].getExtendedAttribute("SecureContext"),
- "Constant should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[2].getExtendedAttribute("SecureContext") is None,
- "Constant should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[3].getExtendedAttribute("SecureContext") is None,
- "Attribute should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[4].getExtendedAttribute("SecureContext"),
- "Attribute should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[5].getExtendedAttribute("SecureContext") is None,
- "Attribute should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[6].getExtendedAttribute("SecureContext") is None,
- "Method should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[7].getExtendedAttribute("SecureContext"),
- "Method should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[8].getExtendedAttribute("SecureContext") is None,
- "Method should not have [SecureContext] extended attribute",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestSecureContextOnPartialInterfaceMembers {
- };
- partial interface TestSecureContextOnPartialInterfaceMembers {
- const octet TEST_NON_SECURE_CONSTANT_1 = 0;
- [SecureContext]
- const octet TEST_SECURE_CONSTANT = 1;
- const octet TEST_NON_SECURE_CONSTANT_2 = 2;
- readonly attribute byte testNonSecureAttribute1;
- [SecureContext]
- readonly attribute byte testSecureAttribute;
- readonly attribute byte testNonSecureAttribute2;
- undefined testNonSecureMethod1(byte foo);
- [SecureContext]
- undefined testSecureMethod(byte foo);
- undefined testNonSecureMethod2(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 9,
- "TestSecureContextOnPartialInterfaceMembers should have nine members",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext") is None,
- "Constant from partial interface should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[1].getExtendedAttribute("SecureContext"),
- "Constant from partial interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[2].getExtendedAttribute("SecureContext") is None,
- "Constant from partial interface should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[3].getExtendedAttribute("SecureContext") is None,
- "Attribute from partial interface should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[4].getExtendedAttribute("SecureContext"),
- "Attribute from partial interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[5].getExtendedAttribute("SecureContext") is None,
- "Attribute from partial interface should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[6].getExtendedAttribute("SecureContext") is None,
- "Method from partial interface should not have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[7].getExtendedAttribute("SecureContext"),
- "Method from partial interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[8].getExtendedAttribute("SecureContext") is None,
- "Method from partial interface should not have [SecureContext] extended attribute",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SecureContext=something]
- interface TestSecureContextTakesNoValue1 {
- const octet TEST_SECURE_CONSTANT = 0;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "[SecureContext] must take no arguments (testing on interface)")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestSecureContextForOverloads1 {
- [SecureContext]
- undefined testSecureMethod(byte foo);
- };
- partial interface TestSecureContextForOverloads1 {
- undefined testSecureMethod(byte foo, byte bar);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "If [SecureContext] appears on an overloaded operation, then it MUST appear on all overloads",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestSecureContextForOverloads2 {
- [SecureContext]
- undefined testSecureMethod(byte foo);
- };
- partial interface TestSecureContextForOverloads2 {
- [SecureContext]
- undefined testSecureMethod(byte foo, byte bar);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- not threw,
- "[SecureContext] can appear on an overloaded operation if it appears on all overloads",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SecureContext]
- interface TestSecureContextOnInterfaceAndMember {
- [SecureContext]
- undefined testSecureMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw, "[SecureContext] must not appear on an interface and interface member"
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestSecureContextOnPartialInterfaceAndMember {
- };
- [SecureContext]
- partial interface TestSecureContextOnPartialInterfaceAndMember {
- [SecureContext]
- undefined testSecureMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "[SecureContext] must not appear on a partial interface and one of the partial interface's member's",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SecureContext]
- interface TestSecureContextOnInterfaceAndPartialInterfaceMember {
- };
- partial interface TestSecureContextOnInterfaceAndPartialInterfaceMember {
- [SecureContext]
- undefined testSecureMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "[SecureContext] must not appear on an interface and one of its partial interface's member's",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [SecureContext]
- interface TestSecureContextOnInheritedInterface {
- };
- interface TestSecureContextNotOnInheritingInterface : TestSecureContextOnInheritedInterface {
- undefined testSecureMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "[SecureContext] must appear on interfaces that inherit from another [SecureContext] interface",
- )
-
- # Test 'includes'.
- parser = parser.reset()
- parser.parse(
- """
- [SecureContext]
- interface TestSecureContextInterfaceThatIncludesNonSecureContextMixin {
- const octet TEST_CONSTANT = 0;
- };
- interface mixin TestNonSecureContextMixin {
- const octet TEST_CONSTANT_2 = 0;
- readonly attribute byte testAttribute2;
- undefined testMethod2(byte foo);
- };
- TestSecureContextInterfaceThatIncludesNonSecureContextMixin includes TestNonSecureContextMixin;
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 4,
- "TestSecureContextInterfaceThatImplementsNonSecureContextInterface should have four members",
- )
- harness.ok(
- results[0].getExtendedAttribute("SecureContext"),
- "Interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext"),
- "[SecureContext] should propagate from interface to constant members even when other members are copied from a non-[SecureContext] interface",
- )
- harness.ok(
- results[0].members[1].getExtendedAttribute("SecureContext") is None,
- "Constants copied from non-[SecureContext] mixin should not be [SecureContext]",
- )
- harness.ok(
- results[0].members[2].getExtendedAttribute("SecureContext") is None,
- "Attributes copied from non-[SecureContext] mixin should not be [SecureContext]",
- )
- harness.ok(
- results[0].members[3].getExtendedAttribute("SecureContext") is None,
- "Methods copied from non-[SecureContext] mixin should not be [SecureContext]",
- )
-
- # Test SecureContext and LegacyNoInterfaceObject
- parser = parser.reset()
- parser.parse(
- """
- [LegacyNoInterfaceObject, SecureContext]
- interface TestSecureContextLegacyNoInterfaceObject {
- undefined testSecureMethod(byte foo);
- };
- """
- )
- results = parser.finish()
- harness.check(
- len(results[0].members),
- 1,
- "TestSecureContextLegacyNoInterfaceObject should have only one member",
- )
- harness.ok(
- results[0].getExtendedAttribute("SecureContext"),
- "Interface should have [SecureContext] extended attribute",
- )
- harness.ok(
- results[0].members[0].getExtendedAttribute("SecureContext"),
- "Interface member should have [SecureContext] extended attribute",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py b/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py
deleted file mode 100644
index a11860b3728..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_special_method_signature_mismatch.py
+++ /dev/null
@@ -1,256 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch1 {
- getter long long foo(long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch2 {
- getter undefined foo(unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch3 {
- getter boolean foo(unsigned long index, boolean extraArg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch4 {
- getter boolean foo(unsigned long... index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch5 {
- getter boolean foo(optional unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch6 {
- getter boolean foo();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch7 {
- deleter long long foo(long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch9 {
- deleter boolean foo(unsigned long index, boolean extraArg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch10 {
- deleter boolean foo(unsigned long... index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch11 {
- deleter boolean foo(optional unsigned long index);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch12 {
- deleter boolean foo();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch13 {
- setter long long foo(long index, long long value);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch15 {
- setter boolean foo(unsigned long index, boolean value, long long extraArg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch16 {
- setter boolean foo(unsigned long index, boolean... value);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch17 {
- setter boolean foo(unsigned long index, optional boolean value);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodSignatureMismatch18 {
- setter boolean foo();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py b/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py
deleted file mode 100644
index 9601a0a968f..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_special_methods.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface SpecialMethods {
- getter long long (unsigned long index);
- setter long long (unsigned long index, long long value);
- getter boolean (DOMString name);
- setter boolean (DOMString name, boolean value);
- deleter boolean (DOMString name);
- readonly attribute unsigned long length;
- };
-
- interface SpecialMethodsCombination {
- getter deleter boolean (DOMString name);
- };
- """
- )
-
- results = parser.finish()
-
- def checkMethod(
- method,
- QName,
- name,
- static=False,
- getter=False,
- setter=False,
- deleter=False,
- legacycaller=False,
- stringifier=False,
- ):
- harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod")
- harness.check(method.identifier.QName(), QName, "Method has the right QName")
- harness.check(method.identifier.name, name, "Method has the right name")
- harness.check(method.isStatic(), static, "Method has the correct static value")
- harness.check(method.isGetter(), getter, "Method has the correct getter value")
- harness.check(method.isSetter(), setter, "Method has the correct setter value")
- harness.check(
- method.isDeleter(), deleter, "Method has the correct deleter value"
- )
- harness.check(
- method.isLegacycaller(),
- legacycaller,
- "Method has the correct legacycaller value",
- )
- harness.check(
- method.isStringifier(),
- stringifier,
- "Method has the correct stringifier value",
- )
-
- harness.check(len(results), 2, "Expect 2 interfaces")
-
- iface = results[0]
- harness.check(len(iface.members), 6, "Expect 6 members")
-
- checkMethod(
- iface.members[0],
- "::SpecialMethods::__indexedgetter",
- "__indexedgetter",
- getter=True,
- )
- checkMethod(
- iface.members[1],
- "::SpecialMethods::__indexedsetter",
- "__indexedsetter",
- setter=True,
- )
- checkMethod(
- iface.members[2],
- "::SpecialMethods::__namedgetter",
- "__namedgetter",
- getter=True,
- )
- checkMethod(
- iface.members[3],
- "::SpecialMethods::__namedsetter",
- "__namedsetter",
- setter=True,
- )
- checkMethod(
- iface.members[4],
- "::SpecialMethods::__nameddeleter",
- "__nameddeleter",
- deleter=True,
- )
-
- iface = results[1]
- harness.check(len(iface.members), 1, "Expect 1 member")
-
- checkMethod(
- iface.members[0],
- "::SpecialMethodsCombination::__namedgetterdeleter",
- "__namedgetterdeleter",
- getter=True,
- deleter=True,
- )
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface IndexedDeleter {
- deleter undefined(unsigned long index);
- };
- """
- )
- parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "There are no indexed deleters")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py b/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py
deleted file mode 100644
index 014737e8168..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_special_methods_uniqueness.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodUniqueness1 {
- getter deleter boolean (DOMString name);
- getter boolean (DOMString name);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodUniqueness1 {
- deleter boolean (DOMString name);
- getter deleter boolean (DOMString name);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- threw = False
- try:
- parser.parse(
- """
- interface SpecialMethodUniqueness1 {
- setter boolean (DOMString name);
- setter boolean (DOMString name);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py b/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py
deleted file mode 100644
index 948be71e4dd..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_stringifier.py
+++ /dev/null
@@ -1,196 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestStringifier {
- stringifier;
- };
- """
- )
-
- results = parser.finish()
-
- harness.ok(
- isinstance(results[0].members[0], WebIDL.IDLMethod),
- "Stringifer should be method",
- )
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface TestStringifier {
- stringifier;
- stringifier;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow two 'stringifier;'")
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- interface TestStringifier {
- stringifier;
- stringifier DOMString foo();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow a 'stringifier;' and a 'stringifier()'")
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestStringifier {
- stringifier attribute DOMString foo;
- };
- """
- )
- results = parser.finish()
- harness.ok(
- isinstance(results[0].members[0], WebIDL.IDLAttribute),
- "Stringifier attribute should be an attribute",
- )
- stringifier = results[0].members[1]
- harness.ok(
- isinstance(stringifier, WebIDL.IDLMethod),
- "Stringifier attribute should insert a method",
- )
- harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier")
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestStringifier {};
- interface mixin TestStringifierMixin {
- stringifier attribute DOMString foo;
- };
- TestStringifier includes TestStringifierMixin;
- """
- )
- results = parser.finish()
- harness.ok(
- isinstance(results[0].members[0], WebIDL.IDLAttribute),
- "Stringifier attribute should be an attribute",
- )
- stringifier = results[0].members[1]
- harness.ok(
- isinstance(stringifier, WebIDL.IDLMethod),
- "Stringifier attribute should insert a method",
- )
- harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier")
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestStringifier {
- stringifier attribute USVString foo;
- };
- """
- )
- results = parser.finish()
- stringifier = results[0].members[1]
- harness.ok(
- stringifier.signatures()[0][0].isUSVString(),
- "Stringifier attributes should allow USVString",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestStringifier {
- [Throws, NeedsSubjectPrincipal]
- stringifier attribute USVString foo;
- };
- """
- )
- results = parser.finish()
- stringifier = results[0].members[1]
- harness.ok(
- stringifier.getExtendedAttribute("Throws"),
- "Stringifier attributes should support [Throws]",
- )
- harness.ok(
- stringifier.getExtendedAttribute("NeedsSubjectPrincipal"),
- "Stringifier attributes should support [NeedsSubjectPrincipal]",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface TestStringifier {
- stringifier attribute UTF8String foo;
- };
- """
- )
- results = parser.finish()
- stringifier = results[0].members[1]
- harness.ok(
- stringifier.signatures()[0][0].isUTF8String(),
- "Stringifier attributes should allow UTF8String",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestStringifier {
- stringifier attribute ByteString foo;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow ByteString")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestStringifier {
- stringifier;
- stringifier attribute DOMString foo;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow a 'stringifier;' and a stringifier attribute")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface TestStringifier {
- stringifier attribute DOMString foo;
- stringifier attribute DOMString bar;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should not allow multiple stringifier attributes")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py b/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py
deleted file mode 100644
index f312667ec4d..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_toJSON.py
+++ /dev/null
@@ -1,309 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- object toJSON();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(not threw, "Should allow a toJSON method.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- object toJSON(object arg);
- object toJSON(long arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow overloads of a toJSON method.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- object toJSON(object arg);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(threw, "Should not allow a toJSON method with arguments.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- long toJSON();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(not threw, "Should allow a toJSON method with 'long' as return type.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- [Default] object toJSON();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- not threw, "Should allow a default toJSON method with 'object' as return type."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Test {
- [Default] long toJSON();
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should not allow a default toJSON method with non-'object' as return type.",
- )
-
- JsonTypes = [
- "byte",
- "octet",
- "short",
- "unsigned short",
- "long",
- "unsigned long",
- "long long",
- "unsigned long long",
- "float",
- "unrestricted float",
- "double",
- "unrestricted double",
- "boolean",
- "DOMString",
- "ByteString",
- "UTF8String",
- "USVString",
- "Enum",
- "InterfaceWithToJSON",
- "object",
- ]
-
- nonJsonTypes = [
- "InterfaceWithoutToJSON",
- "any",
- "Int8Array",
- "Int16Array",
- "Int32Array",
- "Uint8Array",
- "Uint16Array",
- "Uint32Array",
- "Uint8ClampedArray",
- "Float32Array",
- "Float64Array",
- "ArrayBuffer",
- ]
-
- def doTest(testIDL, shouldThrow, description):
- p = parser.reset()
- threw = False
- try:
- p.parse(
- testIDL
- + """
- enum Enum { "a", "b", "c" };
- interface InterfaceWithToJSON { long toJSON(); };
- interface InterfaceWithoutToJSON {};
- """
- )
- p.finish()
- except Exception as x:
- threw = True
- harness.ok(x.message == "toJSON method has non-JSON return type", x)
- harness.check(threw, shouldThrow, description)
-
- for type in JsonTypes:
- doTest(
- "interface Test { %s toJSON(); };" % type,
- False,
- "%s should be a JSON type" % type,
- )
-
- doTest(
- "interface Test { sequence<%s> toJSON(); };" % type,
- False,
- "sequence<%s> should be a JSON type" % type,
- )
-
- doTest(
- "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type,
- False,
- "dictionary containing only JSON type (%s) should be a JSON type" % type,
- )
-
- doTest(
- "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; "
- "interface Test { Bar toJSON(); }; " % type,
- False,
- "dictionary whose ancestors only contain JSON types should be a JSON type",
- )
-
- doTest(
- "dictionary Foo { any foo; }; dictionary Bar : Foo { %s bar; };"
- "interface Test { Bar toJSON(); };" % type,
- True,
- "dictionary whose ancestors contain non-JSON types should not be a JSON type",
- )
-
- doTest(
- "interface Test { record<DOMString, %s> toJSON(); };" % type,
- False,
- "record<DOMString, %s> should be a JSON type" % type,
- )
-
- doTest(
- "interface Test { record<ByteString, %s> toJSON(); };" % type,
- False,
- "record<ByteString, %s> should be a JSON type" % type,
- )
-
- doTest(
- "interface Test { record<UTF8String, %s> toJSON(); };" % type,
- False,
- "record<UTF8String, %s> should be a JSON type" % type,
- )
-
- doTest(
- "interface Test { record<USVString, %s> toJSON(); };" % type,
- False,
- "record<USVString, %s> should be a JSON type" % type,
- )
-
- otherUnionType = "Foo" if type != "object" else "long"
- doTest(
- "interface Foo { object toJSON(); };"
- "interface Test { (%s or %s) toJSON(); };" % (otherUnionType, type),
- False,
- "union containing only JSON types (%s or %s) should be a JSON type"
- % (otherUnionType, type),
- )
-
- doTest(
- "interface test { %s? toJSON(); };" % type,
- False,
- "Nullable type (%s) should be a JSON type" % type,
- )
-
- doTest(
- "interface Foo : InterfaceWithoutToJSON { %s toJSON(); };"
- "interface Test { Foo toJSON(); };" % type,
- False,
- "interface with toJSON should be a JSON type",
- )
-
- doTest(
- "interface Foo : InterfaceWithToJSON { };" "interface Test { Foo toJSON(); };",
- False,
- "inherited interface with toJSON should be a JSON type",
- )
-
- for type in nonJsonTypes:
- doTest(
- "interface Test { %s toJSON(); };" % type,
- True,
- "%s should not be a JSON type" % type,
- )
-
- doTest(
- "interface Test { sequence<%s> toJSON(); };" % type,
- True,
- "sequence<%s> should not be a JSON type" % type,
- )
-
- doTest(
- "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type,
- True,
- "Dictionary containing a non-JSON type (%s) should not be a JSON type"
- % type,
- )
-
- doTest(
- "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; "
- "interface Test { Bar toJSON(); }; " % type,
- True,
- "dictionary whose ancestors only contain non-JSON types should not be a JSON type",
- )
-
- doTest(
- "interface Test { record<DOMString, %s> toJSON(); };" % type,
- True,
- "record<DOMString, %s> should not be a JSON type" % type,
- )
-
- doTest(
- "interface Test { record<ByteString, %s> toJSON(); };" % type,
- True,
- "record<ByteString, %s> should not be a JSON type" % type,
- )
-
- doTest(
- "interface Test { record<USVString, %s> toJSON(); };" % type,
- True,
- "record<USVString, %s> should not be a JSON type" % type,
- )
-
- if type != "any":
- doTest(
- "interface Foo { object toJSON(); }; "
- "interface Test { (Foo or %s) toJSON(); };" % type,
- True,
- "union containing a non-JSON type (%s) should not be a JSON type"
- % type,
- )
-
- doTest(
- "interface test { %s? toJSON(); };" % type,
- True,
- "Nullable type (%s) should not be a JSON type" % type,
- )
-
- doTest(
- "dictionary Foo { long foo; any bar; };" "interface Test { Foo toJSON(); };",
- True,
- "dictionary containing a non-JSON type should not be a JSON type",
- )
-
- doTest(
- "interface Foo : InterfaceWithoutToJSON { }; "
- "interface Test { Foo toJSON(); };",
- True,
- "interface without toJSON should not be a JSON type",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py b/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py
deleted file mode 100644
index 7becfdca1f3..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_treatNonCallableAsNull.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- [TreatNonCallableAsNull] callback Function = any(any... arguments);
-
- interface TestTreatNonCallableAsNull1 {
- attribute Function? onfoo;
- attribute Function onbar;
- };
- """
- )
-
- results = parser.finish()
-
- iface = results[1]
- attr = iface.members[0]
- harness.check(attr.type.treatNonCallableAsNull(), True, "Got the expected value")
- attr = iface.members[1]
- harness.check(attr.type.treatNonCallableAsNull(), False, "Got the expected value")
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- callback Function = any(any... arguments);
-
- interface TestTreatNonCallableAsNull2 {
- [TreatNonCallableAsNull] attribute Function onfoo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- callback Function = any(any... arguments);
-
- [TreatNonCallableAsNull]
- interface TestTreatNonCallableAsNull3 {
- attribute Function onfoo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
-
- threw = False
- try:
- parser.parse(
- """
- [TreatNonCallableAsNull, LegacyTreatNonObjectAsNull]
- callback Function = any(any... arguments);
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_typedef.py b/components/script/dom/bindings/codegen/parser/tests/test_typedef.py
deleted file mode 100644
index c19d064efff..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_typedef.py
+++ /dev/null
@@ -1,94 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- typedef long mylong;
- typedef long? mynullablelong;
- interface Foo {
- const mylong X = 5;
- undefined foo(optional mynullablelong arg = 7);
- undefined bar(optional mynullablelong arg = null);
- undefined baz(mylong arg);
- };
- """
- )
-
- results = parser.finish()
-
- harness.check(
- results[2].members[1].signatures()[0][1][0].type.name,
- "LongOrNull",
- "Should expand typedefs",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef long? mynullablelong;
- interface Foo {
- undefined foo(mynullablelong? Y);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown on nullable inside nullable arg.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- typedef long? mynullablelong;
- interface Foo {
- const mynullablelong? X = 5;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown on nullable inside nullable const.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Foo {
- const mynullablelong? X = 5;
- };
- typedef long? mynullablelong;
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown on nullable inside nullable const typedef "
- "after interface.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Foo {
- const mylong X = 5;
- };
- typedef long mylong;
- """
- )
-
- results = parser.finish()
-
- harness.check(
- results[0].members[0].type.name,
- "Long",
- "Should expand typedefs that come before interface",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py b/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py
deleted file mode 100644
index 2aab3a8a91f..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_typedef_identifier_conflict.py
+++ /dev/null
@@ -1,19 +0,0 @@
-def WebIDLTest(parser, harness):
- exception = None
- try:
- parser.parse(
- """
- typedef long foo;
- typedef long foo;
- """
- )
-
- results = parser.finish()
- except Exception as e:
- exception = e
-
- harness.ok(exception, "Should have thrown.")
- harness.ok(
- "Multiple unresolvable definitions of identifier 'foo'" in str(exception),
- "Should have a sane exception message",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_undefined.py b/components/script/dom/bindings/codegen/parser/tests/test_undefined.py
deleted file mode 100644
index 4731ee1bcd7..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_undefined.py
+++ /dev/null
@@ -1,246 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- try:
- parser.parse(
- """
- dictionary Dict {
- undefined undefinedMember;
- double bar;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "undefined must not be used as the type of a dictionary member")
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- dictionary Dict {
- (undefined or double) undefinedMemberOfUnionInDict;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of a dictionary member, "
- "whether directly or in a union",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- double bar(undefined foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a regular operation)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- getter double(undefined name);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a getter)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- setter undefined(DOMString name, undefined value);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a setter)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- deleter undefined (undefined name);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a deleter)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- constructor (undefined foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a constructor)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- callback Callback = undefined (undefined foo);
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a callback)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- async iterable(undefined name);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of an async iterable "
- "iterator)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- static double bar(undefined foo);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined must not be used as the type of an argument in any "
- "circumstance (so not as the argument of a static operation)",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- const undefined FOO = undefined;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined is not a valid type for a constant",
- )
-
- parser = parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface Foo {
- const any FOO = undefined;
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "undefined is not a valid value for a constant",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py b/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py
deleted file mode 100644
index b024d317492..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_unenumerable_own_properties.py
+++ /dev/null
@@ -1,71 +0,0 @@
-def WebIDLTest(parser, harness):
-
- parser.parse(
- """
- interface Foo {};
- [LegacyUnenumerableNamedProperties]
- interface Bar : Foo {
- getter long(DOMString name);
- };
- interface Baz : Bar {
- getter long(DOMString name);
- };
- """
- )
- results = parser.finish()
- harness.check(len(results), 3, "Should have three interfaces")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyUnenumerableNamedProperties]
- interface NoNamedGetter {
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyUnenumerableNamedProperties=Foo]
- interface ShouldNotHaveArg {
- getter long(DOMString name);
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- [LegacyUnenumerableNamedProperties]
- interface Foo {
- getter long(DOMString name);
- };
- interface Bar : Foo {};
- [LegacyUnenumerableNamedProperties]
- interface Baz : Bar {
- getter long(DOMString name);
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py b/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py
deleted file mode 100644
index 500d123ddb2..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_unforgeable.py
+++ /dev/null
@@ -1,311 +0,0 @@
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface Child : Parent {
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results),
- 2,
- "Should be able to inherit from an interface with "
- "[LegacyUnforgeable] properties.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Child : Parent {
- const short foo = 10;
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results),
- 2,
- "Should be able to inherit from an interface with "
- "[LegacyUnforgeable] properties even if we have a constant with "
- "the same name.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Child : Parent {
- static attribute short foo;
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results),
- 2,
- "Should be able to inherit from an interface with "
- "[LegacyUnforgeable] properties even if we have a static attribute "
- "with the same name.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Child : Parent {
- static undefined foo();
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results),
- 2,
- "Should be able to inherit from an interface with "
- "[LegacyUnforgeable] properties even if we have a static operation "
- "with the same name.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- undefined foo();
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should have thrown when shadowing unforgeable attribute on "
- "parent with operation.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- undefined foo();
- };
- interface Parent {
- [LegacyUnforgeable] undefined foo();
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
- harness.ok(
- threw,
- "Should have thrown when shadowing unforgeable operation on "
- "parent with operation.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- attribute short foo;
- };
- interface Parent {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw,
- "Should have thrown when shadowing unforgeable attribute on "
- "parent with attribute.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- attribute short foo;
- };
- interface Parent {
- [LegacyUnforgeable] undefined foo();
- };
- """
- )
-
- results = parser.finish()
- except Exception as x:
- threw = True
- harness.ok(
- threw,
- "Should have thrown when shadowing unforgeable operation on "
- "parent with attribute.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface Child : Parent {
- };
- interface Parent {};
- interface mixin Mixin {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- Parent includes Mixin;
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results),
- 4,
- "Should be able to inherit from an interface with a "
- "mixin with [LegacyUnforgeable] properties.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- undefined foo();
- };
- interface Parent {};
- interface mixin Mixin {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- Parent includes Mixin;
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown when shadowing unforgeable attribute "
- "of parent's consequential interface.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- };
- interface Parent : GrandParent {};
- interface GrandParent {};
- interface mixin Mixin {
- [LegacyUnforgeable] readonly attribute long foo;
- };
- GrandParent includes Mixin;
- interface mixin ChildMixin {
- undefined foo();
- };
- Child includes ChildMixin;
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown when our consequential interface shadows unforgeable attribute "
- "of ancestor's consequential interface.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface Child : Parent {
- };
- interface Parent : GrandParent {};
- interface GrandParent {};
- interface mixin Mixin {
- [LegacyUnforgeable] undefined foo();
- };
- GrandParent includes Mixin;
- interface mixin ChildMixin {
- undefined foo();
- };
- Child includes ChildMixin;
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown when our consequential interface shadows unforgeable operation "
- "of ancestor's consequential interface.",
- )
-
- parser = parser.reset()
- parser.parse(
- """
- interface iface {
- [LegacyUnforgeable] attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- harness.check(
- len(results), 1, "Should allow writable [LegacyUnforgeable] attribute."
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface iface {
- [LegacyUnforgeable] static readonly attribute long foo;
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown for static [LegacyUnforgeable] attribute.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union.py b/components/script/dom/bindings/codegen/parser/tests/test_union.py
deleted file mode 100644
index 7fc1236d54e..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_union.py
+++ /dev/null
@@ -1,198 +0,0 @@
-import WebIDL
-import itertools
-import string
-
-# We'd like to use itertools.chain but it's 2.6 or higher.
-
-
-def chain(*iterables):
- # chain('ABC', 'DEF') --> A B C D E F
- for it in iterables:
- for element in it:
- yield element
-
-
-# We'd like to use itertools.combinations but it's 2.6 or higher.
-def combinations(iterable, r):
- # combinations('ABCD', 2) --> AB AC AD BC BD CD
- # combinations(range(4), 3) --> 012 013 023 123
- pool = tuple(iterable)
- n = len(pool)
- if r > n:
- return
- indices = list(range(r))
- yield tuple(pool[i] for i in indices)
- while True:
- for i in reversed(range(r)):
- if indices[i] != i + n - r:
- break
- else:
- return
- indices[i] += 1
- for j in range(i + 1, r):
- indices[j] = indices[j - 1] + 1
- yield tuple(pool[i] for i in indices)
-
-
-# We'd like to use itertools.combinations_with_replacement but it's 2.7 or
-# higher.
-def combinations_with_replacement(iterable, r):
- # combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC
- pool = tuple(iterable)
- n = len(pool)
- if not n and r:
- return
- indices = [0] * r
- yield tuple(pool[i] for i in indices)
- while True:
- for i in reversed(range(r)):
- if indices[i] != n - 1:
- break
- else:
- return
- indices[i:] = [indices[i] + 1] * (r - i)
- yield tuple(pool[i] for i in indices)
-
-
-def WebIDLTest(parser, harness):
- types = [
- "float",
- "double",
- "short",
- "unsigned short",
- "long",
- "unsigned long",
- "long long",
- "unsigned long long",
- "boolean",
- "byte",
- "octet",
- "DOMString",
- "ByteString",
- "USVString",
- # "sequence<float>",
- "object",
- "ArrayBuffer",
- # "Date",
- "TestInterface1",
- "TestInterface2",
- ]
-
- testPre = """
- interface TestInterface1 {
- };
- interface TestInterface2 {
- };
- """
-
- interface = (
- testPre
- + """
- interface PrepareForTest {
- """
- )
- for (i, type) in enumerate(types):
- interface += string.Template(
- """
- readonly attribute ${type} attr${i};
- """
- ).substitute(i=i, type=type)
- interface += """
- };
- """
-
- parser.parse(interface)
- results = parser.finish()
-
- iface = results[2]
-
- parser = parser.reset()
-
- def typesAreDistinguishable(t):
- return all(u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
-
- def typesAreNotDistinguishable(t):
- return any(not u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2))
-
- def unionTypeName(t):
- if len(t) > 2:
- t[0:2] = [unionTypeName(t[0:2])]
- return "(" + " or ".join(t) + ")"
-
- # typeCombinations is an iterable of tuples containing the name of the type
- # as a string and the parsed IDL type.
- def unionTypes(typeCombinations, predicate):
- for c in typeCombinations:
- if predicate(t[1] for t in c):
- yield unionTypeName([t[0] for t in c])
-
- # We limit invalid union types with a union member type to the subset of 3
- # types with one invalid combination.
- # typeCombinations is an iterable of tuples containing the name of the type
- # as a string and the parsed IDL type.
- def invalidUnionWithUnion(typeCombinations):
- for c in typeCombinations:
- if (
- typesAreNotDistinguishable((c[0][1], c[1][1]))
- and typesAreDistinguishable((c[1][1], c[2][1]))
- and typesAreDistinguishable((c[0][1], c[2][1]))
- ):
- yield unionTypeName([t[0] for t in c])
-
- # Create a list of tuples containing the name of the type as a string and
- # the parsed IDL type.
- types = zip(types, (a.type for a in iface.members))
-
- validUnionTypes = chain(
- unionTypes(combinations(types, 2), typesAreDistinguishable),
- unionTypes(combinations(types, 3), typesAreDistinguishable),
- )
- invalidUnionTypes = chain(
- unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable),
- invalidUnionWithUnion(combinations(types, 3)),
- )
- interface = (
- testPre
- + """
- interface TestUnion {
- """
- )
- for (i, type) in enumerate(validUnionTypes):
- interface += string.Template(
- """
- undefined method${i}(${type} arg);
- ${type} returnMethod${i}();
- attribute ${type} attr${i};
- undefined optionalMethod${i}(${type}? arg);
- """
- ).substitute(i=i, type=type)
- interface += """
- };
- """
- parser.parse(interface)
- results = parser.finish()
-
- parser = parser.reset()
-
- for invalid in invalidUnionTypes:
- interface = (
- testPre
- + string.Template(
- """
- interface TestUnion {
- undefined method(${type} arg);
- };
- """
- ).substitute(type=invalid)
- )
-
- threw = False
- try:
- parser.parse(interface)
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
-
- parser = parser.reset()
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union_any.py b/components/script/dom/bindings/codegen/parser/tests/test_union_any.py
deleted file mode 100644
index caba44b55f9..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_union_any.py
+++ /dev/null
@@ -1,16 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface AnyNotInUnion {
- undefined foo((any or DOMString) arg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py b/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py
deleted file mode 100644
index d15ed4cfb54..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py
+++ /dev/null
@@ -1,60 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface OneNullableInUnion {
- undefined foo((object? or DOMString?) arg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Two nullable member types of a union should have thrown.")
-
- parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface NullableInNullableUnion {
- undefined foo((object? or DOMString)? arg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "A nullable union type with a nullable member type should have " "thrown.",
- )
-
- parser.reset()
- threw = False
-
- try:
- parser.parse(
- """
- interface NullableInUnionNullableUnionHelper {
- };
- interface NullableInUnionNullableUnion {
- undefined foo(((object? or DOMString) or NullableInUnionNullableUnionHelper)? arg);
- };
- """
- )
-
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "A nullable union type with a nullable member type should have " "thrown.",
- )
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py b/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py
deleted file mode 100644
index effede391cb..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_usvstring.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# -*- coding: UTF-8 -*-
-
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- interface TestUSVString {
- attribute USVString svs;
- };
- """
- )
-
- results = parser.finish()
-
- harness.check(len(results), 1, "Should be one production")
- harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface")
- iface = results[0]
- harness.check(
- iface.identifier.QName(), "::TestUSVString", "Interface has the right QName"
- )
- harness.check(
- iface.identifier.name, "TestUSVString", "Interface has the right name"
- )
- harness.check(iface.parent, None, "Interface has no parent")
-
- members = iface.members
- harness.check(len(members), 1, "Should be one member")
-
- attr = members[0]
- harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute")
- harness.check(
- attr.identifier.QName(), "::TestUSVString::svs", "Attr has correct QName"
- )
- harness.check(attr.identifier.name, "svs", "Attr has correct name")
- harness.check(str(attr.type), "USVString", "Attr type is the correct name")
- harness.ok(attr.type.isUSVString(), "Should be USVString type")
- harness.ok(attr.type.isString(), "Should be String collective type")
- harness.ok(not attr.type.isDOMString(), "Should be not be DOMString type")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py b/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py
deleted file mode 100644
index 3fd3dccd37a..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_variadic_callback.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import WebIDL
-
-
-def WebIDLTest(parser, harness):
- parser.parse(
- """
- callback TestVariadicCallback = any(any... arguments);
- """
- )
-
- results = parser.finish()
-
- harness.ok(True, "TestVariadicCallback callback parsed without error.")
diff --git a/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py b/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py
deleted file mode 100644
index 06ce09d8236..00000000000
--- a/components/script/dom/bindings/codegen/parser/tests/test_variadic_constraints.py
+++ /dev/null
@@ -1,74 +0,0 @@
-def WebIDLTest(parser, harness):
- threw = False
- try:
- parser.parse(
- """
- interface VariadicConstraints1 {
- undefined foo(byte... arg1, byte arg2);
- };
- """
- )
- results = parser.finish()
-
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown on variadic argument followed by required " "argument.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface VariadicConstraints2 {
- undefined foo(byte... arg1, optional byte arg2);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown on variadic argument followed by optional " "argument.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface VariadicConstraints3 {
- undefined foo(optional byte... arg1);
- };
- """
- )
- results = parser.finish()
-
- except:
- threw = True
-
- harness.ok(
- threw,
- "Should have thrown on variadic argument explicitly flagged as " "optional.",
- )
-
- parser = parser.reset()
- threw = False
- try:
- parser.parse(
- """
- interface VariadicConstraints4 {
- undefined foo(byte... arg1 = 0);
- };
- """
- )
- results = parser.finish()
- except:
- threw = True
-
- harness.ok(threw, "Should have thrown on variadic argument with default value.")
diff --git a/components/script/dom/bindings/codegen/parser/union-typedef.patch b/components/script/dom/bindings/codegen/parser/union-typedef.patch
deleted file mode 100644
index 20efea8e129..00000000000
--- a/components/script/dom/bindings/codegen/parser/union-typedef.patch
+++ /dev/null
@@ -1,22 +0,0 @@
---- WebIDL.py
-+++ WebIDL.py
-@@ -2624,10 +2624,18 @@ class IDLUnionType(IDLType):
- return type.name
-
- for (i, type) in enumerate(self.memberTypes):
-- if not type.isComplete():
-+ # Exclude typedefs because if given "typedef (B or C) test",
-+ # we want AOrTest, not AOrBOrC
-+ if not type.isComplete() and not isinstance(type, IDLTypedefType):
- self.memberTypes[i] = type.complete(scope)
-
- self.name = "Or".join(typeName(type) for type in self.memberTypes)
-+
-+ # We do this again to complete the typedef types
-+ for (i, type) in enumerate(self.memberTypes):
-+ if not type.isComplete():
-+ self.memberTypes[i] = type.complete(scope)
-+
- self.flatMemberTypes = list(self.memberTypes)
- i = 0
- while i < len(self.flatMemberTypes):
diff --git a/components/script/dom/bindings/codegen/parser/update.sh b/components/script/dom/bindings/codegen/parser/update.sh
deleted file mode 100755
index cec4d6a378e..00000000000
--- a/components/script/dom/bindings/codegen/parser/update.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-wget https://hg.mozilla.org/mozilla-central/raw-file/tip/dom/bindings/parser/WebIDL.py -O WebIDL.py
-patch < abstract.patch
-patch < debug.patch
-patch < callback-location.patch
-patch < union-typedef.patch
-patch < inline.patch
-patch < readable-stream.patch
-
-wget https://hg.mozilla.org/mozilla-central/archive/tip.zip/dom/bindings/parser/tests/ -O tests.zip
-rm -r tests
-mkdir tests
-unzip -d tests -j tests.zip
-rm tests.zip WebIDL.py.orig
diff --git a/components/script/dom/bindings/codegen/ply/ANNOUNCE b/components/script/dom/bindings/codegen/ply/ANNOUNCE
deleted file mode 100644
index c430051cf45..00000000000
--- a/components/script/dom/bindings/codegen/ply/ANNOUNCE
+++ /dev/null
@@ -1,40 +0,0 @@
-January 31, 2017
-
- Announcing : PLY-3.10 (Python Lex-Yacc)
-
- http://www.dabeaz.com/ply
-
-I'm pleased to announce PLY-3.10--a pure Python implementation of the
-common parsing tools lex and yacc. PLY-3.10 is a minor bug fix
-release. It supports both Python 2 and Python 3.
-
-If you are new to PLY, here are a few highlights:
-
-- PLY is closely modeled after traditional lex/yacc. If you know how
- to use these or similar tools in other languages, you will find
- PLY to be comparable.
-
-- PLY provides very extensive error reporting and diagnostic
- information to assist in parser construction. The original
- implementation was developed for instructional purposes. As
- a result, the system tries to identify the most common types
- of errors made by novice users.
-
-- PLY provides full support for empty productions, error recovery,
- precedence rules, and ambiguous grammars.
-
-- Parsing is based on LR-parsing which is fast, memory efficient,
- better suited to large grammars, and which has a number of nice
- properties when dealing with syntax errors and other parsing
- problems. Currently, PLY can build its parsing tables using
- either SLR or LALR(1) algorithms.
-
-More information about PLY can be obtained on the PLY webpage at:
-
- http://www.dabeaz.com/ply
-
-PLY is freely available.
-
-Cheers,
-
-David Beazley (http://www.dabeaz.com) \ No newline at end of file
diff --git a/components/script/dom/bindings/codegen/ply/CHANGES b/components/script/dom/bindings/codegen/ply/CHANGES
deleted file mode 100644
index 815c23184e4..00000000000
--- a/components/script/dom/bindings/codegen/ply/CHANGES
+++ /dev/null
@@ -1,1394 +0,0 @@
-Version 3.10
----------------------
-01/31/17: beazley
- Changed grammar signature computation to not involve hashing
- functions. Parts are just combined into a big string.
-
-10/07/16: beazley
- Fixed Issue #101: Incorrect shift-reduce conflict resolution with
- precedence specifier.
-
- PLY was incorrectly resolving shift-reduce conflicts in certain
- cases. For example, in the example/calc/calc.py example, you
- could trigger it doing this:
-
- calc > -3 - 4
- 1 (correct answer should be -7)
- calc >
-
- Issue and suggested patch contributed by https://github.com/RomaVis
-
-Version 3.9
----------------------
-08/30/16: beazley
- Exposed the parser state number as the parser.state attribute
- in productions and error functions. For example:
-
- def p_somerule(p):
- '''
- rule : A B C
- '''
- print('State:', p.parser.state)
-
- May address issue #65 (publish current state in error callback).
-
-08/30/16: beazley
- Fixed Issue #88. Python3 compatibility with ply/cpp.
-
-08/30/16: beazley
- Fixed Issue #93. Ply can crash if SyntaxError is raised inside
- a production. Not actually sure if the original implementation
- worked as documented at all. Yacc has been modified to follow
- the spec as outlined in the CHANGES noted for 11/27/07 below.
-
-08/30/16: beazley
- Fixed Issue #97. Failure with code validation when the original
- source files aren't present. Validation step now ignores
- the missing file.
-
-08/30/16: beazley
- Minor fixes to version numbers.
-
-Version 3.8
----------------------
-10/02/15: beazley
- Fixed issues related to Python 3.5. Patch contributed by Barry Warsaw.
-
-Version 3.7
----------------------
-08/25/15: beazley
- Fixed problems when reading table files from pickled data.
-
-05/07/15: beazley
- Fixed regression in handling of table modules if specified as module
- objects. See https://github.com/dabeaz/ply/issues/63
-
-Version 3.6
----------------------
-04/25/15: beazley
- If PLY is unable to create the 'parser.out' or 'parsetab.py' files due
- to permission issues, it now just issues a warning message and
- continues to operate. This could happen if a module using PLY
- is installed in a funny way where tables have to be regenerated, but
- for whatever reason, the user doesn't have write permission on
- the directory where PLY wants to put them.
-
-04/24/15: beazley
- Fixed some issues related to use of packages and table file
- modules. Just to emphasize, PLY now generates its special
- files such as 'parsetab.py' and 'lextab.py' in the *SAME*
- directory as the source file that uses lex() and yacc().
-
- If for some reason, you want to change the name of the table
- module, use the tabmodule and lextab options:
-
- lexer = lex.lex(lextab='spamlextab')
- parser = yacc.yacc(tabmodule='spamparsetab')
-
- If you specify a simple name as shown, the module will still be
- created in the same directory as the file invoking lex() or yacc().
- If you want the table files to be placed into a different package,
- then give a fully qualified package name. For example:
-
- lexer = lex.lex(lextab='pkgname.files.lextab')
- parser = yacc.yacc(tabmodule='pkgname.files.parsetab')
-
- For this to work, 'pkgname.files' must already exist as a valid
- Python package (i.e., the directories must already exist and be
- set up with the proper __init__.py files, etc.).
-
-Version 3.5
----------------------
-04/21/15: beazley
- Added support for defaulted_states in the parser. A
- defaulted_state is a state where the only legal action is a
- reduction of a single grammar rule across all valid input
- tokens. For such states, the rule is reduced and the
- reading of the next lookahead token is delayed until it is
- actually needed at a later point in time.
-
- This delay in consuming the next lookahead token is a
- potentially important feature in advanced parsing
- applications that require tight interaction between the
- lexer and the parser. For example, a grammar rule change
- modify the lexer state upon reduction and have such changes
- take effect before the next input token is read.
-
- *** POTENTIAL INCOMPATIBILITY ***
- One potential danger of defaulted_states is that syntax
- errors might be deferred to a a later point of processing
- than where they were detected in past versions of PLY.
- Thus, it's possible that your error handling could change
- slightly on the same inputs. defaulted_states do not change
- the overall parsing of the input (i.e., the same grammar is
- accepted).
-
- If for some reason, you need to disable defaulted states,
- you can do this:
-
- parser = yacc.yacc()
- parser.defaulted_states = {}
-
-04/21/15: beazley
- Fixed debug logging in the parser. It wasn't properly reporting goto states
- on grammar rule reductions.
-
-04/20/15: beazley
- Added actions to be defined to character literals (Issue #32). For example:
-
- literals = [ '{', '}' ]
-
- def t_lbrace(t):
- r'\{'
- # Some action
- t.type = '{'
- return t
-
- def t_rbrace(t):
- r'\}'
- # Some action
- t.type = '}'
- return t
-
-04/19/15: beazley
- Import of the 'parsetab.py' file is now constrained to only consider the
- directory specified by the outputdir argument to yacc(). If not supplied,
- the import will only consider the directory in which the grammar is defined.
- This should greatly reduce problems with the wrong parsetab.py file being
- imported by mistake. For example, if it's found somewhere else on the path
- by accident.
-
- *** POTENTIAL INCOMPATIBILITY *** It's possible that this might break some
- packaging/deployment setup if PLY was instructed to place its parsetab.py
- in a different location. You'll have to specify a proper outputdir= argument
- to yacc() to fix this if needed.
-
-04/19/15: beazley
- Changed default output directory to be the same as that in which the
- yacc grammar is defined. If your grammar is in a file 'calc.py',
- then the parsetab.py and parser.out files should be generated in the
- same directory as that file. The destination directory can be changed
- using the outputdir= argument to yacc().
-
-04/19/15: beazley
- Changed the parsetab.py file signature slightly so that the parsetab won't
- regenerate if created on a different major version of Python (ie., a
- parsetab created on Python 2 will work with Python 3).
-
-04/16/15: beazley
- Fixed Issue #44 call_errorfunc() should return the result of errorfunc()
-
-04/16/15: beazley
- Support for versions of Python <2.7 is officially dropped. PLY may work, but
- the unit tests requires Python 2.7 or newer.
-
-04/16/15: beazley
- Fixed bug related to calling yacc(start=...). PLY wasn't regenerating the
- table file correctly for this case.
-
-04/16/15: beazley
- Added skipped tests for PyPy and Java. Related to use of Python's -O option.
-
-05/29/13: beazley
- Added filter to make unit tests pass under 'python -3'.
- Reported by Neil Muller.
-
-05/29/13: beazley
- Fixed CPP_INTEGER regex in ply/cpp.py (Issue 21).
- Reported by @vbraun.
-
-05/29/13: beazley
- Fixed yacc validation bugs when from __future__ import unicode_literals
- is being used. Reported by Kenn Knowles.
-
-05/29/13: beazley
- Added support for Travis-CI. Contributed by Kenn Knowles.
-
-05/29/13: beazley
- Added a .gitignore file. Suggested by Kenn Knowles.
-
-05/29/13: beazley
- Fixed validation problems for source files that include a
- different source code encoding specifier. Fix relies on
- the inspect module. Should work on Python 2.6 and newer.
- Not sure about older versions of Python.
- Contributed by Michael Droettboom
-
-05/21/13: beazley
- Fixed unit tests for yacc to eliminate random failures due to dict hash value
- randomization in Python 3.3
- Reported by Arfrever
-
-10/15/12: beazley
- Fixed comment whitespace processing bugs in ply/cpp.py.
- Reported by Alexei Pososin.
-
-10/15/12: beazley
- Fixed token names in ply/ctokens.py to match rule names.
- Reported by Alexei Pososin.
-
-04/26/12: beazley
- Changes to functions available in panic mode error recover. In previous versions
- of PLY, the following global functions were available for use in the p_error() rule:
-
- yacc.errok() # Reset error state
- yacc.token() # Get the next token
- yacc.restart() # Reset the parsing stack
-
- The use of global variables was problematic for code involving multiple parsers
- and frankly was a poor design overall. These functions have been moved to methods
- of the parser instance created by the yacc() function. You should write code like
- this:
-
- def p_error(p):
- ...
- parser.errok()
-
- parser = yacc.yacc()
-
- *** POTENTIAL INCOMPATIBILITY *** The original global functions now issue a
- DeprecationWarning.
-
-04/19/12: beazley
- Fixed some problems with line and position tracking and the use of error
- symbols. If you have a grammar rule involving an error rule like this:
-
- def p_assignment_bad(p):
- '''assignment : location EQUALS error SEMI'''
- ...
-
- You can now do line and position tracking on the error token. For example:
-
- def p_assignment_bad(p):
- '''assignment : location EQUALS error SEMI'''
- start_line = p.lineno(3)
- start_pos = p.lexpos(3)
-
- If the trackng=True option is supplied to parse(), you can additionally get
- spans:
-
- def p_assignment_bad(p):
- '''assignment : location EQUALS error SEMI'''
- start_line, end_line = p.linespan(3)
- start_pos, end_pos = p.lexspan(3)
-
- Note that error handling is still a hairy thing in PLY. This won't work
- unless your lexer is providing accurate information. Please report bugs.
- Suggested by a bug reported by Davis Herring.
-
-04/18/12: beazley
- Change to doc string handling in lex module. Regex patterns are now first
- pulled from a function's .regex attribute. If that doesn't exist, then
- .doc is checked as a fallback. The @TOKEN decorator now sets the .regex
- attribute of a function instead of its doc string.
- Changed suggested by Kristoffer Ellersgaard Koch.
-
-04/18/12: beazley
- Fixed issue #1: Fixed _tabversion. It should use __tabversion__ instead of __version__
- Reported by Daniele Tricoli
-
-04/18/12: beazley
- Fixed issue #8: Literals empty list causes IndexError
- Reported by Walter Nissen.
-
-04/18/12: beazley
- Fixed issue #12: Typo in code snippet in documentation
- Reported by florianschanda.
-
-04/18/12: beazley
- Fixed issue #10: Correctly escape t_XOREQUAL pattern.
- Reported by Andy Kittner.
-
-Version 3.4
----------------------
-02/17/11: beazley
- Minor patch to make cpp.py compatible with Python 3. Note: This
- is an experimental file not currently used by the rest of PLY.
-
-02/17/11: beazley
- Fixed setup.py trove classifiers to properly list PLY as
- Python 3 compatible.
-
-01/02/11: beazley
- Migration of repository to github.
-
-Version 3.3
------------------------------
-08/25/09: beazley
- Fixed issue 15 related to the set_lineno() method in yacc. Reported by
- mdsherry.
-
-08/25/09: beazley
- Fixed a bug related to regular expression compilation flags not being
- properly stored in lextab.py files created by the lexer when running
- in optimize mode. Reported by Bruce Frederiksen.
-
-
-Version 3.2
------------------------------
-03/24/09: beazley
- Added an extra check to not print duplicated warning messages
- about reduce/reduce conflicts.
-
-03/24/09: beazley
- Switched PLY over to a BSD-license.
-
-03/23/09: beazley
- Performance optimization. Discovered a few places to make
- speedups in LR table generation.
-
-03/23/09: beazley
- New warning message. PLY now warns about rules never
- reduced due to reduce/reduce conflicts. Suggested by
- Bruce Frederiksen.
-
-03/23/09: beazley
- Some clean-up of warning messages related to reduce/reduce errors.
-
-03/23/09: beazley
- Added a new picklefile option to yacc() to write the parsing
- tables to a filename using the pickle module. Here is how
- it works:
-
- yacc(picklefile="parsetab.p")
-
- This option can be used if the normal parsetab.py file is
- extremely large. For example, on jython, it is impossible
- to read parsing tables if the parsetab.py exceeds a certain
- threshold.
-
- The filename supplied to the picklefile option is opened
- relative to the current working directory of the Python
- interpreter. If you need to refer to the file elsewhere,
- you will need to supply an absolute or relative path.
-
- For maximum portability, the pickle file is written
- using protocol 0.
-
-03/13/09: beazley
- Fixed a bug in parser.out generation where the rule numbers
- where off by one.
-
-03/13/09: beazley
- Fixed a string formatting bug with one of the error messages.
- Reported by Richard Reitmeyer
-
-Version 3.1
------------------------------
-02/28/09: beazley
- Fixed broken start argument to yacc(). PLY-3.0 broke this
- feature by accident.
-
-02/28/09: beazley
- Fixed debugging output. yacc() no longer reports shift/reduce
- or reduce/reduce conflicts if debugging is turned off. This
- restores similar behavior in PLY-2.5. Reported by Andrew Waters.
-
-Version 3.0
------------------------------
-02/03/09: beazley
- Fixed missing lexer attribute on certain tokens when
- invoking the parser p_error() function. Reported by
- Bart Whiteley.
-
-02/02/09: beazley
- The lex() command now does all error-reporting and diagonistics
- using the logging module interface. Pass in a Logger object
- using the errorlog parameter to specify a different logger.
-
-02/02/09: beazley
- Refactored ply.lex to use a more object-oriented and organized
- approach to collecting lexer information.
-
-02/01/09: beazley
- Removed the nowarn option from lex(). All output is controlled
- by passing in a logger object. Just pass in a logger with a high
- level setting to suppress output. This argument was never
- documented to begin with so hopefully no one was relying upon it.
-
-02/01/09: beazley
- Discovered and removed a dead if-statement in the lexer. This
- resulted in a 6-7% speedup in lexing when I tested it.
-
-01/13/09: beazley
- Minor change to the procedure for signalling a syntax error in a
- production rule. A normal SyntaxError exception should be raised
- instead of yacc.SyntaxError.
-
-01/13/09: beazley
- Added a new method p.set_lineno(n,lineno) that can be used to set the
- line number of symbol n in grammar rules. This simplifies manual
- tracking of line numbers.
-
-01/11/09: beazley
- Vastly improved debugging support for yacc.parse(). Instead of passing
- debug as an integer, you can supply a Logging object (see the logging
- module). Messages will be generated at the ERROR, INFO, and DEBUG
- logging levels, each level providing progressively more information.
- The debugging trace also shows states, grammar rule, values passed
- into grammar rules, and the result of each reduction.
-
-01/09/09: beazley
- The yacc() command now does all error-reporting and diagnostics using
- the interface of the logging module. Use the errorlog parameter to
- specify a logging object for error messages. Use the debuglog parameter
- to specify a logging object for the 'parser.out' output.
-
-01/09/09: beazley
- *HUGE* refactoring of the the ply.yacc() implementation. The high-level
- user interface is backwards compatible, but the internals are completely
- reorganized into classes. No more global variables. The internals
- are also more extensible. For example, you can use the classes to
- construct a LALR(1) parser in an entirely different manner than
- what is currently the case. Documentation is forthcoming.
-
-01/07/09: beazley
- Various cleanup and refactoring of yacc internals.
-
-01/06/09: beazley
- Fixed a bug with precedence assignment. yacc was assigning the precedence
- each rule based on the left-most token, when in fact, it should have been
- using the right-most token. Reported by Bruce Frederiksen.
-
-11/27/08: beazley
- Numerous changes to support Python 3.0 including removal of deprecated
- statements (e.g., has_key) and the additional of compatibility code
- to emulate features from Python 2 that have been removed, but which
- are needed. Fixed the unit testing suite to work with Python 3.0.
- The code should be backwards compatible with Python 2.
-
-11/26/08: beazley
- Loosened the rules on what kind of objects can be passed in as the
- "module" parameter to lex() and yacc(). Previously, you could only use
- a module or an instance. Now, PLY just uses dir() to get a list of
- symbols on whatever the object is without regard for its type.
-
-11/26/08: beazley
- Changed all except: statements to be compatible with Python2.x/3.x syntax.
-
-11/26/08: beazley
- Changed all raise Exception, value statements to raise Exception(value) for
- forward compatibility.
-
-11/26/08: beazley
- Removed all print statements from lex and yacc, using sys.stdout and sys.stderr
- directly. Preparation for Python 3.0 support.
-
-11/04/08: beazley
- Fixed a bug with referring to symbols on the the parsing stack using negative
- indices.
-
-05/29/08: beazley
- Completely revamped the testing system to use the unittest module for everything.
- Added additional tests to cover new errors/warnings.
-
-Version 2.5
------------------------------
-05/28/08: beazley
- Fixed a bug with writing lex-tables in optimized mode and start states.
- Reported by Kevin Henry.
-
-Version 2.4
------------------------------
-05/04/08: beazley
- A version number is now embedded in the table file signature so that
- yacc can more gracefully accomodate changes to the output format
- in the future.
-
-05/04/08: beazley
- Removed undocumented .pushback() method on grammar productions. I'm
- not sure this ever worked and can't recall ever using it. Might have
- been an abandoned idea that never really got fleshed out. This
- feature was never described or tested so removing it is hopefully
- harmless.
-
-05/04/08: beazley
- Added extra error checking to yacc() to detect precedence rules defined
- for undefined terminal symbols. This allows yacc() to detect a potential
- problem that can be really tricky to debug if no warning message or error
- message is generated about it.
-
-05/04/08: beazley
- lex() now has an outputdir that can specify the output directory for
- tables when running in optimize mode. For example:
-
- lexer = lex.lex(optimize=True, lextab="ltab", outputdir="foo/bar")
-
- The behavior of specifying a table module and output directory are
- more aligned with the behavior of yacc().
-
-05/04/08: beazley
- [Issue 9]
- Fixed filename bug in when specifying the modulename in lex() and yacc().
- If you specified options such as the following:
-
- parser = yacc.yacc(tabmodule="foo.bar.parsetab",outputdir="foo/bar")
-
- yacc would create a file "foo.bar.parsetab.py" in the given directory.
- Now, it simply generates a file "parsetab.py" in that directory.
- Bug reported by cptbinho.
-
-05/04/08: beazley
- Slight modification to lex() and yacc() to allow their table files
- to be loaded from a previously loaded module. This might make
- it easier to load the parsing tables from a complicated package
- structure. For example:
-
- import foo.bar.spam.parsetab as parsetab
- parser = yacc.yacc(tabmodule=parsetab)
-
- Note: lex and yacc will never regenerate the table file if used
- in the form---you will get a warning message instead.
- This idea suggested by Brian Clapper.
-
-
-04/28/08: beazley
- Fixed a big with p_error() functions being picked up correctly
- when running in yacc(optimize=1) mode. Patch contributed by
- Bart Whiteley.
-
-02/28/08: beazley
- Fixed a bug with 'nonassoc' precedence rules. Basically the
- non-precedence was being ignored and not producing the correct
- run-time behavior in the parser.
-
-02/16/08: beazley
- Slight relaxation of what the input() method to a lexer will
- accept as a string. Instead of testing the input to see
- if the input is a string or unicode string, it checks to see
- if the input object looks like it contains string data.
- This change makes it possible to pass string-like objects
- in as input. For example, the object returned by mmap.
-
- import mmap, os
- data = mmap.mmap(os.open(filename,os.O_RDONLY),
- os.path.getsize(filename),
- access=mmap.ACCESS_READ)
- lexer.input(data)
-
-
-11/29/07: beazley
- Modification of ply.lex to allow token functions to aliased.
- This is subtle, but it makes it easier to create libraries and
- to reuse token specifications. For example, suppose you defined
- a function like this:
-
- def number(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
- This change would allow you to define a token rule as follows:
-
- t_NUMBER = number
-
- In this case, the token type will be set to 'NUMBER' and use
- the associated number() function to process tokens.
-
-11/28/07: beazley
- Slight modification to lex and yacc to grab symbols from both
- the local and global dictionaries of the caller. This
- modification allows lexers and parsers to be defined using
- inner functions and closures.
-
-11/28/07: beazley
- Performance optimization: The lexer.lexmatch and t.lexer
- attributes are no longer set for lexer tokens that are not
- defined by functions. The only normal use of these attributes
- would be in lexer rules that need to perform some kind of
- special processing. Thus, it doesn't make any sense to set
- them on every token.
-
- *** POTENTIAL INCOMPATIBILITY *** This might break code
- that is mucking around with internal lexer state in some
- sort of magical way.
-
-11/27/07: beazley
- Added the ability to put the parser into error-handling mode
- from within a normal production. To do this, simply raise
- a yacc.SyntaxError exception like this:
-
- def p_some_production(p):
- 'some_production : prod1 prod2'
- ...
- raise yacc.SyntaxError # Signal an error
-
- A number of things happen after this occurs:
-
- - The last symbol shifted onto the symbol stack is discarded
- and parser state backed up to what it was before the
- the rule reduction.
-
- - The current lookahead symbol is saved and replaced by
- the 'error' symbol.
-
- - The parser enters error recovery mode where it tries
- to either reduce the 'error' rule or it starts
- discarding items off of the stack until the parser
- resets.
-
- When an error is manually set, the parser does *not* call
- the p_error() function (if any is defined).
- *** NEW FEATURE *** Suggested on the mailing list
-
-11/27/07: beazley
- Fixed structure bug in examples/ansic. Reported by Dion Blazakis.
-
-11/27/07: beazley
- Fixed a bug in the lexer related to start conditions and ignored
- token rules. If a rule was defined that changed state, but
- returned no token, the lexer could be left in an inconsistent
- state. Reported by
-
-11/27/07: beazley
- Modified setup.py to support Python Eggs. Patch contributed by
- Simon Cross.
-
-11/09/07: beazely
- Fixed a bug in error handling in yacc. If a syntax error occurred and the
- parser rolled the entire parse stack back, the parser would be left in in
- inconsistent state that would cause it to trigger incorrect actions on
- subsequent input. Reported by Ton Biegstraaten, Justin King, and others.
-
-11/09/07: beazley
- Fixed a bug when passing empty input strings to yacc.parse(). This
- would result in an error message about "No input given". Reported
- by Andrew Dalke.
-
-Version 2.3
------------------------------
-02/20/07: beazley
- Fixed a bug with character literals if the literal '.' appeared as the
- last symbol of a grammar rule. Reported by Ales Smrcka.
-
-02/19/07: beazley
- Warning messages are now redirected to stderr instead of being printed
- to standard output.
-
-02/19/07: beazley
- Added a warning message to lex.py if it detects a literal backslash
- character inside the t_ignore declaration. This is to help
- problems that might occur if someone accidentally defines t_ignore
- as a Python raw string. For example:
-
- t_ignore = r' \t'
-
- The idea for this is from an email I received from David Cimimi who
- reported bizarre behavior in lexing as a result of defining t_ignore
- as a raw string by accident.
-
-02/18/07: beazley
- Performance improvements. Made some changes to the internal
- table organization and LR parser to improve parsing performance.
-
-02/18/07: beazley
- Automatic tracking of line number and position information must now be
- enabled by a special flag to parse(). For example:
-
- yacc.parse(data,tracking=True)
-
- In many applications, it's just not that important to have the
- parser automatically track all line numbers. By making this an
- optional feature, it allows the parser to run significantly faster
- (more than a 20% speed increase in many cases). Note: positional
- information is always available for raw tokens---this change only
- applies to positional information associated with nonterminal
- grammar symbols.
- *** POTENTIAL INCOMPATIBILITY ***
-
-02/18/07: beazley
- Yacc no longer supports extended slices of grammar productions.
- However, it does support regular slices. For example:
-
- def p_foo(p):
- '''foo: a b c d e'''
- p[0] = p[1:3]
-
- This change is a performance improvement to the parser--it streamlines
- normal access to the grammar values since slices are now handled in
- a __getslice__() method as opposed to __getitem__().
-
-02/12/07: beazley
- Fixed a bug in the handling of token names when combined with
- start conditions. Bug reported by Todd O'Bryan.
-
-Version 2.2
-------------------------------
-11/01/06: beazley
- Added lexpos() and lexspan() methods to grammar symbols. These
- mirror the same functionality of lineno() and linespan(). For
- example:
-
- def p_expr(p):
- 'expr : expr PLUS expr'
- p.lexpos(1) # Lexing position of left-hand-expression
-          p.lexpos(2)        # Lexing position of PLUS
- start,end = p.lexspan(3) # Lexing range of right hand expression
-
-11/01/06: beazley
- Minor change to error handling. The recommended way to skip characters
- in the input is to use t.lexer.skip() as shown here:
-
- def t_error(t):
- print "Illegal character '%s'" % t.value[0]
- t.lexer.skip(1)
-
- The old approach of just using t.skip(1) will still work, but won't
- be documented.
-
-10/31/06: beazley
- Discarded tokens can now be specified as simple strings instead of
- functions. To do this, simply include the text "ignore_" in the
- token declaration. For example:
-
- t_ignore_cppcomment = r'//.*'
-
- Previously, this had to be done with a function. For example:
-
- def t_ignore_cppcomment(t):
- r'//.*'
- pass
-
- If start conditions/states are being used, state names should appear
- before the "ignore_" text.
-
-10/19/06: beazley
- The Lex module now provides support for flex-style start conditions
- as described at http://www.gnu.org/software/flex/manual/html_chapter/flex_11.html.
- Please refer to this document to understand this change note. Refer to
- the PLY documentation for PLY-specific explanation of how this works.
-
- To use start conditions, you first need to declare a set of states in
- your lexer file:
-
- states = (
- ('foo','exclusive'),
- ('bar','inclusive')
- )
-
- This serves the same role as the %s and %x specifiers in flex.
-
-          Once a state has been declared, tokens for that state can be
- declared by defining rules of the form t_state_TOK. For example:
-
- t_PLUS = '\+' # Rule defined in INITIAL state
- t_foo_NUM = '\d+' # Rule defined in foo state
- t_bar_NUM = '\d+' # Rule defined in bar state
-
- t_foo_bar_NUM = '\d+' # Rule defined in both foo and bar
- t_ANY_NUM = '\d+' # Rule defined in all states
-
- In addition to defining tokens for each state, the t_ignore and t_error
- specifications can be customized for specific states. For example:
-
- t_foo_ignore = " " # Ignored characters for foo state
- def t_bar_error(t):
- # Handle errors in bar state
-
- With token rules, the following methods can be used to change states
-
- def t_TOKNAME(t):
- t.lexer.begin('foo') # Begin state 'foo'
- t.lexer.push_state('foo') # Begin state 'foo', push old state
- # onto a stack
- t.lexer.pop_state() # Restore previous state
- t.lexer.current_state() # Returns name of current state
-
- These methods mirror the BEGIN(), yy_push_state(), yy_pop_state(), and
- yy_top_state() functions in flex.
-
- The use of start states can be used as one way to write sub-lexers.
- For example, the lexer or parser might instruct the lexer to start
- generating a different set of tokens depending on the context.
-
- example/yply/ylex.py shows the use of start states to grab C/C++
- code fragments out of traditional yacc specification files.
-
- *** NEW FEATURE *** Suggested by Daniel Larraz with whom I also
- discussed various aspects of the design.
-
-10/19/06: beazley
- Minor change to the way in which yacc.py was reporting shift/reduce
- conflicts. Although the underlying LALR(1) algorithm was correct,
- PLY was under-reporting the number of conflicts compared to yacc/bison
- when precedence rules were in effect. This change should make PLY
- report the same number of conflicts as yacc.
-
-10/19/06: beazley
- Modified yacc so that grammar rules could also include the '-'
- character. For example:
-
- def p_expr_list(p):
- 'expression-list : expression-list expression'
-
- Suggested by Oldrich Jedlicka.
-
-10/18/06: beazley
- Attribute lexer.lexmatch added so that token rules can access the re
- match object that was generated. For example:
-
- def t_FOO(t):
- r'some regex'
- m = t.lexer.lexmatch
- # Do something with m
-
-
- This may be useful if you want to access named groups specified within
- the regex for a specific token. Suggested by Oldrich Jedlicka.
-
-10/16/06: beazley
- Changed the error message that results if an illegal character
- is encountered and no default error function is defined in lex.
- The exception is now more informative about the actual cause of
- the error.
-
-Version 2.1
-------------------------------
-10/02/06: beazley
- The last Lexer object built by lex() can be found in lex.lexer.
- The last Parser object built by yacc() can be found in yacc.parser.
-
-10/02/06: beazley
- New example added: examples/yply
-
- This example uses PLY to convert Unix-yacc specification files to
- PLY programs with the same grammar. This may be useful if you
- want to convert a grammar from bison/yacc to use with PLY.
-
-10/02/06: beazley
- Added support for a start symbol to be specified in the yacc
- input file itself. Just do this:
-
- start = 'name'
-
- where 'name' matches some grammar rule. For example:
-
- def p_name(p):
- 'name : A B C'
- ...
-
- This mirrors the functionality of the yacc %start specifier.
-
-09/30/06: beazley
-          Some new examples added:
-
- examples/GardenSnake : A simple indentation based language similar
- to Python. Shows how you might handle
- whitespace. Contributed by Andrew Dalke.
-
- examples/BASIC : An implementation of 1964 Dartmouth BASIC.
- Contributed by Dave against his better
- judgement.
-
-09/28/06: beazley
- Minor patch to allow named groups to be used in lex regular
- expression rules. For example:
-
- t_QSTRING = r'''(?P<quote>['"]).*?(?P=quote)'''
-
- Patch submitted by Adam Ring.
-
-09/28/06: beazley
- LALR(1) is now the default parsing method. To use SLR, use
- yacc.yacc(method="SLR"). Note: there is no performance impact
- on parsing when using LALR(1) instead of SLR. However, constructing
- the parsing tables will take a little longer.
-
-09/26/06: beazley
- Change to line number tracking. To modify line numbers, modify
- the line number of the lexer itself. For example:
-
- def t_NEWLINE(t):
- r'\n'
- t.lexer.lineno += 1
-
- This modification is both cleanup and a performance optimization.
- In past versions, lex was monitoring every token for changes in
- the line number. This extra processing is unnecessary for a vast
- majority of tokens. Thus, this new approach cleans it up a bit.
-
- *** POTENTIAL INCOMPATIBILITY ***
- You will need to change code in your lexer that updates the line
- number. For example, "t.lineno += 1" becomes "t.lexer.lineno += 1"
-
-09/26/06: beazley
- Added the lexing position to tokens as an attribute lexpos. This
- is the raw index into the input text at which a token appears.
- This information can be used to compute column numbers and other
- details (e.g., scan backwards from lexpos to the first newline
- to get a column position).
-
-09/25/06: beazley
- Changed the name of the __copy__() method on the Lexer class
- to clone(). This is used to clone a Lexer object (e.g., if
- you're running different lexers at the same time).
-
-09/21/06: beazley
- Limitations related to the use of the re module have been eliminated.
- Several users reported problems with regular expressions exceeding
- more than 100 named groups. To solve this, lex.py is now capable
-          of automatically splitting its master regular expression into
- smaller expressions as needed. This should, in theory, make it
- possible to specify an arbitrarily large number of tokens.
-
-09/21/06: beazley
- Improved error checking in lex.py. Rules that match the empty string
- are now rejected (otherwise they cause the lexer to enter an infinite
- loop). An extra check for rules containing '#' has also been added.
- Since lex compiles regular expressions in verbose mode, '#' is interpreted
- as a regex comment, it is critical to use '\#' instead.
-
-09/18/06: beazley
- Added a @TOKEN decorator function to lex.py that can be used to
- define token rules where the documentation string might be computed
- in some way.
-
- digit = r'([0-9])'
- nondigit = r'([_A-Za-z])'
- identifier = r'(' + nondigit + r'(' + digit + r'|' + nondigit + r')*)'
-
- from ply.lex import TOKEN
-
- @TOKEN(identifier)
- def t_ID(t):
- # Do whatever
-
- The @TOKEN decorator merely sets the documentation string of the
- associated token function as needed for lex to work.
-
- Note: An alternative solution is the following:
-
- def t_ID(t):
- # Do whatever
-
- t_ID.__doc__ = identifier
-
- Note: Decorators require the use of Python 2.4 or later. If compatibility
- with old versions is needed, use the latter solution.
-
- The need for this feature was suggested by Cem Karan.
-
-09/14/06: beazley
- Support for single-character literal tokens has been added to yacc.
- These literals must be enclosed in quotes. For example:
-
- def p_expr(p):
- "expr : expr '+' expr"
- ...
-
- def p_expr(p):
- 'expr : expr "-" expr'
- ...
-
- In addition to this, it is necessary to tell the lexer module about
- literal characters. This is done by defining the variable 'literals'
- as a list of characters. This should be defined in the module that
- invokes the lex.lex() function. For example:
-
- literals = ['+','-','*','/','(',')','=']
-
- or simply
-
- literals = '+=*/()='
-
- It is important to note that literals can only be a single character.
- When the lexer fails to match a token using its normal regular expression
- rules, it will check the current character against the literal list.
- If found, it will be returned with a token type set to match the literal
- character. Otherwise, an illegal character will be signalled.
-
-
-09/14/06: beazley
- Modified PLY to install itself as a proper Python package called 'ply'.
- This will make it a little more friendly to other modules. This
- changes the usage of PLY only slightly. Just do this to import the
- modules
-
- import ply.lex as lex
- import ply.yacc as yacc
-
- Alternatively, you can do this:
-
- from ply import *
-
- Which imports both the lex and yacc modules.
- Change suggested by Lee June.
-
-09/13/06: beazley
- Changed the handling of negative indices when used in production rules.
- A negative production index now accesses already parsed symbols on the
- parsing stack. For example,
-
- def p_foo(p):
- "foo: A B C D"
- print p[1] # Value of 'A' symbol
- print p[2] # Value of 'B' symbol
- print p[-1] # Value of whatever symbol appears before A
- # on the parsing stack.
-
-              p[0] = some_val     # Sets the value of the 'foo' grammar symbol
-
- This behavior makes it easier to work with embedded actions within the
- parsing rules. For example, in C-yacc, it is possible to write code like
- this:
-
- bar: A { printf("seen an A = %d\n", $1); } B { do_stuff; }
-
- In this example, the printf() code executes immediately after A has been
- parsed. Within the embedded action code, $1 refers to the A symbol on
- the stack.
-
- To perform this equivalent action in PLY, you need to write a pair
- of rules like this:
-
- def p_bar(p):
- "bar : A seen_A B"
- do_stuff
-
- def p_seen_A(p):
- "seen_A :"
- print "seen an A =", p[-1]
-
-          The second rule "seen_A" is merely an empty production which should be
- reduced as soon as A is parsed in the "bar" rule above. The use
- of the negative index p[-1] is used to access whatever symbol appeared
- before the seen_A symbol.
-
- This feature also makes it possible to support inherited attributes.
- For example:
-
- def p_decl(p):
- "decl : scope name"
-
- def p_scope(p):
- """scope : GLOBAL
- | LOCAL"""
- p[0] = p[1]
-
- def p_name(p):
- "name : ID"
- if p[-1] == "GLOBAL":
- # ...
-              elif p[-1] == "LOCAL":
- #...
-
- In this case, the name rule is inheriting an attribute from the
- scope declaration that precedes it.
-
- *** POTENTIAL INCOMPATIBILITY ***
- If you are currently using negative indices within existing grammar rules,
- your code will break. This should be extremely rare if non-existent in
-          most cases.  The argument to various grammar rules is not usually
- processed in the same way as a list of items.
-
-Version 2.0
-------------------------------
-09/07/06: beazley
- Major cleanup and refactoring of the LR table generation code. Both SLR
- and LALR(1) table generation is now performed by the same code base with
- only minor extensions for extra LALR(1) processing.
-
-09/07/06: beazley
- Completely reimplemented the entire LALR(1) parsing engine to use the
- DeRemer and Pennello algorithm for calculating lookahead sets. This
- significantly improves the performance of generating LALR(1) tables
- and has the added feature of actually working correctly! If you
- experienced weird behavior with LALR(1) in prior releases, this should
- hopefully resolve all of those problems. Many thanks to
- Andrew Waters and Markus Schoepflin for submitting bug reports
- and helping me test out the revised LALR(1) support.
-
-Version 1.8
-------------------------------
-08/02/06: beazley
- Fixed a problem related to the handling of default actions in LALR(1)
- parsing. If you experienced subtle and/or bizarre behavior when trying
- to use the LALR(1) engine, this may correct those problems. Patch
-          contributed by Russ Cox.   Note: This patch has been superseded by
- revisions for LALR(1) parsing in Ply-2.0.
-
-08/02/06: beazley
- Added support for slicing of productions in yacc.
- Patch contributed by Patrick Mezard.
-
-Version 1.7
-------------------------------
-03/02/06: beazley
-          Fixed infinite recursion problem in the ReduceToTerminals() function that
- would sometimes come up in LALR(1) table generation. Reported by
- Markus Schoepflin.
-
-03/01/06: beazley
- Added "reflags" argument to lex(). For example:
-
- lex.lex(reflags=re.UNICODE)
-
- This can be used to specify optional flags to the re.compile() function
- used inside the lexer. This may be necessary for special situations such
- as processing Unicode (e.g., if you want escapes like \w and \b to consult
- the Unicode character property database). The need for this suggested by
- Andreas Jung.
-
-03/01/06: beazley
- Fixed a bug with an uninitialized variable on repeated instantiations of parser
- objects when the write_tables=0 argument was used. Reported by Michael Brown.
-
-03/01/06: beazley
- Modified lex.py to accept Unicode strings both as the regular expressions for
- tokens and as input. Hopefully this is the only change needed for Unicode support.
- Patch contributed by Johan Dahl.
-
-03/01/06: beazley
- Modified the class-based interface to work with new-style or old-style classes.
- Patch contributed by Michael Brown (although I tweaked it slightly so it would work
- with older versions of Python).
-
-Version 1.6
-------------------------------
-05/27/05: beazley
- Incorporated patch contributed by Christopher Stawarz to fix an extremely
- devious bug in LALR(1) parser generation. This patch should fix problems
- numerous people reported with LALR parsing.
-
-05/27/05: beazley
- Fixed problem with lex.py copy constructor. Reported by Dave Aitel, Aaron Lav,
- and Thad Austin.
-
-05/27/05: beazley
- Added outputdir option to yacc() to control output directory. Contributed
- by Christopher Stawarz.
-
-05/27/05: beazley
- Added rununit.py test script to run tests using the Python unittest module.
- Contributed by Miki Tebeka.
-
-Version 1.5
-------------------------------
-05/26/04: beazley
- Major enhancement. LALR(1) parsing support is now working.
- This feature was implemented by Elias Ioup (ezioup@alumni.uchicago.edu)
- and optimized by David Beazley. To use LALR(1) parsing do
- the following:
-
- yacc.yacc(method="LALR")
-
- Computing LALR(1) parsing tables takes about twice as long as
- the default SLR method. However, LALR(1) allows you to handle
- more complex grammars. For example, the ANSI C grammar
- (in example/ansic) has 13 shift-reduce conflicts with SLR, but
- only has 1 shift-reduce conflict with LALR(1).
-
-05/20/04: beazley
- Added a __len__ method to parser production lists. Can
- be used in parser rules like this:
-
- def p_somerule(p):
- """a : B C D
-                   | E F"""
- if (len(p) == 3):
- # Must have been first rule
- elif (len(p) == 2):
- # Must be second rule
-
- Suggested by Joshua Gerth and others.
-
-Version 1.4
-------------------------------
-04/23/04: beazley
- Incorporated a variety of patches contributed by Eric Raymond.
- These include:
-
- 0. Cleans up some comments so they don't wrap on an 80-column display.
- 1. Directs compiler errors to stderr where they belong.
- 2. Implements and documents automatic line counting when \n is ignored.
- 3. Changes the way progress messages are dumped when debugging is on.
- The new format is both less verbose and conveys more information than
- the old, including shift and reduce actions.
-
-04/23/04: beazley
-          Added a Python setup.py file to simplify installation.  Contributed
- by Adam Kerrison.
-
-04/23/04: beazley
- Added patches contributed by Adam Kerrison.
-
- - Some output is now only shown when debugging is enabled. This
- means that PLY will be completely silent when not in debugging mode.
-
- - An optional parameter "write_tables" can be passed to yacc() to
- control whether or not parsing tables are written. By default,
- it is true, but it can be turned off if you don't want the yacc
- table file. Note: disabling this will cause yacc() to regenerate
- the parsing table each time.
-
-04/23/04: beazley
-          Added patches contributed by David McNab.  This patch adds two
- features:
-
- - The parser can be supplied as a class instead of a module.
- For an example of this, see the example/classcalc directory.
-
- - Debugging output can be directed to a filename of the user's
- choice. Use
-
- yacc(debugfile="somefile.out")
-
-
-Version 1.3
-------------------------------
-12/10/02: jmdyck
- Various minor adjustments to the code that Dave checked in today.
- Updated test/yacc_{inf,unused}.exp to reflect today's changes.
-
-12/10/02: beazley
- Incorporated a variety of minor bug fixes to empty production
- handling and infinite recursion checking. Contributed by
- Michael Dyck.
-
-12/10/02: beazley
- Removed bogus recover() method call in yacc.restart()
-
-Version 1.2
-------------------------------
-11/27/02: beazley
- Lexer and parser objects are now available as an attribute
- of tokens and slices respectively. For example:
-
- def t_NUMBER(t):
- r'\d+'
- print t.lexer
-
- def p_expr_plus(t):
- 'expr: expr PLUS expr'
- print t.lexer
- print t.parser
-
- This can be used for state management (if needed).
-
-10/31/02: beazley
- Modified yacc.py to work with Python optimize mode. To make
- this work, you need to use
-
- yacc.yacc(optimize=1)
-
- Furthermore, you need to first run Python in normal mode
- to generate the necessary parsetab.py files. After that,
- you can use python -O or python -OO.
-
- Note: optimized mode turns off a lot of error checking.
- Only use when you are sure that your grammar is working.
- Make sure parsetab.py is up to date!
-
-10/30/02: beazley
- Added cloning of Lexer objects. For example:
-
- import copy
- l = lex.lex()
- lc = copy.copy(l)
-
- l.input("Some text")
- lc.input("Some other text")
- ...
-
- This might be useful if the same "lexer" is meant to
- be used in different contexts---or if multiple lexers
- are running concurrently.
-
-10/30/02: beazley
- Fixed subtle bug with first set computation and empty productions.
- Patch submitted by Michael Dyck.
-
-10/30/02: beazley
- Fixed error messages to use "filename:line: message" instead
- of "filename:line. message". This makes error reporting more
- friendly to emacs. Patch submitted by François Pinard.
-
-10/30/02: beazley
- Improvements to parser.out file. Terminals and nonterminals
- are sorted instead of being printed in random order.
- Patch submitted by François Pinard.
-
-10/30/02: beazley
- Improvements to parser.out file output. Rules are now printed
- in a way that's easier to understand. Contributed by Russ Cox.
-
-10/30/02: beazley
- Added 'nonassoc' associativity support. This can be used
- to disable the chaining of operators like a < b < c.
- To use, simply specify 'nonassoc' in the precedence table
-
- precedence = (
- ('nonassoc', 'LESSTHAN', 'GREATERTHAN'), # Nonassociative operators
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('right', 'UMINUS'), # Unary minus operator
- )
-
- Patch contributed by Russ Cox.
-
-10/30/02: beazley
- Modified the lexer to provide optional support for Python -O and -OO
- modes. To make this work, Python *first* needs to be run in
- unoptimized mode. This reads the lexing information and creates a
- file "lextab.py". Then, run lex like this:
-
- # module foo.py
- ...
- ...
- lex.lex(optimize=1)
-
- Once the lextab file has been created, subsequent calls to
- lex.lex() will read data from the lextab file instead of using
- introspection. In optimized mode (-O, -OO) everything should
- work normally despite the loss of doc strings.
-
- To change the name of the file 'lextab.py' use the following:
-
- lex.lex(lextab="footab")
-
- (this creates a file footab.py)
-
-
-Version 1.1 October 25, 2001
-------------------------------
-
-10/25/01: beazley
- Modified the table generator to produce much more compact data.
- This should greatly reduce the size of the parsetab.py[c] file.
- Caveat: the tables still need to be constructed so a little more
- work is done in parsetab on import.
-
-10/25/01: beazley
- There may be a possible bug in the cycle detector that reports errors
- about infinite recursion. I'm having a little trouble tracking it
- down, but if you get this problem, you can disable the cycle
- detector as follows:
-
- yacc.yacc(check_recursion = 0)
-
-10/25/01: beazley
- Fixed a bug in lex.py that sometimes caused illegal characters to be
- reported incorrectly. Reported by Sverre Jørgensen.
-
-7/8/01 : beazley
- Added a reference to the underlying lexer object when tokens are handled by
- functions. The lexer is available as the 'lexer' attribute. This
- was added to provide better lexing support for languages such as Fortran
- where certain types of tokens can't be conveniently expressed as regular
- expressions (and where the tokenizing function may want to perform a
- little backtracking). Suggested by Pearu Peterson.
-
-6/20/01 : beazley
- Modified yacc() function so that an optional starting symbol can be specified.
- For example:
-
- yacc.yacc(start="statement")
-
- Normally yacc always treats the first production rule as the starting symbol.
- However, if you are debugging your grammar it may be useful to specify
- an alternative starting symbol. Idea suggested by Rich Salz.
-
-Version 1.0 June 18, 2001
---------------------------
-Initial public offering
-
diff --git a/components/script/dom/bindings/codegen/ply/MANIFEST.in b/components/script/dom/bindings/codegen/ply/MANIFEST.in
deleted file mode 100644
index 0d37431b0b4..00000000000
--- a/components/script/dom/bindings/codegen/ply/MANIFEST.in
+++ /dev/null
@@ -1,8 +0,0 @@
-recursive-include example *
-recursive-include doc *
-recursive-include test *
-include ANNOUNCE
-include README.md
-include CHANGES
-include TODO
-global-exclude *.pyc
diff --git a/components/script/dom/bindings/codegen/ply/PKG-INFO b/components/script/dom/bindings/codegen/ply/PKG-INFO
deleted file mode 100644
index 6eedf425953..00000000000
--- a/components/script/dom/bindings/codegen/ply/PKG-INFO
+++ /dev/null
@@ -1,22 +0,0 @@
-Metadata-Version: 1.1
-Name: ply
-Version: 3.10
-Summary: Python Lex & Yacc
-Home-page: http://www.dabeaz.com/ply/
-Author: David Beazley
-Author-email: dave@dabeaz.com
-License: BSD
-Description:
- PLY is yet another implementation of lex and yacc for Python. Some notable
- features include the fact that its implemented entirely in Python and it
- uses LALR(1) parsing which is efficient and well suited for larger grammars.
-
- PLY provides most of the standard lex/yacc features including support for empty
- productions, precedence rules, error recovery, and support for ambiguous grammars.
-
- PLY is extremely easy to use and provides very extensive error checking.
- It is compatible with both Python 2 and Python 3.
-
-Platform: UNKNOWN
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 2
diff --git a/components/script/dom/bindings/codegen/ply/README.md b/components/script/dom/bindings/codegen/ply/README.md
deleted file mode 100644
index e428f1b14a8..00000000000
--- a/components/script/dom/bindings/codegen/ply/README.md
+++ /dev/null
@@ -1,273 +0,0 @@
-PLY (Python Lex-Yacc) Version 3.10
-
-Copyright (C) 2001-2017
-David M. Beazley (Dabeaz LLC)
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-* Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright notice,
- this list of conditions and the following disclaimer in the documentation
- and/or other materials provided with the distribution.
-* Neither the name of the David Beazley or Dabeaz LLC may be used to
- endorse or promote products derived from this software without
- specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-Introduction
-============
-
-PLY is a 100% Python implementation of the common parsing tools lex
-and yacc. Here are a few highlights:
-
- - PLY is very closely modeled after traditional lex/yacc.
- If you know how to use these tools in C, you will find PLY
- to be similar.
-
- - PLY provides *very* extensive error reporting and diagnostic
- information to assist in parser construction. The original
- implementation was developed for instructional purposes. As
- a result, the system tries to identify the most common types
- of errors made by novice users.
-
- - PLY provides full support for empty productions, error recovery,
- precedence specifiers, and moderately ambiguous grammars.
-
- - Parsing is based on LR-parsing which is fast, memory efficient,
- better suited to large grammars, and which has a number of nice
- properties when dealing with syntax errors and other parsing problems.
- Currently, PLY builds its parsing tables using the LALR(1)
- algorithm used in yacc.
-
- - PLY uses Python introspection features to build lexers and parsers.
- This greatly simplifies the task of parser construction since it reduces
- the number of files and eliminates the need to run a separate lex/yacc
- tool before running your program.
-
- - PLY can be used to build parsers for "real" programming languages.
- Although it is not ultra-fast due to its Python implementation,
- PLY can be used to parse grammars consisting of several hundred
- rules (as might be found for a language like C). The lexer and LR
- parser are also reasonably efficient when parsing typically
- sized programs. People have used PLY to build parsers for
- C, C++, ADA, and other real programming languages.
-
-How to Use
-==========
-
-PLY consists of two files : lex.py and yacc.py. These are contained
-within the 'ply' directory which may also be used as a Python package.
-To use PLY, simply copy the 'ply' directory to your project and import
-lex and yacc from the associated 'ply' package. For example:
-
- import ply.lex as lex
- import ply.yacc as yacc
-
-Alternatively, you can copy just the files lex.py and yacc.py
-individually and use them as modules. For example:
-
- import lex
- import yacc
-
-The file setup.py can be used to install ply using distutils.
-
-The file doc/ply.html contains complete documentation on how to use
-the system.
-
-The example directory contains several different examples including a
-PLY specification for ANSI C as given in K&R 2nd Ed.
-
-A simple example is found at the end of this document
-
-Requirements
-============
-PLY requires the use of Python 2.6 or greater. However, you should
-use the latest Python release if possible. It should work on just
-about any platform. PLY has been tested with both CPython and Jython.
-It also seems to work with IronPython.
-
-Resources
-=========
-More information about PLY can be obtained on the PLY webpage at:
-
- http://www.dabeaz.com/ply
-
-For a detailed overview of parsing theory, consult the excellent
-book "Compilers : Principles, Techniques, and Tools" by Aho, Sethi, and
-Ullman. The topics found in "Lex & Yacc" by Levine, Mason, and Brown
-may also be useful.
-
-The GitHub page for PLY can be found at:
-
- https://github.com/dabeaz/ply
-
-An old and relatively inactive discussion group for PLY is found at:
-
- http://groups.google.com/group/ply-hack
-
-Acknowledgments
-===============
-A special thanks is in order for all of the students in CS326 who
-suffered through about 25 different versions of these tools :-).
-
-The CHANGES file acknowledges those who have contributed patches.
-
-Elias Ioup did the first implementation of LALR(1) parsing in PLY-1.x.
-Andrew Waters and Markus Schoepflin were instrumental in reporting bugs
-and testing a revised LALR(1) implementation for PLY-2.0.
-
-Special Note for PLY-3.0
-========================
-PLY-3.0 the first PLY release to support Python 3. However, backwards
-compatibility with Python 2.6 is still preserved. PLY provides dual
-Python 2/3 compatibility by restricting its implementation to a common
-subset of basic language features. You should not convert PLY using
-2to3--it is not necessary and may in fact break the implementation.
-
-Example
-=======
-
-Here is a simple example showing a PLY implementation of a calculator
-with variables.
-
- # -----------------------------------------------------------------------------
- # calc.py
- #
- # A simple calculator with variables.
- # -----------------------------------------------------------------------------
-
- tokens = (
- 'NAME','NUMBER',
- 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
- 'LPAREN','RPAREN',
- )
-
- # Tokens
-
- t_PLUS = r'\+'
- t_MINUS = r'-'
- t_TIMES = r'\*'
- t_DIVIDE = r'/'
- t_EQUALS = r'='
- t_LPAREN = r'\('
- t_RPAREN = r'\)'
- t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
- def t_NUMBER(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
- # Ignored characters
- t_ignore = " \t"
-
- def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
- def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
- # Build the lexer
- import ply.lex as lex
- lex.lex()
-
- # Precedence rules for the arithmetic operators
- precedence = (
- ('left','PLUS','MINUS'),
- ('left','TIMES','DIVIDE'),
- ('right','UMINUS'),
- )
-
- # dictionary of names (for storing variables)
- names = { }
-
- def p_statement_assign(p):
- 'statement : NAME EQUALS expression'
- names[p[1]] = p[3]
-
- def p_statement_expr(p):
- 'statement : expression'
- print(p[1])
-
- def p_expression_binop(p):
- '''expression : expression PLUS expression
- | expression MINUS expression
- | expression TIMES expression
- | expression DIVIDE expression'''
- if p[2] == '+' : p[0] = p[1] + p[3]
- elif p[2] == '-': p[0] = p[1] - p[3]
- elif p[2] == '*': p[0] = p[1] * p[3]
- elif p[2] == '/': p[0] = p[1] / p[3]
-
- def p_expression_uminus(p):
- 'expression : MINUS expression %prec UMINUS'
- p[0] = -p[2]
-
- def p_expression_group(p):
- 'expression : LPAREN expression RPAREN'
- p[0] = p[2]
-
- def p_expression_number(p):
- 'expression : NUMBER'
- p[0] = p[1]
-
- def p_expression_name(p):
- 'expression : NAME'
- try:
- p[0] = names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
- def p_error(p):
- print("Syntax error at '%s'" % p.value)
-
- import ply.yacc as yacc
- yacc.yacc()
-
- while True:
- try:
- s = raw_input('calc > ') # use input() on Python 3
- except EOFError:
- break
- yacc.parse(s)
-
-
-Bug Reports and Patches
-=======================
-My goal with PLY is to simply have a decent lex/yacc implementation
-for Python. As a general rule, I don't spend huge amounts of time
-working on it unless I receive very specific bug reports and/or
-patches to fix problems. I also try to incorporate submitted feature
-requests and enhancements into each new version. Please visit the PLY
-github page at https://github.com/dabeaz/ply to submit issues and pull
-requests. To contact me about bugs and/or new features, please send
-email to dave@dabeaz.com.
-
--- Dave
-
-
-
-
-
-
-
-
-
diff --git a/components/script/dom/bindings/codegen/ply/TODO b/components/script/dom/bindings/codegen/ply/TODO
deleted file mode 100644
index f4800aacf47..00000000000
--- a/components/script/dom/bindings/codegen/ply/TODO
+++ /dev/null
@@ -1,16 +0,0 @@
-The PLY to-do list:
-
-1. Finish writing the C Preprocessor module. Started in the
- file ply/cpp.py
-
-2. Create and document libraries of useful tokens.
-
-3. Expand the examples/yply tool that parses bison/yacc
- files.
-
-4. Think of various diabolical things to do with the
- new yacc internals. For example, it is now possible
- to specify grammrs using completely different schemes
- than the reflection approach used by PLY.
-
-
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/README b/components/script/dom/bindings/codegen/ply/example/BASIC/README
deleted file mode 100644
index be24a3005e7..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/README
+++ /dev/null
@@ -1,79 +0,0 @@
-Inspired by a September 14, 2006 Salon article "Why Johnny Can't Code" by
-David Brin (http://www.salon.com/tech/feature/2006/09/14/basic/index.html),
-I thought that a fully working BASIC interpreter might be an interesting,
-if not questionable, PLY example. Uh, okay, so maybe it's just a bad idea,
-but in any case, here it is.
-
-In this example, you'll find a rough implementation of 1964 Dartmouth BASIC
-as described in the manual at:
-
- http://www.bitsavers.org/pdf/dartmouth/BASIC_Oct64.pdf
-
-See also:
-
- http://en.wikipedia.org/wiki/Dartmouth_BASIC
-
-This dialect is downright primitive---there are no string variables
-and no facilities for interactive input. Moreover, subroutines and functions
-are brain-dead even more than they usually are for BASIC. Of course,
-the GOTO statement is provided.
-
-Nevertheless, there are a few interesting aspects of this example:
-
- - It illustrates a fully working interpreter including lexing, parsing,
- and interpretation of instructions.
-
- - The parser shows how to catch and report various kinds of parsing
- errors in a more graceful way.
-
- - The example both parses files (supplied on command line) and
- interactive input entered line by line.
-
- - It shows how you might represent parsed information. In this case,
- each BASIC statement is encoded into a Python tuple containing the
- statement type and parameters. These tuples are then stored in
- a dictionary indexed by program line numbers.
-
- - Even though it's just BASIC, the parser contains more than 80
- rules and 150 parsing states. Thus, it's a little more meaty than
- the calculator example.
-
-To use the example, run it as follows:
-
- % python basic.py hello.bas
- HELLO WORLD
- %
-
-or use it interactively:
-
- % python basic.py
- [BASIC] 10 PRINT "HELLO WORLD"
- [BASIC] 20 END
- [BASIC] RUN
- HELLO WORLD
- [BASIC]
-
-The following files are defined:
-
- basic.py - High level script that controls everything
- basiclex.py - BASIC tokenizer
- basparse.py - BASIC parser
- basinterp.py - BASIC interpreter that runs parsed programs.
-
-In addition, a number of sample BASIC programs (.bas suffix) are
-provided. These were taken out of the Dartmouth manual.
-
-Disclaimer: I haven't spent a ton of time testing this and it's likely that
-I've skimped here and there on a few finer details (e.g., strictly enforcing
-variable naming rules). However, the interpreter seems to be able to run
-the examples in the BASIC manual.
-
-Have fun!
-
--Dave
-
-
-
-
-
-
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py
deleted file mode 100644
index 70ac9e7c740..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/basic.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# An implementation of Dartmouth BASIC (1964)
-#
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-import basiclex
-import basparse
-import basinterp
-
-# If a filename has been specified, we try to run it.
-# If a runtime error occurs, we bail out and enter
-# interactive mode below
-if len(sys.argv) == 2:
- data = open(sys.argv[1]).read()
- prog = basparse.parse(data)
- if not prog:
- raise SystemExit
- b = basinterp.BasicInterpreter(prog)
- try:
- b.run()
- raise SystemExit
- except RuntimeError:
- pass
-
-else:
- b = basinterp.BasicInterpreter({})
-
-# Interactive mode. This incrementally adds/deletes statements
-# from the program stored in the BasicInterpreter object. In
-# addition, special commands 'NEW','LIST',and 'RUN' are added.
-# Specifying a line number with no code deletes that line from
-# the program.
-
-while 1:
- try:
- line = raw_input("[BASIC] ")
- except EOFError:
- raise SystemExit
- if not line:
- continue
- line += "\n"
- prog = basparse.parse(line)
- if not prog:
- continue
-
- keys = list(prog)
- if keys[0] > 0:
- b.add_statements(prog)
- else:
- stat = prog[keys[0]]
- if stat[0] == 'RUN':
- try:
- b.run()
- except RuntimeError:
- pass
- elif stat[0] == 'LIST':
- b.list()
- elif stat[0] == 'BLANK':
- b.del_line(stat[1])
- elif stat[0] == 'NEW':
- b.new()
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py
deleted file mode 100644
index 4151f4c34fb..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclex.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# An implementation of Dartmouth BASIC (1964)
-
-from ply import *
-
-keywords = (
- 'LET', 'READ', 'DATA', 'PRINT', 'GOTO', 'IF', 'THEN', 'FOR', 'NEXT', 'TO', 'STEP',
- 'END', 'STOP', 'DEF', 'GOSUB', 'DIM', 'REM', 'RETURN', 'RUN', 'LIST', 'NEW',
-)
-
-tokens = keywords + (
- 'EQUALS', 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'POWER',
- 'LPAREN', 'RPAREN', 'LT', 'LE', 'GT', 'GE', 'NE',
- 'COMMA', 'SEMI', 'INTEGER', 'FLOAT', 'STRING',
- 'ID', 'NEWLINE'
-)
-
-t_ignore = ' \t'
-
-
-def t_REM(t):
- r'REM .*'
- return t
-
-
-def t_ID(t):
- r'[A-Z][A-Z0-9]*'
- if t.value in keywords:
- t.type = t.value
- return t
-
-t_EQUALS = r'='
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_TIMES = r'\*'
-t_POWER = r'\^'
-t_DIVIDE = r'/'
-t_LPAREN = r'\('
-t_RPAREN = r'\)'
-t_LT = r'<'
-t_LE = r'<='
-t_GT = r'>'
-t_GE = r'>='
-t_NE = r'<>'
-t_COMMA = r'\,'
-t_SEMI = r';'
-t_INTEGER = r'\d+'
-t_FLOAT = r'((\d*\.\d+)(E[\+-]?\d+)?|([1-9]\d*E[\+-]?\d+))'
-t_STRING = r'\".*?\"'
-
-
-def t_NEWLINE(t):
- r'\n'
- t.lexer.lineno += 1
- return t
-
-
-def t_error(t):
- print("Illegal character %s" % t.value[0])
- t.lexer.skip(1)
-
-lex.lex(debug=0)
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py
deleted file mode 100644
index 9dcc7feda69..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/basiclog.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# An implementation of Dartmouth BASIC (1964)
-#
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-import logging
-logging.basicConfig(
- level=logging.INFO,
- filename="parselog.txt",
- filemode="w"
-)
-log = logging.getLogger()
-
-import basiclex
-import basparse
-import basinterp
-
-# If a filename has been specified, we try to run it.
-# If a runtime error occurs, we bail out and enter
-# interactive mode below
-if len(sys.argv) == 2:
- data = open(sys.argv[1]).read()
- prog = basparse.parse(data, debug=log)
- if not prog:
- raise SystemExit
- b = basinterp.BasicInterpreter(prog)
- try:
- b.run()
- raise SystemExit
- except RuntimeError:
- pass
-
-else:
- b = basinterp.BasicInterpreter({})
-
-# Interactive mode. This incrementally adds/deletes statements
-# from the program stored in the BasicInterpreter object. In
-# addition, special commands 'NEW','LIST',and 'RUN' are added.
-# Specifying a line number with no code deletes that line from
-# the program.
-
-while 1:
- try:
- line = raw_input("[BASIC] ")
- except EOFError:
- raise SystemExit
- if not line:
- continue
- line += "\n"
- prog = basparse.parse(line, debug=log)
- if not prog:
- continue
-
- keys = list(prog)
- if keys[0] > 0:
- b.add_statements(prog)
- else:
- stat = prog[keys[0]]
- if stat[0] == 'RUN':
- try:
- b.run()
- except RuntimeError:
- pass
- elif stat[0] == 'LIST':
- b.list()
- elif stat[0] == 'BLANK':
- b.del_line(stat[1])
- elif stat[0] == 'NEW':
- b.new()
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py
deleted file mode 100644
index 67762c797bf..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/basinterp.py
+++ /dev/null
@@ -1,496 +0,0 @@
-# This file provides the runtime support for running a basic program
-# Assumes the program has been parsed using basparse.py
-
-import sys
-import math
-import random
-
-
-class BasicInterpreter:
-
- # Initialize the interpreter. prog is a dictionary
- # containing (line,statement) mappings
- def __init__(self, prog):
- self.prog = prog
-
- self.functions = { # Built-in function table
- 'SIN': lambda z: math.sin(self.eval(z)),
- 'COS': lambda z: math.cos(self.eval(z)),
- 'TAN': lambda z: math.tan(self.eval(z)),
- 'ATN': lambda z: math.atan(self.eval(z)),
- 'EXP': lambda z: math.exp(self.eval(z)),
- 'ABS': lambda z: abs(self.eval(z)),
- 'LOG': lambda z: math.log(self.eval(z)),
- 'SQR': lambda z: math.sqrt(self.eval(z)),
- 'INT': lambda z: int(self.eval(z)),
- 'RND': lambda z: random.random()
- }
-
- # Collect all data statements
- def collect_data(self):
- self.data = []
- for lineno in self.stat:
- if self.prog[lineno][0] == 'DATA':
- self.data = self.data + self.prog[lineno][1]
- self.dc = 0 # Initialize the data counter
-
- # Check for end statements
- def check_end(self):
- has_end = 0
- for lineno in self.stat:
- if self.prog[lineno][0] == 'END' and not has_end:
- has_end = lineno
- if not has_end:
- print("NO END INSTRUCTION")
- self.error = 1
- return
- if has_end != lineno:
- print("END IS NOT LAST")
- self.error = 1
-
- # Check loops
- def check_loops(self):
- for pc in range(len(self.stat)):
- lineno = self.stat[pc]
- if self.prog[lineno][0] == 'FOR':
- forinst = self.prog[lineno]
- loopvar = forinst[1]
- for i in range(pc + 1, len(self.stat)):
- if self.prog[self.stat[i]][0] == 'NEXT':
- nextvar = self.prog[self.stat[i]][1]
- if nextvar != loopvar:
- continue
- self.loopend[pc] = i
- break
- else:
- print("FOR WITHOUT NEXT AT LINE %s" % self.stat[pc])
- self.error = 1
-
- # Evaluate an expression
- def eval(self, expr):
- etype = expr[0]
- if etype == 'NUM':
- return expr[1]
- elif etype == 'GROUP':
- return self.eval(expr[1])
- elif etype == 'UNARY':
- if expr[1] == '-':
- return -self.eval(expr[2])
- elif etype == 'BINOP':
- if expr[1] == '+':
- return self.eval(expr[2]) + self.eval(expr[3])
- elif expr[1] == '-':
- return self.eval(expr[2]) - self.eval(expr[3])
- elif expr[1] == '*':
- return self.eval(expr[2]) * self.eval(expr[3])
- elif expr[1] == '/':
- return float(self.eval(expr[2])) / self.eval(expr[3])
- elif expr[1] == '^':
- return abs(self.eval(expr[2]))**self.eval(expr[3])
- elif etype == 'VAR':
- var, dim1, dim2 = expr[1]
- if not dim1 and not dim2:
- if var in self.vars:
- return self.vars[var]
- else:
- print("UNDEFINED VARIABLE %s AT LINE %s" %
- (var, self.stat[self.pc]))
- raise RuntimeError
- # May be a list lookup or a function evaluation
- if dim1 and not dim2:
- if var in self.functions:
- # A function
- return self.functions[var](dim1)
- else:
- # A list evaluation
- if var in self.lists:
- dim1val = self.eval(dim1)
- if dim1val < 1 or dim1val > len(self.lists[var]):
- print("LIST INDEX OUT OF BOUNDS AT LINE %s" %
- self.stat[self.pc])
- raise RuntimeError
- return self.lists[var][dim1val - 1]
- if dim1 and dim2:
- if var in self.tables:
- dim1val = self.eval(dim1)
- dim2val = self.eval(dim2)
- if dim1val < 1 or dim1val > len(self.tables[var]) or dim2val < 1 or dim2val > len(self.tables[var][0]):
- print("TABLE INDEX OUT OUT BOUNDS AT LINE %s" %
- self.stat[self.pc])
- raise RuntimeError
- return self.tables[var][dim1val - 1][dim2val - 1]
- print("UNDEFINED VARIABLE %s AT LINE %s" %
- (var, self.stat[self.pc]))
- raise RuntimeError
-
- # Evaluate a relational expression
- def releval(self, expr):
- etype = expr[1]
- lhs = self.eval(expr[2])
- rhs = self.eval(expr[3])
- if etype == '<':
- if lhs < rhs:
- return 1
- else:
- return 0
-
- elif etype == '<=':
- if lhs <= rhs:
- return 1
- else:
- return 0
-
- elif etype == '>':
- if lhs > rhs:
- return 1
- else:
- return 0
-
- elif etype == '>=':
- if lhs >= rhs:
- return 1
- else:
- return 0
-
- elif etype == '=':
- if lhs == rhs:
- return 1
- else:
- return 0
-
- elif etype == '<>':
- if lhs != rhs:
- return 1
- else:
- return 0
-
- # Assignment
- def assign(self, target, value):
- var, dim1, dim2 = target
- if not dim1 and not dim2:
- self.vars[var] = self.eval(value)
- elif dim1 and not dim2:
- # List assignment
- dim1val = self.eval(dim1)
- if not var in self.lists:
- self.lists[var] = [0] * 10
-
- if dim1val > len(self.lists[var]):
- print ("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc])
- raise RuntimeError
- self.lists[var][dim1val - 1] = self.eval(value)
- elif dim1 and dim2:
- dim1val = self.eval(dim1)
- dim2val = self.eval(dim2)
- if not var in self.tables:
- temp = [0] * 10
- v = []
- for i in range(10):
- v.append(temp[:])
- self.tables[var] = v
- # Variable already exists
- if dim1val > len(self.tables[var]) or dim2val > len(self.tables[var][0]):
- print("DIMENSION TOO LARGE AT LINE %s" % self.stat[self.pc])
- raise RuntimeError
- self.tables[var][dim1val - 1][dim2val - 1] = self.eval(value)
-
- # Change the current line number
- def goto(self, linenum):
- if not linenum in self.prog:
- print("UNDEFINED LINE NUMBER %d AT LINE %d" %
- (linenum, self.stat[self.pc]))
- raise RuntimeError
- self.pc = self.stat.index(linenum)
-
- # Run it
- def run(self):
- self.vars = {} # All variables
- self.lists = {} # List variables
- self.tables = {} # Tables
- self.loops = [] # Currently active loops
- self.loopend = {} # Mapping saying where loops end
- self.gosub = None # Gosub return point (if any)
- self.error = 0 # Indicates program error
-
- self.stat = list(self.prog) # Ordered list of all line numbers
- self.stat.sort()
- self.pc = 0 # Current program counter
-
- # Processing prior to running
-
- self.collect_data() # Collect all of the data statements
- self.check_end()
- self.check_loops()
-
- if self.error:
- raise RuntimeError
-
- while 1:
- line = self.stat[self.pc]
- instr = self.prog[line]
-
- op = instr[0]
-
- # END and STOP statements
- if op == 'END' or op == 'STOP':
- break # We're done
-
- # GOTO statement
- elif op == 'GOTO':
- newline = instr[1]
- self.goto(newline)
- continue
-
- # PRINT statement
- elif op == 'PRINT':
- plist = instr[1]
- out = ""
- for label, val in plist:
- if out:
- out += ' ' * (15 - (len(out) % 15))
- out += label
- if val:
- if label:
- out += " "
- eval = self.eval(val)
- out += str(eval)
- sys.stdout.write(out)
- end = instr[2]
- if not (end == ',' or end == ';'):
- sys.stdout.write("\n")
- if end == ',':
- sys.stdout.write(" " * (15 - (len(out) % 15)))
- if end == ';':
- sys.stdout.write(" " * (3 - (len(out) % 3)))
-
- # LET statement
- elif op == 'LET':
- target = instr[1]
- value = instr[2]
- self.assign(target, value)
-
- # READ statement
- elif op == 'READ':
- for target in instr[1]:
- if self.dc < len(self.data):
- value = ('NUM', self.data[self.dc])
- self.assign(target, value)
- self.dc += 1
- else:
- # No more data. Program ends
- return
- elif op == 'IF':
- relop = instr[1]
- newline = instr[2]
- if (self.releval(relop)):
- self.goto(newline)
- continue
-
- elif op == 'FOR':
- loopvar = instr[1]
- initval = instr[2]
- finval = instr[3]
- stepval = instr[4]
-
- # Check to see if this is a new loop
- if not self.loops or self.loops[-1][0] != self.pc:
- # Looks like a new loop. Make the initial assignment
- newvalue = initval
- self.assign((loopvar, None, None), initval)
- if not stepval:
- stepval = ('NUM', 1)
- stepval = self.eval(stepval) # Evaluate step here
- self.loops.append((self.pc, stepval))
- else:
- # It's a repeat of the previous loop
- # Update the value of the loop variable according to the
- # step
- stepval = ('NUM', self.loops[-1][1])
- newvalue = (
- 'BINOP', '+', ('VAR', (loopvar, None, None)), stepval)
-
- if self.loops[-1][1] < 0:
- relop = '>='
- else:
- relop = '<='
- if not self.releval(('RELOP', relop, newvalue, finval)):
- # Loop is done. Jump to the NEXT
- self.pc = self.loopend[self.pc]
- self.loops.pop()
- else:
- self.assign((loopvar, None, None), newvalue)
-
- elif op == 'NEXT':
- if not self.loops:
- print("NEXT WITHOUT FOR AT LINE %s" % line)
- return
-
- nextvar = instr[1]
- self.pc = self.loops[-1][0]
- loopinst = self.prog[self.stat[self.pc]]
- forvar = loopinst[1]
- if nextvar != forvar:
- print("NEXT DOESN'T MATCH FOR AT LINE %s" % line)
- return
- continue
- elif op == 'GOSUB':
- newline = instr[1]
- if self.gosub:
- print("ALREADY IN A SUBROUTINE AT LINE %s" % line)
- return
- self.gosub = self.stat[self.pc]
- self.goto(newline)
- continue
-
- elif op == 'RETURN':
- if not self.gosub:
- print("RETURN WITHOUT A GOSUB AT LINE %s" % line)
- return
- self.goto(self.gosub)
- self.gosub = None
-
- elif op == 'FUNC':
- fname = instr[1]
- pname = instr[2]
- expr = instr[3]
-
- def eval_func(pvalue, name=pname, self=self, expr=expr):
- self.assign((pname, None, None), pvalue)
- return self.eval(expr)
- self.functions[fname] = eval_func
-
- elif op == 'DIM':
- for vname, x, y in instr[1]:
- if y == 0:
- # Single dimension variable
- self.lists[vname] = [0] * x
- else:
- # Double dimension variable
- temp = [0] * y
- v = []
- for i in range(x):
- v.append(temp[:])
- self.tables[vname] = v
-
- self.pc += 1
-
- # Utility functions for program listing
- def expr_str(self, expr):
- etype = expr[0]
- if etype == 'NUM':
- return str(expr[1])
- elif etype == 'GROUP':
- return "(%s)" % self.expr_str(expr[1])
- elif etype == 'UNARY':
- if expr[1] == '-':
- return "-" + str(expr[2])
- elif etype == 'BINOP':
- return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3]))
- elif etype == 'VAR':
- return self.var_str(expr[1])
-
- def relexpr_str(self, expr):
- return "%s %s %s" % (self.expr_str(expr[2]), expr[1], self.expr_str(expr[3]))
-
- def var_str(self, var):
- varname, dim1, dim2 = var
- if not dim1 and not dim2:
- return varname
- if dim1 and not dim2:
- return "%s(%s)" % (varname, self.expr_str(dim1))
- return "%s(%s,%s)" % (varname, self.expr_str(dim1), self.expr_str(dim2))
-
- # Create a program listing
- def list(self):
- stat = list(self.prog) # Ordered list of all line numbers
- stat.sort()
- for line in stat:
- instr = self.prog[line]
- op = instr[0]
- if op in ['END', 'STOP', 'RETURN']:
- print("%s %s" % (line, op))
- continue
- elif op == 'REM':
- print("%s %s" % (line, instr[1]))
- elif op == 'PRINT':
- _out = "%s %s " % (line, op)
- first = 1
- for p in instr[1]:
- if not first:
- _out += ", "
- if p[0] and p[1]:
- _out += '"%s"%s' % (p[0], self.expr_str(p[1]))
- elif p[1]:
- _out += self.expr_str(p[1])
- else:
- _out += '"%s"' % (p[0],)
- first = 0
- if instr[2]:
- _out += instr[2]
- print(_out)
- elif op == 'LET':
- print("%s LET %s = %s" %
- (line, self.var_str(instr[1]), self.expr_str(instr[2])))
- elif op == 'READ':
- _out = "%s READ " % line
- first = 1
- for r in instr[1]:
- if not first:
- _out += ","
- _out += self.var_str(r)
- first = 0
- print(_out)
- elif op == 'IF':
- print("%s IF %s THEN %d" %
- (line, self.relexpr_str(instr[1]), instr[2]))
- elif op == 'GOTO' or op == 'GOSUB':
- print("%s %s %s" % (line, op, instr[1]))
- elif op == 'FOR':
- _out = "%s FOR %s = %s TO %s" % (
- line, instr[1], self.expr_str(instr[2]), self.expr_str(instr[3]))
- if instr[4]:
- _out += " STEP %s" % (self.expr_str(instr[4]))
- print(_out)
- elif op == 'NEXT':
- print("%s NEXT %s" % (line, instr[1]))
- elif op == 'FUNC':
- print("%s DEF %s(%s) = %s" %
- (line, instr[1], instr[2], self.expr_str(instr[3])))
- elif op == 'DIM':
- _out = "%s DIM " % line
- first = 1
- for vname, x, y in instr[1]:
- if not first:
- _out += ","
- first = 0
- if y == 0:
- _out += "%s(%d)" % (vname, x)
- else:
- _out += "%s(%d,%d)" % (vname, x, y)
-
- print(_out)
- elif op == 'DATA':
- _out = "%s DATA " % line
- first = 1
- for v in instr[1]:
- if not first:
- _out += ","
- first = 0
- _out += v
- print(_out)
-
- # Erase the current program
- def new(self):
- self.prog = {}
-
- # Insert statements
- def add_statements(self, prog):
- for line, stat in prog.items():
- self.prog[line] = stat
-
- # Delete a statement
- def del_line(self, lineno):
- try:
- del self.prog[lineno]
- except KeyError:
- pass
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py b/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py
deleted file mode 100644
index d610c7d9094..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/basparse.py
+++ /dev/null
@@ -1,474 +0,0 @@
-# An implementation of Dartmouth BASIC (1964)
-#
-
-from ply import *
-import basiclex
-
-tokens = basiclex.tokens
-
-precedence = (
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('left', 'POWER'),
- ('right', 'UMINUS')
-)
-
-# A BASIC program is a series of statements. We represent the program as a
-# dictionary of tuples indexed by line number.
-
-
-def p_program(p):
- '''program : program statement
- | statement'''
-
- if len(p) == 2 and p[1]:
- p[0] = {}
- line, stat = p[1]
- p[0][line] = stat
- elif len(p) == 3:
- p[0] = p[1]
- if not p[0]:
- p[0] = {}
- if p[2]:
- line, stat = p[2]
- p[0][line] = stat
-
-# This catch-all rule is used for any catastrophic errors. In this case,
-# we simply return nothing
-
-
-def p_program_error(p):
- '''program : error'''
- p[0] = None
- p.parser.error = 1
-
-# Format of all BASIC statements.
-
-
-def p_statement(p):
- '''statement : INTEGER command NEWLINE'''
- if isinstance(p[2], str):
- print("%s %s %s" % (p[2], "AT LINE", p[1]))
- p[0] = None
- p.parser.error = 1
- else:
- lineno = int(p[1])
- p[0] = (lineno, p[2])
-
-# Interactive statements.
-
-
-def p_statement_interactive(p):
- '''statement : RUN NEWLINE
- | LIST NEWLINE
- | NEW NEWLINE'''
- p[0] = (0, (p[1], 0))
-
-# Blank line number
-
-
-def p_statement_blank(p):
- '''statement : INTEGER NEWLINE'''
- p[0] = (0, ('BLANK', int(p[1])))
-
-# Error handling for malformed statements
-
-
-def p_statement_bad(p):
- '''statement : INTEGER error NEWLINE'''
- print("MALFORMED STATEMENT AT LINE %s" % p[1])
- p[0] = None
- p.parser.error = 1
-
-# Blank line
-
-
-def p_statement_newline(p):
- '''statement : NEWLINE'''
- p[0] = None
-
-# LET statement
-
-
-def p_command_let(p):
- '''command : LET variable EQUALS expr'''
- p[0] = ('LET', p[2], p[4])
-
-
-def p_command_let_bad(p):
- '''command : LET variable EQUALS error'''
- p[0] = "BAD EXPRESSION IN LET"
-
-# READ statement
-
-
-def p_command_read(p):
- '''command : READ varlist'''
- p[0] = ('READ', p[2])
-
-
-def p_command_read_bad(p):
- '''command : READ error'''
- p[0] = "MALFORMED VARIABLE LIST IN READ"
-
-# DATA statement
-
-
-def p_command_data(p):
- '''command : DATA numlist'''
- p[0] = ('DATA', p[2])
-
-
-def p_command_data_bad(p):
- '''command : DATA error'''
- p[0] = "MALFORMED NUMBER LIST IN DATA"
-
-# PRINT statement
-
-
-def p_command_print(p):
- '''command : PRINT plist optend'''
- p[0] = ('PRINT', p[2], p[3])
-
-
-def p_command_print_bad(p):
- '''command : PRINT error'''
- p[0] = "MALFORMED PRINT STATEMENT"
-
-# Optional ending on PRINT. Either a comma (,) or semicolon (;)
-
-
-def p_optend(p):
- '''optend : COMMA
- | SEMI
- |'''
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = None
-
-# PRINT statement with no arguments
-
-
-def p_command_print_empty(p):
- '''command : PRINT'''
- p[0] = ('PRINT', [], None)
-
-# GOTO statement
-
-
-def p_command_goto(p):
- '''command : GOTO INTEGER'''
- p[0] = ('GOTO', int(p[2]))
-
-
-def p_command_goto_bad(p):
- '''command : GOTO error'''
- p[0] = "INVALID LINE NUMBER IN GOTO"
-
-# IF-THEN statement
-
-
-def p_command_if(p):
- '''command : IF relexpr THEN INTEGER'''
- p[0] = ('IF', p[2], int(p[4]))
-
-
-def p_command_if_bad(p):
- '''command : IF error THEN INTEGER'''
- p[0] = "BAD RELATIONAL EXPRESSION"
-
-
-def p_command_if_bad2(p):
- '''command : IF relexpr THEN error'''
- p[0] = "INVALID LINE NUMBER IN THEN"
-
-# FOR statement
-
-
-def p_command_for(p):
- '''command : FOR ID EQUALS expr TO expr optstep'''
- p[0] = ('FOR', p[2], p[4], p[6], p[7])
-
-
-def p_command_for_bad_initial(p):
- '''command : FOR ID EQUALS error TO expr optstep'''
- p[0] = "BAD INITIAL VALUE IN FOR STATEMENT"
-
-
-def p_command_for_bad_final(p):
- '''command : FOR ID EQUALS expr TO error optstep'''
- p[0] = "BAD FINAL VALUE IN FOR STATEMENT"
-
-
-def p_command_for_bad_step(p):
- '''command : FOR ID EQUALS expr TO expr STEP error'''
- p[0] = "MALFORMED STEP IN FOR STATEMENT"
-
-# Optional STEP qualifier on FOR statement
-
-
-def p_optstep(p):
- '''optstep : STEP expr
- | empty'''
- if len(p) == 3:
- p[0] = p[2]
- else:
- p[0] = None
-
-# NEXT statement
-
-
-def p_command_next(p):
- '''command : NEXT ID'''
-
- p[0] = ('NEXT', p[2])
-
-
-def p_command_next_bad(p):
- '''command : NEXT error'''
- p[0] = "MALFORMED NEXT"
-
-# END statement
-
-
-def p_command_end(p):
- '''command : END'''
- p[0] = ('END',)
-
-# REM statement
-
-
-def p_command_rem(p):
- '''command : REM'''
- p[0] = ('REM', p[1])
-
-# STOP statement
-
-
-def p_command_stop(p):
- '''command : STOP'''
- p[0] = ('STOP',)
-
-# DEF statement
-
-
-def p_command_def(p):
- '''command : DEF ID LPAREN ID RPAREN EQUALS expr'''
- p[0] = ('FUNC', p[2], p[4], p[7])
-
-
-def p_command_def_bad_rhs(p):
- '''command : DEF ID LPAREN ID RPAREN EQUALS error'''
- p[0] = "BAD EXPRESSION IN DEF STATEMENT"
-
-
-def p_command_def_bad_arg(p):
- '''command : DEF ID LPAREN error RPAREN EQUALS expr'''
- p[0] = "BAD ARGUMENT IN DEF STATEMENT"
-
-# GOSUB statement
-
-
-def p_command_gosub(p):
- '''command : GOSUB INTEGER'''
- p[0] = ('GOSUB', int(p[2]))
-
-
-def p_command_gosub_bad(p):
- '''command : GOSUB error'''
- p[0] = "INVALID LINE NUMBER IN GOSUB"
-
-# RETURN statement
-
-
-def p_command_return(p):
- '''command : RETURN'''
- p[0] = ('RETURN',)
-
-# DIM statement
-
-
-def p_command_dim(p):
- '''command : DIM dimlist'''
- p[0] = ('DIM', p[2])
-
-
-def p_command_dim_bad(p):
- '''command : DIM error'''
- p[0] = "MALFORMED VARIABLE LIST IN DIM"
-
-# List of variables supplied to DIM statement
-
-
-def p_dimlist(p):
- '''dimlist : dimlist COMMA dimitem
- | dimitem'''
- if len(p) == 4:
- p[0] = p[1]
- p[0].append(p[3])
- else:
- p[0] = [p[1]]
-
-# DIM items
-
-
-def p_dimitem_single(p):
- '''dimitem : ID LPAREN INTEGER RPAREN'''
- p[0] = (p[1], eval(p[3]), 0)
-
-
-def p_dimitem_double(p):
- '''dimitem : ID LPAREN INTEGER COMMA INTEGER RPAREN'''
- p[0] = (p[1], eval(p[3]), eval(p[5]))
-
-# Arithmetic expressions
-
-
-def p_expr_binary(p):
- '''expr : expr PLUS expr
- | expr MINUS expr
- | expr TIMES expr
- | expr DIVIDE expr
- | expr POWER expr'''
-
- p[0] = ('BINOP', p[2], p[1], p[3])
-
-
-def p_expr_number(p):
- '''expr : INTEGER
- | FLOAT'''
- p[0] = ('NUM', eval(p[1]))
-
-
-def p_expr_variable(p):
- '''expr : variable'''
- p[0] = ('VAR', p[1])
-
-
-def p_expr_group(p):
- '''expr : LPAREN expr RPAREN'''
- p[0] = ('GROUP', p[2])
-
-
-def p_expr_unary(p):
- '''expr : MINUS expr %prec UMINUS'''
- p[0] = ('UNARY', '-', p[2])
-
-# Relational expressions
-
-
-def p_relexpr(p):
- '''relexpr : expr LT expr
- | expr LE expr
- | expr GT expr
- | expr GE expr
- | expr EQUALS expr
- | expr NE expr'''
- p[0] = ('RELOP', p[2], p[1], p[3])
-
-# Variables
-
-
-def p_variable(p):
- '''variable : ID
- | ID LPAREN expr RPAREN
- | ID LPAREN expr COMMA expr RPAREN'''
- if len(p) == 2:
- p[0] = (p[1], None, None)
- elif len(p) == 5:
- p[0] = (p[1], p[3], None)
- else:
- p[0] = (p[1], p[3], p[5])
-
-# Builds a list of variable targets as a Python list
-
-
-def p_varlist(p):
- '''varlist : varlist COMMA variable
- | variable'''
- if len(p) > 2:
- p[0] = p[1]
- p[0].append(p[3])
- else:
- p[0] = [p[1]]
-
-
-# Builds a list of numbers as a Python list
-
-def p_numlist(p):
- '''numlist : numlist COMMA number
- | number'''
-
- if len(p) > 2:
- p[0] = p[1]
- p[0].append(p[3])
- else:
- p[0] = [p[1]]
-
-# A number. May be an integer or a float
-
-
-def p_number(p):
- '''number : INTEGER
- | FLOAT'''
- p[0] = eval(p[1])
-
-# A signed number.
-
-
-def p_number_signed(p):
- '''number : MINUS INTEGER
- | MINUS FLOAT'''
- p[0] = eval("-" + p[2])
-
-# List of targets for a print statement
-# Returns a list of tuples (label,expr)
-
-
-def p_plist(p):
- '''plist : plist COMMA pitem
- | pitem'''
- if len(p) > 3:
- p[0] = p[1]
- p[0].append(p[3])
- else:
- p[0] = [p[1]]
-
-
-def p_item_string(p):
- '''pitem : STRING'''
- p[0] = (p[1][1:-1], None)
-
-
-def p_item_string_expr(p):
- '''pitem : STRING expr'''
- p[0] = (p[1][1:-1], p[2])
-
-
-def p_item_expr(p):
- '''pitem : expr'''
- p[0] = ("", p[1])
-
-# Empty
-
-
-def p_empty(p):
- '''empty : '''
-
-# Catastrophic error handler
-
-
-def p_error(p):
- if not p:
- print("SYNTAX ERROR AT EOF")
-
-bparser = yacc.yacc()
-
-
-def parse(data, debug=0):
- bparser.error = 0
- p = bparser.parse(data, debug=debug)
- if bparser.error:
- return None
- return p
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas
deleted file mode 100644
index 87bd95b32ec..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/dim.bas
+++ /dev/null
@@ -1,14 +0,0 @@
-5 DIM A(50,15)
-10 FOR I = 1 TO 50
-20 FOR J = 1 TO 15
-30 LET A(I,J) = I + J
-35 REM PRINT I,J, A(I,J)
-40 NEXT J
-50 NEXT I
-100 FOR I = 1 TO 50
-110 FOR J = 1 TO 15
-120 PRINT A(I,J),
-130 NEXT J
-140 PRINT
-150 NEXT I
-999 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas
deleted file mode 100644
index 447ee16a927..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/func.bas
+++ /dev/null
@@ -1,5 +0,0 @@
-10 DEF FDX(X) = 2*X
-20 FOR I = 0 TO 100
-30 PRINT FDX(I)
-40 NEXT I
-50 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas
deleted file mode 100644
index d0b77460894..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/gcd.bas
+++ /dev/null
@@ -1,22 +0,0 @@
-10 PRINT "A","B","C","GCD"
-20 READ A,B,C
-30 LET X = A
-40 LET Y = B
-50 GOSUB 200
-60 LET X = G
-70 LET Y = C
-80 GOSUB 200
-90 PRINT A, B, C, G
-100 GOTO 20
-110 DATA 60, 90, 120
-120 DATA 38456, 64872, 98765
-130 DATA 32, 384, 72
-200 LET Q = INT(X/Y)
-210 LET R = X - Q*Y
-220 IF R = 0 THEN 300
-230 LET X = Y
-240 LET Y = R
-250 GOTO 200
-300 LET G = Y
-310 RETURN
-999 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas
deleted file mode 100644
index 99737b16f15..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/gosub.bas
+++ /dev/null
@@ -1,13 +0,0 @@
-100 LET X = 3
-110 GOSUB 400
-120 PRINT U, V, W
-200 LET X = 5
-210 GOSUB 400
-220 LET Z = U + 2*V + 3*W
-230 PRINT Z
-240 GOTO 999
-400 LET U = X*X
-410 LET V = X*X*X
-420 LET W = X*X*X*X + X*X*X + X*X + X
-430 RETURN
-999 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas
deleted file mode 100644
index cc6f0b0b511..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/hello.bas
+++ /dev/null
@@ -1,4 +0,0 @@
-5 REM HELLO WORLD PROGAM
-10 PRINT "HELLO WORLD"
-99 END
-
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas
deleted file mode 100644
index 56c08220b3e..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/linear.bas
+++ /dev/null
@@ -1,17 +0,0 @@
-1 REM ::: SOLVE A SYSTEM OF LINEAR EQUATIONS
-2 REM ::: A1*X1 + A2*X2 = B1
-3 REM ::: A3*X1 + A4*X2 = B2
-4 REM --------------------------------------
-10 READ A1, A2, A3, A4
-15 LET D = A1 * A4 - A3 * A2
-20 IF D = 0 THEN 65
-30 READ B1, B2
-37 LET X1 = (B1*A4 - B2*A2) / D
-42 LET X2 = (A1*B2 - A3*B1) / D
-55 PRINT X1, X2
-60 GOTO 30
-65 PRINT "NO UNIQUE SOLUTION"
-70 DATA 1, 2, 4
-80 DATA 2, -7, 5
-85 DATA 1, 3, 4, -7
-90 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas
deleted file mode 100644
index b96901530c2..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/maxsin.bas
+++ /dev/null
@@ -1,12 +0,0 @@
-5 PRINT "X VALUE", "SINE", "RESOLUTION"
-10 READ D
-20 LET M = -1
-30 FOR X = 0 TO 3 STEP D
-40 IF SIN(X) <= M THEN 80
-50 LET X0 = X
-60 LET M = SIN(X)
-80 NEXT X
-85 PRINT X0, M, D
-90 GOTO 10
-100 DATA .1, .01, .001
-110 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas
deleted file mode 100644
index a454dc3e211..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/powers.bas
+++ /dev/null
@@ -1,13 +0,0 @@
-5 PRINT "THIS PROGRAM COMPUTES AND PRINTS THE NTH POWERS"
-6 PRINT "OF THE NUMBERS LESS THAN OR EQUAL TO N FOR VARIOUS"
-7 PRINT "N FROM 1 THROUGH 7"
-8 PRINT
-10 FOR N = 1 TO 7
-15 PRINT "N = "N
-20 FOR I = 1 TO N
-30 PRINT I^N,
-40 NEXT I
-50 PRINT
-60 PRINT
-70 NEXT N
-80 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas
deleted file mode 100644
index 4ff7a146702..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/rand.bas
+++ /dev/null
@@ -1,4 +0,0 @@
-10 FOR I = 1 TO 20
-20 PRINT INT(10*RND(0))
-30 NEXT I
-40 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas
deleted file mode 100644
index a39aefb762c..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/sales.bas
+++ /dev/null
@@ -1,20 +0,0 @@
-10 FOR I = 1 TO 3
-20 READ P(I)
-30 NEXT I
-40 FOR I = 1 TO 3
-50 FOR J = 1 TO 5
-60 READ S(I,J)
-70 NEXT J
-80 NEXT I
-90 FOR J = 1 TO 5
-100 LET S = 0
-110 FOR I = 1 TO 3
-120 LET S = S + P(I) * S(I,J)
-130 NEXT I
-140 PRINT "TOTAL SALES FOR SALESMAN"J, "$"S
-150 NEXT J
-200 DATA 1.25, 4.30, 2.50
-210 DATA 40, 20, 37, 29, 42
-220 DATA 10, 16, 3, 21, 8
-230 DATA 35, 47, 29, 16, 33
-300 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas
deleted file mode 100644
index 5ced3974e24..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/sears.bas
+++ /dev/null
@@ -1,18 +0,0 @@
-1 REM :: THIS PROGRAM COMPUTES HOW MANY TIMES YOU HAVE TO FOLD
-2 REM :: A PIECE OF PAPER SO THAT IT IS TALLER THAN THE
-3 REM :: SEARS TOWER.
-4 REM :: S = HEIGHT OF TOWER (METERS)
-5 REM :: T = THICKNESS OF PAPER (MILLIMETERS)
-10 LET S = 442
-20 LET T = 0.1
-30 REM CONVERT T TO METERS
-40 LET T = T * .001
-50 LET F = 1
-60 LET H = T
-100 IF H > S THEN 200
-120 LET H = 2 * H
-125 LET F = F + 1
-130 GOTO 100
-200 PRINT "NUMBER OF FOLDS ="F
-220 PRINT "FINAL HEIGHT ="H
-999 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas
deleted file mode 100644
index 6673a91524f..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt1.bas
+++ /dev/null
@@ -1,5 +0,0 @@
-10 LET X = 0
-20 LET X = X + 1
-30 PRINT X, SQR(X)
-40 IF X < 100 THEN 20
-50 END
diff --git a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas b/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas
deleted file mode 100644
index 862d85ef269..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/BASIC/sqrt2.bas
+++ /dev/null
@@ -1,4 +0,0 @@
-10 FOR X = 1 TO 100
-20 PRINT X, SQR(X)
-30 NEXT X
-40 END
diff --git a/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py b/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py
deleted file mode 100644
index 8b493b40dca..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/GardenSnake/GardenSnake.py
+++ /dev/null
@@ -1,777 +0,0 @@
-# GardenSnake - a parser generator demonstration program
-#
-# This implements a modified version of a subset of Python:
-# - only 'def', 'return' and 'if' statements
-# - 'if' only has 'then' clause (no elif nor else)
-# - single-quoted strings only, content in raw format
-# - numbers are decimal.Decimal instances (not integers or floats)
-# - no print statment; use the built-in 'print' function
-# - only < > == + - / * implemented (and unary + -)
-# - assignment and tuple assignment work
-# - no generators of any sort
-# - no ... well, no quite a lot
-
-# Why? I'm thinking about a new indentation-based configuration
-# language for a project and wanted to figure out how to do it. Once
-# I got that working I needed a way to test it out. My original AST
-# was dumb so I decided to target Python's AST and compile it into
-# Python code. Plus, it's pretty cool that it only took a day or so
-# from sitting down with Ply to having working code.
-
-# This uses David Beazley's Ply from http://www.dabeaz.com/ply/
-
-# This work is hereby released into the Public Domain. To view a copy of
-# the public domain dedication, visit
-# http://creativecommons.org/licenses/publicdomain/ or send a letter to
-# Creative Commons, 543 Howard Street, 5th Floor, San Francisco,
-# California, 94105, USA.
-#
-# Portions of this work are derived from Python's Grammar definition
-# and may be covered under the Python copyright and license
-#
-# Andrew Dalke / Dalke Scientific Software, LLC
-# 30 August 2006 / Cape Town, South Africa
-
-# Changelog:
-# 30 August - added link to CC license; removed the "swapcase" encoding
-
-# Modifications for inclusion in PLY distribution
-import sys
-sys.path.insert(0, "../..")
-from ply import *
-
-##### Lexer ######
-#import lex
-import decimal
-
-tokens = (
- 'DEF',
- 'IF',
- 'NAME',
- 'NUMBER', # Python decimals
- 'STRING', # single quoted strings only; syntax of raw strings
- 'LPAR',
- 'RPAR',
- 'COLON',
- 'EQ',
- 'ASSIGN',
- 'LT',
- 'GT',
- 'PLUS',
- 'MINUS',
- 'MULT',
- 'DIV',
- 'RETURN',
- 'WS',
- 'NEWLINE',
- 'COMMA',
- 'SEMICOLON',
- 'INDENT',
- 'DEDENT',
- 'ENDMARKER',
-)
-
-#t_NUMBER = r'\d+'
-# taken from decmial.py but without the leading sign
-
-
-def t_NUMBER(t):
- r"""(\d+(\.\d*)?|\.\d+)([eE][-+]? \d+)?"""
- t.value = decimal.Decimal(t.value)
- return t
-
-
-def t_STRING(t):
- r"'([^\\']+|\\'|\\\\)*'" # I think this is right ...
- t.value = t.value[1:-1].decode("string-escape") # .swapcase() # for fun
- return t
-
-t_COLON = r':'
-t_EQ = r'=='
-t_ASSIGN = r'='
-t_LT = r'<'
-t_GT = r'>'
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_MULT = r'\*'
-t_DIV = r'/'
-t_COMMA = r','
-t_SEMICOLON = r';'
-
-# Ply nicely documented how to do this.
-
-RESERVED = {
- "def": "DEF",
- "if": "IF",
- "return": "RETURN",
-}
-
-
-def t_NAME(t):
- r'[a-zA-Z_][a-zA-Z0-9_]*'
- t.type = RESERVED.get(t.value, "NAME")
- return t
-
-# Putting this before t_WS let it consume lines with only comments in
-# them so the latter code never sees the WS part. Not consuming the
-# newline. Needed for "if 1: #comment"
-
-
-def t_comment(t):
- r"[ ]*\043[^\n]*" # \043 is '#'
- pass
-
-
-# Whitespace
-def t_WS(t):
- r' [ ]+ '
- if t.lexer.at_line_start and t.lexer.paren_count == 0:
- return t
-
-# Don't generate newline tokens when inside of parenthesis, eg
-# a = (1,
-# 2, 3)
-
-
-def t_newline(t):
- r'\n+'
- t.lexer.lineno += len(t.value)
- t.type = "NEWLINE"
- if t.lexer.paren_count == 0:
- return t
-
-
-def t_LPAR(t):
- r'\('
- t.lexer.paren_count += 1
- return t
-
-
-def t_RPAR(t):
- r'\)'
- # check for underflow? should be the job of the parser
- t.lexer.paren_count -= 1
- return t
-
-
-def t_error(t):
- raise SyntaxError("Unknown symbol %r" % (t.value[0],))
- print "Skipping", repr(t.value[0])
- t.lexer.skip(1)
-
-# I implemented INDENT / DEDENT generation as a post-processing filter
-
-# The original lex token stream contains WS and NEWLINE characters.
-# WS will only occur before any other tokens on a line.
-
-# I have three filters. One tags tokens by adding two attributes.
-# "must_indent" is True if the token must be indented from the
-# previous code. The other is "at_line_start" which is True for WS
-# and the first non-WS/non-NEWLINE on a line. It flags the check so
-# see if the new line has changed indication level.
-
-# Python's syntax has three INDENT states
-# 0) no colon hence no need to indent
-# 1) "if 1: go()" - simple statements have a COLON but no need for an indent
-# 2) "if 1:\n go()" - complex statements have a COLON NEWLINE and must indent
-NO_INDENT = 0
-MAY_INDENT = 1
-MUST_INDENT = 2
-
-# only care about whitespace at the start of a line
-
-
-def track_tokens_filter(lexer, tokens):
- lexer.at_line_start = at_line_start = True
- indent = NO_INDENT
- saw_colon = False
- for token in tokens:
- token.at_line_start = at_line_start
-
- if token.type == "COLON":
- at_line_start = False
- indent = MAY_INDENT
- token.must_indent = False
-
- elif token.type == "NEWLINE":
- at_line_start = True
- if indent == MAY_INDENT:
- indent = MUST_INDENT
- token.must_indent = False
-
- elif token.type == "WS":
- assert token.at_line_start == True
- at_line_start = True
- token.must_indent = False
-
- else:
- # A real token; only indent after COLON NEWLINE
- if indent == MUST_INDENT:
- token.must_indent = True
- else:
- token.must_indent = False
- at_line_start = False
- indent = NO_INDENT
-
- yield token
- lexer.at_line_start = at_line_start
-
-
-def _new_token(type, lineno):
- tok = lex.LexToken()
- tok.type = type
- tok.value = None
- tok.lineno = lineno
- return tok
-
-# Synthesize a DEDENT tag
-
-
-def DEDENT(lineno):
- return _new_token("DEDENT", lineno)
-
-# Synthesize an INDENT tag
-
-
-def INDENT(lineno):
- return _new_token("INDENT", lineno)
-
-
-# Track the indentation level and emit the right INDENT / DEDENT events.
-def indentation_filter(tokens):
- # A stack of indentation levels; will never pop item 0
- levels = [0]
- token = None
- depth = 0
- prev_was_ws = False
- for token in tokens:
- # if 1:
- # print "Process", token,
- # if token.at_line_start:
- # print "at_line_start",
- # if token.must_indent:
- # print "must_indent",
- # print
-
- # WS only occurs at the start of the line
- # There may be WS followed by NEWLINE so
- # only track the depth here. Don't indent/dedent
- # until there's something real.
- if token.type == "WS":
- assert depth == 0
- depth = len(token.value)
- prev_was_ws = True
- # WS tokens are never passed to the parser
- continue
-
- if token.type == "NEWLINE":
- depth = 0
- if prev_was_ws or token.at_line_start:
- # ignore blank lines
- continue
- # pass the other cases on through
- yield token
- continue
-
- # then it must be a real token (not WS, not NEWLINE)
- # which can affect the indentation level
-
- prev_was_ws = False
- if token.must_indent:
- # The current depth must be larger than the previous level
- if not (depth > levels[-1]):
- raise IndentationError("expected an indented block")
-
- levels.append(depth)
- yield INDENT(token.lineno)
-
- elif token.at_line_start:
- # Must be on the same level or one of the previous levels
- if depth == levels[-1]:
- # At the same level
- pass
- elif depth > levels[-1]:
- raise IndentationError(
- "indentation increase but not in new block")
- else:
- # Back up; but only if it matches a previous level
- try:
- i = levels.index(depth)
- except ValueError:
- raise IndentationError("inconsistent indentation")
- for _ in range(i + 1, len(levels)):
- yield DEDENT(token.lineno)
- levels.pop()
-
- yield token
-
- ### Finished processing ###
-
- # Must dedent any remaining levels
- if len(levels) > 1:
- assert token is not None
- for _ in range(1, len(levels)):
- yield DEDENT(token.lineno)
-
-
-# The top-level filter adds an ENDMARKER, if requested.
-# Python's grammar uses it.
-def filter(lexer, add_endmarker=True):
- token = None
- tokens = iter(lexer.token, None)
- tokens = track_tokens_filter(lexer, tokens)
- for token in indentation_filter(tokens):
- yield token
-
- if add_endmarker:
- lineno = 1
- if token is not None:
- lineno = token.lineno
- yield _new_token("ENDMARKER", lineno)
-
-# Combine Ply and my filters into a new lexer
-
-
-class IndentLexer(object):
-
- def __init__(self, debug=0, optimize=0, lextab='lextab', reflags=0):
- self.lexer = lex.lex(debug=debug, optimize=optimize,
- lextab=lextab, reflags=reflags)
- self.token_stream = None
-
- def input(self, s, add_endmarker=True):
- self.lexer.paren_count = 0
- self.lexer.input(s)
- self.token_stream = filter(self.lexer, add_endmarker)
-
- def token(self):
- try:
- return self.token_stream.next()
- except StopIteration:
- return None
-
-########## Parser (tokens -> AST) ######
-
-# also part of Ply
-#import yacc
-
-# I use the Python AST
-from compiler import ast
-
-# Helper function
-
-
-def Assign(left, right):
- names = []
- if isinstance(left, ast.Name):
- # Single assignment on left
- return ast.Assign([ast.AssName(left.name, 'OP_ASSIGN')], right)
- elif isinstance(left, ast.Tuple):
- # List of things - make sure they are Name nodes
- names = []
- for child in left.getChildren():
- if not isinstance(child, ast.Name):
- raise SyntaxError("that assignment not supported")
- names.append(child.name)
- ass_list = [ast.AssName(name, 'OP_ASSIGN') for name in names]
- return ast.Assign([ast.AssTuple(ass_list)], right)
- else:
- raise SyntaxError("Can't do that yet")
-
-
-# The grammar comments come from Python's Grammar/Grammar file
-
-# NB: compound_stmt in single_input is followed by extra NEWLINE!
-# file_input: (NEWLINE | stmt)* ENDMARKER
-def p_file_input_end(p):
- """file_input_end : file_input ENDMARKER"""
- p[0] = ast.Stmt(p[1])
-
-
-def p_file_input(p):
- """file_input : file_input NEWLINE
- | file_input stmt
- | NEWLINE
- | stmt"""
- if isinstance(p[len(p) - 1], basestring):
- if len(p) == 3:
- p[0] = p[1]
- else:
- p[0] = [] # p == 2 --> only a blank line
- else:
- if len(p) == 3:
- p[0] = p[1] + p[2]
- else:
- p[0] = p[1]
-
-
-# funcdef: [decorators] 'def' NAME parameters ':' suite
-# ignoring decorators
-def p_funcdef(p):
- "funcdef : DEF NAME parameters COLON suite"
- p[0] = ast.Function(None, p[2], tuple(p[3]), (), 0, None, p[5])
-
-# parameters: '(' [varargslist] ')'
-
-
-def p_parameters(p):
- """parameters : LPAR RPAR
- | LPAR varargslist RPAR"""
- if len(p) == 3:
- p[0] = []
- else:
- p[0] = p[2]
-
-
-# varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME) |
-# highly simplified
-def p_varargslist(p):
- """varargslist : varargslist COMMA NAME
- | NAME"""
- if len(p) == 4:
- p[0] = p[1] + p[3]
- else:
- p[0] = [p[1]]
-
-# stmt: simple_stmt | compound_stmt
-
-
-def p_stmt_simple(p):
- """stmt : simple_stmt"""
- # simple_stmt is a list
- p[0] = p[1]
-
-
-def p_stmt_compound(p):
- """stmt : compound_stmt"""
- p[0] = [p[1]]
-
-# simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
-
-
-def p_simple_stmt(p):
- """simple_stmt : small_stmts NEWLINE
- | small_stmts SEMICOLON NEWLINE"""
- p[0] = p[1]
-
-
-def p_small_stmts(p):
- """small_stmts : small_stmts SEMICOLON small_stmt
- | small_stmt"""
- if len(p) == 4:
- p[0] = p[1] + [p[3]]
- else:
- p[0] = [p[1]]
-
-# small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
-# import_stmt | global_stmt | exec_stmt | assert_stmt
-
-
-def p_small_stmt(p):
- """small_stmt : flow_stmt
- | expr_stmt"""
- p[0] = p[1]
-
-# expr_stmt: testlist (augassign (yield_expr|testlist) |
-# ('=' (yield_expr|testlist))*)
-# augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
-# '<<=' | '>>=' | '**=' | '//=')
-
-
-def p_expr_stmt(p):
- """expr_stmt : testlist ASSIGN testlist
- | testlist """
- if len(p) == 2:
- # a list of expressions
- p[0] = ast.Discard(p[1])
- else:
- p[0] = Assign(p[1], p[3])
-
-
-def p_flow_stmt(p):
- "flow_stmt : return_stmt"
- p[0] = p[1]
-
-# return_stmt: 'return' [testlist]
-
-
-def p_return_stmt(p):
- "return_stmt : RETURN testlist"
- p[0] = ast.Return(p[2])
-
-
-def p_compound_stmt(p):
- """compound_stmt : if_stmt
- | funcdef"""
- p[0] = p[1]
-
-
-def p_if_stmt(p):
- 'if_stmt : IF test COLON suite'
- p[0] = ast.If([(p[2], p[4])], None)
-
-
-def p_suite(p):
- """suite : simple_stmt
- | NEWLINE INDENT stmts DEDENT"""
- if len(p) == 2:
- p[0] = ast.Stmt(p[1])
- else:
- p[0] = ast.Stmt(p[3])
-
-
-def p_stmts(p):
- """stmts : stmts stmt
- | stmt"""
- if len(p) == 3:
- p[0] = p[1] + p[2]
- else:
- p[0] = p[1]
-
-# No using Python's approach because Ply supports precedence
-
-# comparison: expr (comp_op expr)*
-# arith_expr: term (('+'|'-') term)*
-# term: factor (('*'|'/'|'%'|'//') factor)*
-# factor: ('+'|'-'|'~') factor | power
-# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
-
-
-def make_lt_compare((left, right)):
- return ast.Compare(left, [('<', right), ])
-
-
-def make_gt_compare((left, right)):
- return ast.Compare(left, [('>', right), ])
-
-
-def make_eq_compare((left, right)):
- return ast.Compare(left, [('==', right), ])
-
-
-binary_ops = {
- "+": ast.Add,
- "-": ast.Sub,
- "*": ast.Mul,
- "/": ast.Div,
- "<": make_lt_compare,
- ">": make_gt_compare,
- "==": make_eq_compare,
-}
-unary_ops = {
- "+": ast.UnaryAdd,
- "-": ast.UnarySub,
-}
-precedence = (
- ("left", "EQ", "GT", "LT"),
- ("left", "PLUS", "MINUS"),
- ("left", "MULT", "DIV"),
-)
-
-
-def p_comparison(p):
- """comparison : comparison PLUS comparison
- | comparison MINUS comparison
- | comparison MULT comparison
- | comparison DIV comparison
- | comparison LT comparison
- | comparison EQ comparison
- | comparison GT comparison
- | PLUS comparison
- | MINUS comparison
- | power"""
- if len(p) == 4:
- p[0] = binary_ops[p[2]]((p[1], p[3]))
- elif len(p) == 3:
- p[0] = unary_ops[p[1]](p[2])
- else:
- p[0] = p[1]
-
-# power: atom trailer* ['**' factor]
-# trailers enables function calls. I only allow one level of calls
-# so this is 'trailer'
-
-
-def p_power(p):
- """power : atom
- | atom trailer"""
- if len(p) == 2:
- p[0] = p[1]
- else:
- if p[2][0] == "CALL":
- p[0] = ast.CallFunc(p[1], p[2][1], None, None)
- else:
- raise AssertionError("not implemented")
-
-
-def p_atom_name(p):
- """atom : NAME"""
- p[0] = ast.Name(p[1])
-
-
-def p_atom_number(p):
- """atom : NUMBER
- | STRING"""
- p[0] = ast.Const(p[1])
-
-
-def p_atom_tuple(p):
- """atom : LPAR testlist RPAR"""
- p[0] = p[2]
-
-# trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
-
-
-def p_trailer(p):
- "trailer : LPAR arglist RPAR"
- p[0] = ("CALL", p[2])
-
-# testlist: test (',' test)* [',']
-# Contains shift/reduce error
-
-
-def p_testlist(p):
- """testlist : testlist_multi COMMA
- | testlist_multi """
- if len(p) == 2:
- p[0] = p[1]
- else:
- # May need to promote singleton to tuple
- if isinstance(p[1], list):
- p[0] = p[1]
- else:
- p[0] = [p[1]]
- # Convert into a tuple?
- if isinstance(p[0], list):
- p[0] = ast.Tuple(p[0])
-
-
-def p_testlist_multi(p):
- """testlist_multi : testlist_multi COMMA test
- | test"""
- if len(p) == 2:
- # singleton
- p[0] = p[1]
- else:
- if isinstance(p[1], list):
- p[0] = p[1] + [p[3]]
- else:
- # singleton -> tuple
- p[0] = [p[1], p[3]]
-
-
-# test: or_test ['if' or_test 'else' test] | lambdef
-# as I don't support 'and', 'or', and 'not' this works down to 'comparison'
-def p_test(p):
- "test : comparison"
- p[0] = p[1]
-
-
-# arglist: (argument ',')* (argument [',']| '*' test [',' '**' test] | '**' test)
-# XXX INCOMPLETE: this doesn't allow the trailing comma
-def p_arglist(p):
- """arglist : arglist COMMA argument
- | argument"""
- if len(p) == 4:
- p[0] = p[1] + [p[3]]
- else:
- p[0] = [p[1]]
-
-# argument: test [gen_for] | test '=' test # Really [keyword '='] test
-
-
-def p_argument(p):
- "argument : test"
- p[0] = p[1]
-
-
-def p_error(p):
- # print "Error!", repr(p)
- raise SyntaxError(p)
-
-
-class GardenSnakeParser(object):
-
- def __init__(self, lexer=None):
- if lexer is None:
- lexer = IndentLexer()
- self.lexer = lexer
- self.parser = yacc.yacc(start="file_input_end")
-
- def parse(self, code):
- self.lexer.input(code)
- result = self.parser.parse(lexer=self.lexer)
- return ast.Module(None, result)
-
-
-###### Code generation ######
-
-from compiler import misc, syntax, pycodegen
-
-
-class GardenSnakeCompiler(object):
-
- def __init__(self):
- self.parser = GardenSnakeParser()
-
- def compile(self, code, filename="<string>"):
- tree = self.parser.parse(code)
- # print tree
- misc.set_filename(filename, tree)
- syntax.check(tree)
- gen = pycodegen.ModuleCodeGenerator(tree)
- code = gen.getCode()
- return code
-
-####### Test code #######
-
-compile = GardenSnakeCompiler().compile
-
-code = r"""
-
-print('LET\'S TRY THIS \\OUT')
-
-#Comment here
-def x(a):
- print('called with',a)
- if a == 1:
- return 2
- if a*2 > 10: return 999 / 4
- # Another comment here
-
- return a+2*3
-
-ints = (1, 2,
- 3, 4,
-5)
-print('mutiline-expression', ints)
-
-t = 4+1/3*2+6*(9-5+1)
-print('predence test; should be 34+2/3:', t, t==(34+2/3))
-
-print('numbers', 1,2,3,4,5)
-if 1:
- 8
- a=9
- print(x(a))
-
-print(x(1))
-print(x(2))
-print(x(8),'3')
-print('this is decimal', 1/5)
-print('BIG DECIMAL', 1.234567891234567e12345)
-
-"""
-
-# Set up the GardenSnake run-time environment
-
-
-def print_(*args):
- print "-->", " ".join(map(str, args))
-
-globals()["print"] = print_
-
-compiled_code = compile(code)
-
-exec compiled_code in globals()
-print "Done"
diff --git a/components/script/dom/bindings/codegen/ply/example/GardenSnake/README b/components/script/dom/bindings/codegen/ply/example/GardenSnake/README
deleted file mode 100644
index 4d8be2db050..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/GardenSnake/README
+++ /dev/null
@@ -1,5 +0,0 @@
-This example is Andrew Dalke's GardenSnake language. It shows how to process an
-indentation-like language like Python. Further details can be found here:
-
-http://dalkescientific.com/writings/diary/archive/2006/08/30/gardensnake_language.html
-
diff --git a/components/script/dom/bindings/codegen/ply/example/README b/components/script/dom/bindings/codegen/ply/example/README
deleted file mode 100644
index 63519b557f1..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/README
+++ /dev/null
@@ -1,10 +0,0 @@
-Simple examples:
- calc - Simple calculator
- classcalc - Simple calculate defined as a class
-
-Complex examples
- ansic - ANSI C grammar from K&R
- BASIC - A small BASIC interpreter
- GardenSnake - A simple python-like language
- yply - Converts Unix yacc files to PLY programs.
-
diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/README b/components/script/dom/bindings/codegen/ply/example/ansic/README
deleted file mode 100644
index e049d3b4e48..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/ansic/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This example is incomplete. Was going to specify an ANSI C parser.
-This is part of it.
diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/clex.py b/components/script/dom/bindings/codegen/ply/example/ansic/clex.py
deleted file mode 100644
index 4bde1d730b0..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/ansic/clex.py
+++ /dev/null
@@ -1,168 +0,0 @@
-# ----------------------------------------------------------------------
-# clex.py
-#
-# A lexer for ANSI C.
-# ----------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-import ply.lex as lex
-
-# Reserved words
-reserved = (
- 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST', 'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE',
- 'ELSE', 'ENUM', 'EXTERN', 'FLOAT', 'FOR', 'GOTO', 'IF', 'INT', 'LONG', 'REGISTER',
- 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT', 'SWITCH', 'TYPEDEF',
- 'UNION', 'UNSIGNED', 'VOID', 'VOLATILE', 'WHILE',
-)
-
-tokens = reserved + (
- # Literals (identifier, integer constant, float constant, string constant,
- # char const)
- 'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',
-
- # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
- 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
- 'LOR', 'LAND', 'LNOT',
- 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
-
- # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
- 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
- 'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
-
- # Increment/decrement (++,--)
- 'PLUSPLUS', 'MINUSMINUS',
-
- # Structure dereference (->)
- 'ARROW',
-
- # Conditional operator (?)
- 'CONDOP',
-
- # Delimeters ( ) [ ] { } , . ; :
- 'LPAREN', 'RPAREN',
- 'LBRACKET', 'RBRACKET',
- 'LBRACE', 'RBRACE',
- 'COMMA', 'PERIOD', 'SEMI', 'COLON',
-
- # Ellipsis (...)
- 'ELLIPSIS',
-)
-
-# Completely ignored characters
-t_ignore = ' \t\x0c'
-
-# Newlines
-
-
-def t_NEWLINE(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-# Operators
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_TIMES = r'\*'
-t_DIVIDE = r'/'
-t_MOD = r'%'
-t_OR = r'\|'
-t_AND = r'&'
-t_NOT = r'~'
-t_XOR = r'\^'
-t_LSHIFT = r'<<'
-t_RSHIFT = r'>>'
-t_LOR = r'\|\|'
-t_LAND = r'&&'
-t_LNOT = r'!'
-t_LT = r'<'
-t_GT = r'>'
-t_LE = r'<='
-t_GE = r'>='
-t_EQ = r'=='
-t_NE = r'!='
-
-# Assignment operators
-
-t_EQUALS = r'='
-t_TIMESEQUAL = r'\*='
-t_DIVEQUAL = r'/='
-t_MODEQUAL = r'%='
-t_PLUSEQUAL = r'\+='
-t_MINUSEQUAL = r'-='
-t_LSHIFTEQUAL = r'<<='
-t_RSHIFTEQUAL = r'>>='
-t_ANDEQUAL = r'&='
-t_OREQUAL = r'\|='
-t_XOREQUAL = r'\^='
-
-# Increment/decrement
-t_PLUSPLUS = r'\+\+'
-t_MINUSMINUS = r'--'
-
-# ->
-t_ARROW = r'->'
-
-# ?
-t_CONDOP = r'\?'
-
-# Delimeters
-t_LPAREN = r'\('
-t_RPAREN = r'\)'
-t_LBRACKET = r'\['
-t_RBRACKET = r'\]'
-t_LBRACE = r'\{'
-t_RBRACE = r'\}'
-t_COMMA = r','
-t_PERIOD = r'\.'
-t_SEMI = r';'
-t_COLON = r':'
-t_ELLIPSIS = r'\.\.\.'
-
-# Identifiers and reserved words
-
-reserved_map = {}
-for r in reserved:
- reserved_map[r.lower()] = r
-
-
-def t_ID(t):
- r'[A-Za-z_][\w_]*'
- t.type = reserved_map.get(t.value, "ID")
- return t
-
-# Integer literal
-t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
-
-# Floating literal
-t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
-
-# String literal
-t_SCONST = r'\"([^\\\n]|(\\.))*?\"'
-
-# Character constant 'c' or L'c'
-t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''
-
-# Comments
-
-
-def t_comment(t):
- r'/\*(.|\n)*?\*/'
- t.lexer.lineno += t.value.count('\n')
-
-# Preprocessor directive (ignored)
-
-
-def t_preprocessor(t):
- r'\#(.)*?\n'
- t.lexer.lineno += 1
-
-
-def t_error(t):
- print("Illegal character %s" % repr(t.value[0]))
- t.lexer.skip(1)
-
-lexer = lex.lex()
-if __name__ == "__main__":
- lex.runmain(lexer)
diff --git a/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py b/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py
deleted file mode 100644
index 5fe9bce0428..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/ansic/cparse.py
+++ /dev/null
@@ -1,1048 +0,0 @@
-# -----------------------------------------------------------------------------
-# cparse.py
-#
-# Simple parser for ANSI C. Based on the grammar in K&R, 2nd Ed.
-# -----------------------------------------------------------------------------
-
-import sys
-import clex
-import ply.yacc as yacc
-
-# Get the token map
-tokens = clex.tokens
-
-# translation-unit:
-
-
-def p_translation_unit_1(t):
- 'translation_unit : external_declaration'
- pass
-
-
-def p_translation_unit_2(t):
- 'translation_unit : translation_unit external_declaration'
- pass
-
-# external-declaration:
-
-
-def p_external_declaration_1(t):
- 'external_declaration : function_definition'
- pass
-
-
-def p_external_declaration_2(t):
- 'external_declaration : declaration'
- pass
-
-# function-definition:
-
-
-def p_function_definition_1(t):
- 'function_definition : declaration_specifiers declarator declaration_list compound_statement'
- pass
-
-
-def p_function_definition_2(t):
- 'function_definition : declarator declaration_list compound_statement'
- pass
-
-
-def p_function_definition_3(t):
- 'function_definition : declarator compound_statement'
- pass
-
-
-def p_function_definition_4(t):
- 'function_definition : declaration_specifiers declarator compound_statement'
- pass
-
-# declaration:
-
-
-def p_declaration_1(t):
- 'declaration : declaration_specifiers init_declarator_list SEMI'
- pass
-
-
-def p_declaration_2(t):
- 'declaration : declaration_specifiers SEMI'
- pass
-
-# declaration-list:
-
-
-def p_declaration_list_1(t):
- 'declaration_list : declaration'
- pass
-
-
-def p_declaration_list_2(t):
- 'declaration_list : declaration_list declaration '
- pass
-
-# declaration-specifiers
-
-
-def p_declaration_specifiers_1(t):
- 'declaration_specifiers : storage_class_specifier declaration_specifiers'
- pass
-
-
-def p_declaration_specifiers_2(t):
- 'declaration_specifiers : type_specifier declaration_specifiers'
- pass
-
-
-def p_declaration_specifiers_3(t):
- 'declaration_specifiers : type_qualifier declaration_specifiers'
- pass
-
-
-def p_declaration_specifiers_4(t):
- 'declaration_specifiers : storage_class_specifier'
- pass
-
-
-def p_declaration_specifiers_5(t):
- 'declaration_specifiers : type_specifier'
- pass
-
-
-def p_declaration_specifiers_6(t):
- 'declaration_specifiers : type_qualifier'
- pass
-
-# storage-class-specifier
-
-
-def p_storage_class_specifier(t):
- '''storage_class_specifier : AUTO
- | REGISTER
- | STATIC
- | EXTERN
- | TYPEDEF
- '''
- pass
-
-# type-specifier:
-
-
-def p_type_specifier(t):
- '''type_specifier : VOID
- | CHAR
- | SHORT
- | INT
- | LONG
- | FLOAT
- | DOUBLE
- | SIGNED
- | UNSIGNED
- | struct_or_union_specifier
- | enum_specifier
- | TYPEID
- '''
- pass
-
-# type-qualifier:
-
-
-def p_type_qualifier(t):
- '''type_qualifier : CONST
- | VOLATILE'''
- pass
-
-# struct-or-union-specifier
-
-
-def p_struct_or_union_specifier_1(t):
- 'struct_or_union_specifier : struct_or_union ID LBRACE struct_declaration_list RBRACE'
- pass
-
-
-def p_struct_or_union_specifier_2(t):
- 'struct_or_union_specifier : struct_or_union LBRACE struct_declaration_list RBRACE'
- pass
-
-
-def p_struct_or_union_specifier_3(t):
- 'struct_or_union_specifier : struct_or_union ID'
- pass
-
-# struct-or-union:
-
-
-def p_struct_or_union(t):
- '''struct_or_union : STRUCT
- | UNION
- '''
- pass
-
-# struct-declaration-list:
-
-
-def p_struct_declaration_list_1(t):
- 'struct_declaration_list : struct_declaration'
- pass
-
-
-def p_struct_declaration_list_2(t):
- 'struct_declaration_list : struct_declaration_list struct_declaration'
- pass
-
-# init-declarator-list:
-
-
-def p_init_declarator_list_1(t):
- 'init_declarator_list : init_declarator'
- pass
-
-
-def p_init_declarator_list_2(t):
- 'init_declarator_list : init_declarator_list COMMA init_declarator'
- pass
-
-# init-declarator
-
-
-def p_init_declarator_1(t):
- 'init_declarator : declarator'
- pass
-
-
-def p_init_declarator_2(t):
- 'init_declarator : declarator EQUALS initializer'
- pass
-
-# struct-declaration:
-
-
-def p_struct_declaration(t):
- 'struct_declaration : specifier_qualifier_list struct_declarator_list SEMI'
- pass
-
-# specifier-qualifier-list:
-
-
-def p_specifier_qualifier_list_1(t):
- 'specifier_qualifier_list : type_specifier specifier_qualifier_list'
- pass
-
-
-def p_specifier_qualifier_list_2(t):
- 'specifier_qualifier_list : type_specifier'
- pass
-
-
-def p_specifier_qualifier_list_3(t):
- 'specifier_qualifier_list : type_qualifier specifier_qualifier_list'
- pass
-
-
-def p_specifier_qualifier_list_4(t):
- 'specifier_qualifier_list : type_qualifier'
- pass
-
-# struct-declarator-list:
-
-
-def p_struct_declarator_list_1(t):
- 'struct_declarator_list : struct_declarator'
- pass
-
-
-def p_struct_declarator_list_2(t):
- 'struct_declarator_list : struct_declarator_list COMMA struct_declarator'
- pass
-
-# struct-declarator:
-
-
-def p_struct_declarator_1(t):
- 'struct_declarator : declarator'
- pass
-
-
-def p_struct_declarator_2(t):
- 'struct_declarator : declarator COLON constant_expression'
- pass
-
-
-def p_struct_declarator_3(t):
- 'struct_declarator : COLON constant_expression'
- pass
-
-# enum-specifier:
-
-
-def p_enum_specifier_1(t):
- 'enum_specifier : ENUM ID LBRACE enumerator_list RBRACE'
- pass
-
-
-def p_enum_specifier_2(t):
- 'enum_specifier : ENUM LBRACE enumerator_list RBRACE'
- pass
-
-
-def p_enum_specifier_3(t):
- 'enum_specifier : ENUM ID'
- pass
-
-# enumerator_list:
-
-
-def p_enumerator_list_1(t):
- 'enumerator_list : enumerator'
- pass
-
-
-def p_enumerator_list_2(t):
- 'enumerator_list : enumerator_list COMMA enumerator'
- pass
-
-# enumerator:
-
-
-def p_enumerator_1(t):
- 'enumerator : ID'
- pass
-
-
-def p_enumerator_2(t):
- 'enumerator : ID EQUALS constant_expression'
- pass
-
-# declarator:
-
-
-def p_declarator_1(t):
- 'declarator : pointer direct_declarator'
- pass
-
-
-def p_declarator_2(t):
- 'declarator : direct_declarator'
- pass
-
-# direct-declarator:
-
-
-def p_direct_declarator_1(t):
- 'direct_declarator : ID'
- pass
-
-
-def p_direct_declarator_2(t):
- 'direct_declarator : LPAREN declarator RPAREN'
- pass
-
-
-def p_direct_declarator_3(t):
- 'direct_declarator : direct_declarator LBRACKET constant_expression_opt RBRACKET'
- pass
-
-
-def p_direct_declarator_4(t):
- 'direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN '
- pass
-
-
-def p_direct_declarator_5(t):
- 'direct_declarator : direct_declarator LPAREN identifier_list RPAREN '
- pass
-
-
-def p_direct_declarator_6(t):
- 'direct_declarator : direct_declarator LPAREN RPAREN '
- pass
-
-# pointer:
-
-
-def p_pointer_1(t):
- 'pointer : TIMES type_qualifier_list'
- pass
-
-
-def p_pointer_2(t):
- 'pointer : TIMES'
- pass
-
-
-def p_pointer_3(t):
- 'pointer : TIMES type_qualifier_list pointer'
- pass
-
-
-def p_pointer_4(t):
- 'pointer : TIMES pointer'
- pass
-
-# type-qualifier-list:
-
-
-def p_type_qualifier_list_1(t):
- 'type_qualifier_list : type_qualifier'
- pass
-
-
-def p_type_qualifier_list_2(t):
- 'type_qualifier_list : type_qualifier_list type_qualifier'
- pass
-
-# parameter-type-list:
-
-
-def p_parameter_type_list_1(t):
- 'parameter_type_list : parameter_list'
- pass
-
-
-def p_parameter_type_list_2(t):
- 'parameter_type_list : parameter_list COMMA ELLIPSIS'
- pass
-
-# parameter-list:
-
-
-def p_parameter_list_1(t):
- 'parameter_list : parameter_declaration'
- pass
-
-
-def p_parameter_list_2(t):
- 'parameter_list : parameter_list COMMA parameter_declaration'
- pass
-
-# parameter-declaration:
-
-
-def p_parameter_declaration_1(t):
- 'parameter_declaration : declaration_specifiers declarator'
- pass
-
-
-def p_parameter_declaration_2(t):
- 'parameter_declaration : declaration_specifiers abstract_declarator_opt'
- pass
-
-# identifier-list:
-
-
-def p_identifier_list_1(t):
- 'identifier_list : ID'
- pass
-
-
-def p_identifier_list_2(t):
- 'identifier_list : identifier_list COMMA ID'
- pass
-
-# initializer:
-
-
-def p_initializer_1(t):
- 'initializer : assignment_expression'
- pass
-
-
-def p_initializer_2(t):
- '''initializer : LBRACE initializer_list RBRACE
- | LBRACE initializer_list COMMA RBRACE'''
- pass
-
-# initializer-list:
-
-
-def p_initializer_list_1(t):
- 'initializer_list : initializer'
- pass
-
-
-def p_initializer_list_2(t):
- 'initializer_list : initializer_list COMMA initializer'
- pass
-
-# type-name:
-
-
-def p_type_name(t):
- 'type_name : specifier_qualifier_list abstract_declarator_opt'
- pass
-
-
-def p_abstract_declarator_opt_1(t):
- 'abstract_declarator_opt : empty'
- pass
-
-
-def p_abstract_declarator_opt_2(t):
- 'abstract_declarator_opt : abstract_declarator'
- pass
-
-# abstract-declarator:
-
-
-def p_abstract_declarator_1(t):
- 'abstract_declarator : pointer '
- pass
-
-
-def p_abstract_declarator_2(t):
- 'abstract_declarator : pointer direct_abstract_declarator'
- pass
-
-
-def p_abstract_declarator_3(t):
- 'abstract_declarator : direct_abstract_declarator'
- pass
-
-# direct-abstract-declarator:
-
-
-def p_direct_abstract_declarator_1(t):
- 'direct_abstract_declarator : LPAREN abstract_declarator RPAREN'
- pass
-
-
-def p_direct_abstract_declarator_2(t):
- 'direct_abstract_declarator : direct_abstract_declarator LBRACKET constant_expression_opt RBRACKET'
- pass
-
-
-def p_direct_abstract_declarator_3(t):
- 'direct_abstract_declarator : LBRACKET constant_expression_opt RBRACKET'
- pass
-
-
-def p_direct_abstract_declarator_4(t):
- 'direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN'
- pass
-
-
-def p_direct_abstract_declarator_5(t):
- 'direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN'
- pass
-
-# Optional fields in abstract declarators
-
-
-def p_constant_expression_opt_1(t):
- 'constant_expression_opt : empty'
- pass
-
-
-def p_constant_expression_opt_2(t):
- 'constant_expression_opt : constant_expression'
- pass
-
-
-def p_parameter_type_list_opt_1(t):
- 'parameter_type_list_opt : empty'
- pass
-
-
-def p_parameter_type_list_opt_2(t):
- 'parameter_type_list_opt : parameter_type_list'
- pass
-
-# statement:
-
-
-def p_statement(t):
- '''
- statement : labeled_statement
- | expression_statement
- | compound_statement
- | selection_statement
- | iteration_statement
- | jump_statement
- '''
- pass
-
-# labeled-statement:
-
-
-def p_labeled_statement_1(t):
- 'labeled_statement : ID COLON statement'
- pass
-
-
-def p_labeled_statement_2(t):
- 'labeled_statement : CASE constant_expression COLON statement'
- pass
-
-
-def p_labeled_statement_3(t):
- 'labeled_statement : DEFAULT COLON statement'
- pass
-
-# expression-statement:
-
-
-def p_expression_statement(t):
- 'expression_statement : expression_opt SEMI'
- pass
-
-# compound-statement:
-
-
-def p_compound_statement_1(t):
- 'compound_statement : LBRACE declaration_list statement_list RBRACE'
- pass
-
-
-def p_compound_statement_2(t):
- 'compound_statement : LBRACE statement_list RBRACE'
- pass
-
-
-def p_compound_statement_3(t):
- 'compound_statement : LBRACE declaration_list RBRACE'
- pass
-
-
-def p_compound_statement_4(t):
- 'compound_statement : LBRACE RBRACE'
- pass
-
-# statement-list:
-
-
-def p_statement_list_1(t):
- 'statement_list : statement'
- pass
-
-
-def p_statement_list_2(t):
- 'statement_list : statement_list statement'
- pass
-
-# selection-statement
-
-
-def p_selection_statement_1(t):
- 'selection_statement : IF LPAREN expression RPAREN statement'
- pass
-
-
-def p_selection_statement_2(t):
- 'selection_statement : IF LPAREN expression RPAREN statement ELSE statement '
- pass
-
-
-def p_selection_statement_3(t):
- 'selection_statement : SWITCH LPAREN expression RPAREN statement '
- pass
-
-# iteration_statement:
-
-
-def p_iteration_statement_1(t):
- 'iteration_statement : WHILE LPAREN expression RPAREN statement'
- pass
-
-
-def p_iteration_statement_2(t):
- 'iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement '
- pass
-
-
-def p_iteration_statement_3(t):
- 'iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI'
- pass
-
-# jump_statement:
-
-
-def p_jump_statement_1(t):
- 'jump_statement : GOTO ID SEMI'
- pass
-
-
-def p_jump_statement_2(t):
- 'jump_statement : CONTINUE SEMI'
- pass
-
-
-def p_jump_statement_3(t):
- 'jump_statement : BREAK SEMI'
- pass
-
-
-def p_jump_statement_4(t):
- 'jump_statement : RETURN expression_opt SEMI'
- pass
-
-
-def p_expression_opt_1(t):
- 'expression_opt : empty'
- pass
-
-
-def p_expression_opt_2(t):
- 'expression_opt : expression'
- pass
-
-# expression:
-
-
-def p_expression_1(t):
- 'expression : assignment_expression'
- pass
-
-
-def p_expression_2(t):
- 'expression : expression COMMA assignment_expression'
- pass
-
-# assigment_expression:
-
-
-def p_assignment_expression_1(t):
- 'assignment_expression : conditional_expression'
- pass
-
-
-def p_assignment_expression_2(t):
- 'assignment_expression : unary_expression assignment_operator assignment_expression'
- pass
-
-# assignment_operator:
-
-
-def p_assignment_operator(t):
- '''
- assignment_operator : EQUALS
- | TIMESEQUAL
- | DIVEQUAL
- | MODEQUAL
- | PLUSEQUAL
- | MINUSEQUAL
- | LSHIFTEQUAL
- | RSHIFTEQUAL
- | ANDEQUAL
- | OREQUAL
- | XOREQUAL
- '''
- pass
-
-# conditional-expression
-
-
-def p_conditional_expression_1(t):
- 'conditional_expression : logical_or_expression'
- pass
-
-
-def p_conditional_expression_2(t):
- 'conditional_expression : logical_or_expression CONDOP expression COLON conditional_expression '
- pass
-
-# constant-expression
-
-
-def p_constant_expression(t):
- 'constant_expression : conditional_expression'
- pass
-
-# logical-or-expression
-
-
-def p_logical_or_expression_1(t):
- 'logical_or_expression : logical_and_expression'
- pass
-
-
-def p_logical_or_expression_2(t):
- 'logical_or_expression : logical_or_expression LOR logical_and_expression'
- pass
-
-# logical-and-expression
-
-
-def p_logical_and_expression_1(t):
- 'logical_and_expression : inclusive_or_expression'
- pass
-
-
-def p_logical_and_expression_2(t):
- 'logical_and_expression : logical_and_expression LAND inclusive_or_expression'
- pass
-
-# inclusive-or-expression:
-
-
-def p_inclusive_or_expression_1(t):
- 'inclusive_or_expression : exclusive_or_expression'
- pass
-
-
-def p_inclusive_or_expression_2(t):
- 'inclusive_or_expression : inclusive_or_expression OR exclusive_or_expression'
- pass
-
-# exclusive-or-expression:
-
-
-def p_exclusive_or_expression_1(t):
- 'exclusive_or_expression : and_expression'
- pass
-
-
-def p_exclusive_or_expression_2(t):
- 'exclusive_or_expression : exclusive_or_expression XOR and_expression'
- pass
-
-# AND-expression
-
-
-def p_and_expression_1(t):
- 'and_expression : equality_expression'
- pass
-
-
-def p_and_expression_2(t):
- 'and_expression : and_expression AND equality_expression'
- pass
-
-
-# equality-expression:
-def p_equality_expression_1(t):
- 'equality_expression : relational_expression'
- pass
-
-
-def p_equality_expression_2(t):
- 'equality_expression : equality_expression EQ relational_expression'
- pass
-
-
-def p_equality_expression_3(t):
- 'equality_expression : equality_expression NE relational_expression'
- pass
-
-
-# relational-expression:
-def p_relational_expression_1(t):
- 'relational_expression : shift_expression'
- pass
-
-
-def p_relational_expression_2(t):
- 'relational_expression : relational_expression LT shift_expression'
- pass
-
-
-def p_relational_expression_3(t):
- 'relational_expression : relational_expression GT shift_expression'
- pass
-
-
-def p_relational_expression_4(t):
- 'relational_expression : relational_expression LE shift_expression'
- pass
-
-
-def p_relational_expression_5(t):
- 'relational_expression : relational_expression GE shift_expression'
- pass
-
-# shift-expression
-
-
-def p_shift_expression_1(t):
- 'shift_expression : additive_expression'
- pass
-
-
-def p_shift_expression_2(t):
- 'shift_expression : shift_expression LSHIFT additive_expression'
- pass
-
-
-def p_shift_expression_3(t):
- 'shift_expression : shift_expression RSHIFT additive_expression'
- pass
-
-# additive-expression
-
-
-def p_additive_expression_1(t):
- 'additive_expression : multiplicative_expression'
- pass
-
-
-def p_additive_expression_2(t):
- 'additive_expression : additive_expression PLUS multiplicative_expression'
- pass
-
-
-def p_additive_expression_3(t):
- 'additive_expression : additive_expression MINUS multiplicative_expression'
- pass
-
-# multiplicative-expression
-
-
-def p_multiplicative_expression_1(t):
- 'multiplicative_expression : cast_expression'
- pass
-
-
-def p_multiplicative_expression_2(t):
- 'multiplicative_expression : multiplicative_expression TIMES cast_expression'
- pass
-
-
-def p_multiplicative_expression_3(t):
- 'multiplicative_expression : multiplicative_expression DIVIDE cast_expression'
- pass
-
-
-def p_multiplicative_expression_4(t):
- 'multiplicative_expression : multiplicative_expression MOD cast_expression'
- pass
-
-# cast-expression:
-
-
-def p_cast_expression_1(t):
- 'cast_expression : unary_expression'
- pass
-
-
-def p_cast_expression_2(t):
- 'cast_expression : LPAREN type_name RPAREN cast_expression'
- pass
-
-# unary-expression:
-
-
-def p_unary_expression_1(t):
- 'unary_expression : postfix_expression'
- pass
-
-
-def p_unary_expression_2(t):
- 'unary_expression : PLUSPLUS unary_expression'
- pass
-
-
-def p_unary_expression_3(t):
- 'unary_expression : MINUSMINUS unary_expression'
- pass
-
-
-def p_unary_expression_4(t):
- 'unary_expression : unary_operator cast_expression'
- pass
-
-
-def p_unary_expression_5(t):
- 'unary_expression : SIZEOF unary_expression'
- pass
-
-
-def p_unary_expression_6(t):
- 'unary_expression : SIZEOF LPAREN type_name RPAREN'
- pass
-
-# unary-operator
-
-
-def p_unary_operator(t):
- '''unary_operator : AND
- | TIMES
- | PLUS
- | MINUS
- | NOT
- | LNOT '''
- pass
-
-# postfix-expression:
-
-
-def p_postfix_expression_1(t):
- 'postfix_expression : primary_expression'
- pass
-
-
-def p_postfix_expression_2(t):
- 'postfix_expression : postfix_expression LBRACKET expression RBRACKET'
- pass
-
-
-def p_postfix_expression_3(t):
- 'postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN'
- pass
-
-
-def p_postfix_expression_4(t):
- 'postfix_expression : postfix_expression LPAREN RPAREN'
- pass
-
-
-def p_postfix_expression_5(t):
- 'postfix_expression : postfix_expression PERIOD ID'
- pass
-
-
-def p_postfix_expression_6(t):
- 'postfix_expression : postfix_expression ARROW ID'
- pass
-
-
-def p_postfix_expression_7(t):
- 'postfix_expression : postfix_expression PLUSPLUS'
- pass
-
-
-def p_postfix_expression_8(t):
- 'postfix_expression : postfix_expression MINUSMINUS'
- pass
-
-# primary-expression:
-
-
-def p_primary_expression(t):
- '''primary_expression : ID
- | constant
- | SCONST
- | LPAREN expression RPAREN'''
- pass
-
-# argument-expression-list:
-
-
-def p_argument_expression_list(t):
- '''argument_expression_list : assignment_expression
- | argument_expression_list COMMA assignment_expression'''
- pass
-
-# constant:
-
-
-def p_constant(t):
- '''constant : ICONST
- | FCONST
- | CCONST'''
- pass
-
-
-def p_empty(t):
- 'empty : '
- pass
-
-
-def p_error(t):
- print("Whoa. We're hosed")
-
-import profile
-# Build the grammar
-
-yacc.yacc()
-#yacc.yacc(method='LALR',write_tables=False,debug=False)
-
-#profile.run("yacc.yacc(method='LALR')")
diff --git a/components/script/dom/bindings/codegen/ply/example/calc/calc.py b/components/script/dom/bindings/codegen/ply/example/calc/calc.py
deleted file mode 100644
index 824c3d7d0a2..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/calc/calc.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. This is from O'Reilly's
-# "Lex and Yacc", p. 63.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-tokens = (
- 'NAME', 'NUMBER',
-)
-
-literals = ['=', '+', '-', '*', '/', '(', ')']
-
-# Tokens
-
-t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
-
-def t_NUMBER(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
-t_ignore = " \t"
-
-
-def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-
-def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex()
-
-# Parsing rules
-
-precedence = (
- ('left', '+', '-'),
- ('left', '*', '/'),
- ('right', 'UMINUS'),
-)
-
-# dictionary of names
-names = {}
-
-
-def p_statement_assign(p):
- 'statement : NAME "=" expression'
- names[p[1]] = p[3]
-
-
-def p_statement_expr(p):
- 'statement : expression'
- print(p[1])
-
-
-def p_expression_binop(p):
- '''expression : expression '+' expression
- | expression '-' expression
- | expression '*' expression
- | expression '/' expression'''
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
-
-
-def p_expression_uminus(p):
- "expression : '-' expression %prec UMINUS"
- p[0] = -p[2]
-
-
-def p_expression_group(p):
- "expression : '(' expression ')'"
- p[0] = p[2]
-
-
-def p_expression_number(p):
- "expression : NUMBER"
- p[0] = p[1]
-
-
-def p_expression_name(p):
- "expression : NAME"
- try:
- p[0] = names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
-
-def p_error(p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
-import ply.yacc as yacc
-yacc.yacc()
-
-while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(s)
diff --git a/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py b/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py
deleted file mode 100644
index 06831e2ca56..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/calcdebug/calc.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# This example shows how to run the parser in a debugging mode
-# with output routed to a logging object.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-tokens = (
- 'NAME', 'NUMBER',
-)
-
-literals = ['=', '+', '-', '*', '/', '(', ')']
-
-# Tokens
-
-t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
-
-def t_NUMBER(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
-t_ignore = " \t"
-
-
-def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-
-def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex()
-
-# Parsing rules
-
-precedence = (
- ('left', '+', '-'),
- ('left', '*', '/'),
- ('right', 'UMINUS'),
-)
-
-# dictionary of names
-names = {}
-
-
-def p_statement_assign(p):
- 'statement : NAME "=" expression'
- names[p[1]] = p[3]
-
-
-def p_statement_expr(p):
- 'statement : expression'
- print(p[1])
-
-
-def p_expression_binop(p):
- '''expression : expression '+' expression
- | expression '-' expression
- | expression '*' expression
- | expression '/' expression'''
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
-
-
-def p_expression_uminus(p):
- "expression : '-' expression %prec UMINUS"
- p[0] = -p[2]
-
-
-def p_expression_group(p):
- "expression : '(' expression ')'"
- p[0] = p[2]
-
-
-def p_expression_number(p):
- "expression : NUMBER"
- p[0] = p[1]
-
-
-def p_expression_name(p):
- "expression : NAME"
- try:
- p[0] = names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
-
-def p_error(p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
-import ply.yacc as yacc
-yacc.yacc()
-
-import logging
-logging.basicConfig(
- level=logging.INFO,
- filename="parselog.txt"
-)
-
-while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(s, debug=logging.getLogger())
diff --git a/components/script/dom/bindings/codegen/ply/example/calceof/calc.py b/components/script/dom/bindings/codegen/ply/example/calceof/calc.py
deleted file mode 100644
index 22b39a41a86..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/calceof/calc.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. Asks the user for more input and
-# demonstrates the use of the t_eof() rule.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-tokens = (
- 'NAME', 'NUMBER',
-)
-
-literals = ['=', '+', '-', '*', '/', '(', ')']
-
-# Tokens
-
-t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
-
-def t_NUMBER(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
-t_ignore = " \t"
-
-
-def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-
-def t_eof(t):
- more = raw_input('... ')
- if more:
- t.lexer.input(more + '\n')
- return t.lexer.token()
- else:
- return None
-
-
-def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex()
-
-# Parsing rules
-
-precedence = (
- ('left', '+', '-'),
- ('left', '*', '/'),
- ('right', 'UMINUS'),
-)
-
-# dictionary of names
-names = {}
-
-
-def p_statement_assign(p):
- 'statement : NAME "=" expression'
- names[p[1]] = p[3]
-
-
-def p_statement_expr(p):
- 'statement : expression'
- print(p[1])
-
-
-def p_expression_binop(p):
- '''expression : expression '+' expression
- | expression '-' expression
- | expression '*' expression
- | expression '/' expression'''
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
-
-
-def p_expression_uminus(p):
- "expression : '-' expression %prec UMINUS"
- p[0] = -p[2]
-
-
-def p_expression_group(p):
- "expression : '(' expression ')'"
- p[0] = p[2]
-
-
-def p_expression_number(p):
- "expression : NUMBER"
- p[0] = p[1]
-
-
-def p_expression_name(p):
- "expression : NAME"
- try:
- p[0] = names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
-
-def p_error(p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
-import ply.yacc as yacc
-yacc.yacc()
-
-while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(s + '\n')
diff --git a/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py b/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py
deleted file mode 100755
index ada4afd426c..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/classcalc/calc.py
+++ /dev/null
@@ -1,165 +0,0 @@
-#!/usr/bin/env python
-
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. This is from O'Reilly's
-# "Lex and Yacc", p. 63.
-#
-# Class-based example contributed to PLY by David McNab
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-import ply.lex as lex
-import ply.yacc as yacc
-import os
-
-
-class Parser:
- """
- Base class for a lexer/parser that has the rules defined as methods
- """
- tokens = ()
- precedence = ()
-
- def __init__(self, **kw):
- self.debug = kw.get('debug', 0)
- self.names = {}
- try:
- modname = os.path.split(os.path.splitext(__file__)[0])[
- 1] + "_" + self.__class__.__name__
- except:
- modname = "parser" + "_" + self.__class__.__name__
- self.debugfile = modname + ".dbg"
- self.tabmodule = modname + "_" + "parsetab"
- # print self.debugfile, self.tabmodule
-
- # Build the lexer and parser
- lex.lex(module=self, debug=self.debug)
- yacc.yacc(module=self,
- debug=self.debug,
- debugfile=self.debugfile,
- tabmodule=self.tabmodule)
-
- def run(self):
- while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(s)
-
-
-class Calc(Parser):
-
- tokens = (
- 'NAME', 'NUMBER',
- 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS',
- 'LPAREN', 'RPAREN',
- )
-
- # Tokens
-
- t_PLUS = r'\+'
- t_MINUS = r'-'
- t_EXP = r'\*\*'
- t_TIMES = r'\*'
- t_DIVIDE = r'/'
- t_EQUALS = r'='
- t_LPAREN = r'\('
- t_RPAREN = r'\)'
- t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
- def t_NUMBER(self, t):
- r'\d+'
- try:
- t.value = int(t.value)
- except ValueError:
- print("Integer value too large %s" % t.value)
- t.value = 0
- # print "parsed number %s" % repr(t.value)
- return t
-
- t_ignore = " \t"
-
- def t_newline(self, t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
- def t_error(self, t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
- # Parsing rules
-
- precedence = (
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('left', 'EXP'),
- ('right', 'UMINUS'),
- )
-
- def p_statement_assign(self, p):
- 'statement : NAME EQUALS expression'
- self.names[p[1]] = p[3]
-
- def p_statement_expr(self, p):
- 'statement : expression'
- print(p[1])
-
- def p_expression_binop(self, p):
- """
- expression : expression PLUS expression
- | expression MINUS expression
- | expression TIMES expression
- | expression DIVIDE expression
- | expression EXP expression
- """
- # print [repr(p[i]) for i in range(0,4)]
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
- elif p[2] == '**':
- p[0] = p[1] ** p[3]
-
- def p_expression_uminus(self, p):
- 'expression : MINUS expression %prec UMINUS'
- p[0] = -p[2]
-
- def p_expression_group(self, p):
- 'expression : LPAREN expression RPAREN'
- p[0] = p[2]
-
- def p_expression_number(self, p):
- 'expression : NUMBER'
- p[0] = p[1]
-
- def p_expression_name(self, p):
- 'expression : NAME'
- try:
- p[0] = self.names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
- def p_error(self, p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
-if __name__ == '__main__':
- calc = Calc()
- calc.run()
diff --git a/components/script/dom/bindings/codegen/ply/example/cleanup.sh b/components/script/dom/bindings/codegen/ply/example/cleanup.sh
deleted file mode 100755
index 3e115f41c42..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/cleanup.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-rm -f */*.pyc */parsetab.py */parser.out */*~ */*.class
diff --git a/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py b/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py
deleted file mode 100644
index 6031b058130..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/closurecalc/calc.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A calculator parser that makes use of closures. The function make_calculator()
-# returns a function that accepts an input string and returns a result. All
-# lexing rules, parsing rules, and internal state are held inside the function.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-# Make a calculator function
-
-
-def make_calculator():
- import ply.lex as lex
- import ply.yacc as yacc
-
- # ------- Internal calculator state
-
- variables = {} # Dictionary of stored variables
-
- # ------- Calculator tokenizing rules
-
- tokens = (
- 'NAME', 'NUMBER',
- )
-
- literals = ['=', '+', '-', '*', '/', '(', ')']
-
- t_ignore = " \t"
-
- t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
- def t_NUMBER(t):
- r'\d+'
- t.value = int(t.value)
- return t
-
- def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
- def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
- # Build the lexer
- lexer = lex.lex()
-
- # ------- Calculator parsing rules
-
- precedence = (
- ('left', '+', '-'),
- ('left', '*', '/'),
- ('right', 'UMINUS'),
- )
-
- def p_statement_assign(p):
- 'statement : NAME "=" expression'
- variables[p[1]] = p[3]
- p[0] = None
-
- def p_statement_expr(p):
- 'statement : expression'
- p[0] = p[1]
-
- def p_expression_binop(p):
- '''expression : expression '+' expression
- | expression '-' expression
- | expression '*' expression
- | expression '/' expression'''
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
-
- def p_expression_uminus(p):
- "expression : '-' expression %prec UMINUS"
- p[0] = -p[2]
-
- def p_expression_group(p):
- "expression : '(' expression ')'"
- p[0] = p[2]
-
- def p_expression_number(p):
- "expression : NUMBER"
- p[0] = p[1]
-
- def p_expression_name(p):
- "expression : NAME"
- try:
- p[0] = variables[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
- def p_error(p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
- # Build the parser
- parser = yacc.yacc()
-
- # ------- Input function
-
- def input(text):
- result = parser.parse(text, lexer=lexer)
- return result
-
- return input
-
-# Make a calculator object and use it
-calc = make_calculator()
-
-while True:
- try:
- s = raw_input("calc > ")
- except EOFError:
- break
- r = calc(s)
- if r:
- print(r)
diff --git a/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py b/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py
deleted file mode 100644
index 32da745677c..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/hedit/hedit.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# -----------------------------------------------------------------------------
-# hedit.py
-#
-# Paring of Fortran H Edit descriptions (Contributed by Pearu Peterson)
-#
-# These tokens can't be easily tokenized because they are of the following
-# form:
-#
-# nHc1...cn
-#
-# where n is a positive integer and c1 ... cn are characters.
-#
-# This example shows how to modify the state of the lexer to parse
-# such tokens
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-
-tokens = (
- 'H_EDIT_DESCRIPTOR',
-)
-
-# Tokens
-t_ignore = " \t\n"
-
-
-def t_H_EDIT_DESCRIPTOR(t):
- r"\d+H.*" # This grabs all of the remaining text
- i = t.value.index('H')
- n = eval(t.value[:i])
-
- # Adjust the tokenizing position
- t.lexer.lexpos -= len(t.value) - (i + 1 + n)
-
- t.value = t.value[i + 1:i + 1 + n]
- return t
-
-
-def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex()
-lex.runmain()
diff --git a/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py b/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py
deleted file mode 100755
index 43c9506a8aa..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/newclasscalc/calc.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env python
-
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. This is from O'Reilly's
-# "Lex and Yacc", p. 63.
-#
-# Class-based example contributed to PLY by David McNab.
-#
-# Modified to use new-style classes. Test case.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-import ply.lex as lex
-import ply.yacc as yacc
-import os
-
-
-class Parser(object):
- """
- Base class for a lexer/parser that has the rules defined as methods
- """
- tokens = ()
- precedence = ()
-
- def __init__(self, **kw):
- self.debug = kw.get('debug', 0)
- self.names = {}
- try:
- modname = os.path.split(os.path.splitext(__file__)[0])[
- 1] + "_" + self.__class__.__name__
- except:
- modname = "parser" + "_" + self.__class__.__name__
- self.debugfile = modname + ".dbg"
- self.tabmodule = modname + "_" + "parsetab"
- # print self.debugfile, self.tabmodule
-
- # Build the lexer and parser
- lex.lex(module=self, debug=self.debug)
- yacc.yacc(module=self,
- debug=self.debug,
- debugfile=self.debugfile,
- tabmodule=self.tabmodule)
-
- def run(self):
- while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(s)
-
-
-class Calc(Parser):
-
- tokens = (
- 'NAME', 'NUMBER',
- 'PLUS', 'MINUS', 'EXP', 'TIMES', 'DIVIDE', 'EQUALS',
- 'LPAREN', 'RPAREN',
- )
-
- # Tokens
-
- t_PLUS = r'\+'
- t_MINUS = r'-'
- t_EXP = r'\*\*'
- t_TIMES = r'\*'
- t_DIVIDE = r'/'
- t_EQUALS = r'='
- t_LPAREN = r'\('
- t_RPAREN = r'\)'
- t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
- def t_NUMBER(self, t):
- r'\d+'
- try:
- t.value = int(t.value)
- except ValueError:
- print("Integer value too large %s" % t.value)
- t.value = 0
- # print "parsed number %s" % repr(t.value)
- return t
-
- t_ignore = " \t"
-
- def t_newline(self, t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
- def t_error(self, t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
- # Parsing rules
-
- precedence = (
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('left', 'EXP'),
- ('right', 'UMINUS'),
- )
-
- def p_statement_assign(self, p):
- 'statement : NAME EQUALS expression'
- self.names[p[1]] = p[3]
-
- def p_statement_expr(self, p):
- 'statement : expression'
- print(p[1])
-
- def p_expression_binop(self, p):
- """
- expression : expression PLUS expression
- | expression MINUS expression
- | expression TIMES expression
- | expression DIVIDE expression
- | expression EXP expression
- """
- # print [repr(p[i]) for i in range(0,4)]
- if p[2] == '+':
- p[0] = p[1] + p[3]
- elif p[2] == '-':
- p[0] = p[1] - p[3]
- elif p[2] == '*':
- p[0] = p[1] * p[3]
- elif p[2] == '/':
- p[0] = p[1] / p[3]
- elif p[2] == '**':
- p[0] = p[1] ** p[3]
-
- def p_expression_uminus(self, p):
- 'expression : MINUS expression %prec UMINUS'
- p[0] = -p[2]
-
- def p_expression_group(self, p):
- 'expression : LPAREN expression RPAREN'
- p[0] = p[2]
-
- def p_expression_number(self, p):
- 'expression : NUMBER'
- p[0] = p[1]
-
- def p_expression_name(self, p):
- 'expression : NAME'
- try:
- p[0] = self.names[p[1]]
- except LookupError:
- print("Undefined name '%s'" % p[1])
- p[0] = 0
-
- def p_error(self, p):
- if p:
- print("Syntax error at '%s'" % p.value)
- else:
- print("Syntax error at EOF")
-
-if __name__ == '__main__':
- calc = Calc()
- calc.run()
diff --git a/components/script/dom/bindings/codegen/ply/example/optcalc/README b/components/script/dom/bindings/codegen/ply/example/optcalc/README
deleted file mode 100644
index 53dd5fcd559..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/optcalc/README
+++ /dev/null
@@ -1,9 +0,0 @@
-An example showing how to use Python optimized mode.
-To run:
-
- - First run 'python calc.py'
-
- - Then run 'python -OO calc.py'
-
-If working correctly, the second version should run the
-same way.
diff --git a/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py b/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py
deleted file mode 100644
index 0c223e59949..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/optcalc/calc.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. This is from O'Reilly's
-# "Lex and Yacc", p. 63.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-if sys.version_info[0] >= 3:
- raw_input = input
-
-tokens = (
- 'NAME', 'NUMBER',
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS',
- 'LPAREN', 'RPAREN',
-)
-
-# Tokens
-
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_TIMES = r'\*'
-t_DIVIDE = r'/'
-t_EQUALS = r'='
-t_LPAREN = r'\('
-t_RPAREN = r'\)'
-t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
-
-
-def t_NUMBER(t):
- r'\d+'
- try:
- t.value = int(t.value)
- except ValueError:
- print("Integer value too large %s" % t.value)
- t.value = 0
- return t
-
-t_ignore = " \t"
-
-
-def t_newline(t):
- r'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-
-def t_error(t):
- print("Illegal character '%s'" % t.value[0])
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex(optimize=1)
-
-# Parsing rules
-
-precedence = (
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('right', 'UMINUS'),
-)
-
-# dictionary of names
-names = {}
-
-
-def p_statement_assign(t):
- 'statement : NAME EQUALS expression'
- names[t[1]] = t[3]
-
-
-def p_statement_expr(t):
- 'statement : expression'
- print(t[1])
-
-
-def p_expression_binop(t):
- '''expression : expression PLUS expression
- | expression MINUS expression
- | expression TIMES expression
- | expression DIVIDE expression'''
- if t[2] == '+':
- t[0] = t[1] + t[3]
- elif t[2] == '-':
- t[0] = t[1] - t[3]
- elif t[2] == '*':
- t[0] = t[1] * t[3]
- elif t[2] == '/':
- t[0] = t[1] / t[3]
- elif t[2] == '<':
- t[0] = t[1] < t[3]
-
-
-def p_expression_uminus(t):
- 'expression : MINUS expression %prec UMINUS'
- t[0] = -t[2]
-
-
-def p_expression_group(t):
- 'expression : LPAREN expression RPAREN'
- t[0] = t[2]
-
-
-def p_expression_number(t):
- 'expression : NUMBER'
- t[0] = t[1]
-
-
-def p_expression_name(t):
- 'expression : NAME'
- try:
- t[0] = names[t[1]]
- except LookupError:
- print("Undefined name '%s'" % t[1])
- t[0] = 0
-
-
-def p_error(t):
- if t:
- print("Syntax error at '%s'" % t.value)
- else:
- print("Syntax error at EOF")
-
-import ply.yacc as yacc
-yacc.yacc(optimize=1)
-
-while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- yacc.parse(s)
diff --git a/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py b/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py
deleted file mode 100644
index 901c4b9d761..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/unicalc/calc.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -----------------------------------------------------------------------------
-# calc.py
-#
-# A simple calculator with variables. This is from O'Reilly's
-# "Lex and Yacc", p. 63.
-#
-# This example uses unicode strings for tokens, docstrings, and input.
-# -----------------------------------------------------------------------------
-
-import sys
-sys.path.insert(0, "../..")
-
-tokens = (
- 'NAME', 'NUMBER',
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'EQUALS',
- 'LPAREN', 'RPAREN',
-)
-
-# Tokens
-
-t_PLUS = ur'\+'
-t_MINUS = ur'-'
-t_TIMES = ur'\*'
-t_DIVIDE = ur'/'
-t_EQUALS = ur'='
-t_LPAREN = ur'\('
-t_RPAREN = ur'\)'
-t_NAME = ur'[a-zA-Z_][a-zA-Z0-9_]*'
-
-
-def t_NUMBER(t):
- ur'\d+'
- try:
- t.value = int(t.value)
- except ValueError:
- print "Integer value too large", t.value
- t.value = 0
- return t
-
-t_ignore = u" \t"
-
-
-def t_newline(t):
- ur'\n+'
- t.lexer.lineno += t.value.count("\n")
-
-
-def t_error(t):
- print "Illegal character '%s'" % t.value[0]
- t.lexer.skip(1)
-
-# Build the lexer
-import ply.lex as lex
-lex.lex()
-
-# Parsing rules
-
-precedence = (
- ('left', 'PLUS', 'MINUS'),
- ('left', 'TIMES', 'DIVIDE'),
- ('right', 'UMINUS'),
-)
-
-# dictionary of names
-names = {}
-
-
-def p_statement_assign(p):
- 'statement : NAME EQUALS expression'
- names[p[1]] = p[3]
-
-
-def p_statement_expr(p):
- 'statement : expression'
- print p[1]
-
-
-def p_expression_binop(p):
- '''expression : expression PLUS expression
- | expression MINUS expression
- | expression TIMES expression
- | expression DIVIDE expression'''
- if p[2] == u'+':
- p[0] = p[1] + p[3]
- elif p[2] == u'-':
- p[0] = p[1] - p[3]
- elif p[2] == u'*':
- p[0] = p[1] * p[3]
- elif p[2] == u'/':
- p[0] = p[1] / p[3]
-
-
-def p_expression_uminus(p):
- 'expression : MINUS expression %prec UMINUS'
- p[0] = -p[2]
-
-
-def p_expression_group(p):
- 'expression : LPAREN expression RPAREN'
- p[0] = p[2]
-
-
-def p_expression_number(p):
- 'expression : NUMBER'
- p[0] = p[1]
-
-
-def p_expression_name(p):
- 'expression : NAME'
- try:
- p[0] = names[p[1]]
- except LookupError:
- print "Undefined name '%s'" % p[1]
- p[0] = 0
-
-
-def p_error(p):
- if p:
- print "Syntax error at '%s'" % p.value
- else:
- print "Syntax error at EOF"
-
-import ply.yacc as yacc
-yacc.yacc()
-
-while 1:
- try:
- s = raw_input('calc > ')
- except EOFError:
- break
- if not s:
- continue
- yacc.parse(unicode(s))
diff --git a/components/script/dom/bindings/codegen/ply/example/yply/README b/components/script/dom/bindings/codegen/ply/example/yply/README
deleted file mode 100644
index bfadf36436f..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/yply/README
+++ /dev/null
@@ -1,41 +0,0 @@
-yply.py
-
-This example implements a program yply.py that converts a UNIX-yacc
-specification file into a PLY-compatible program. To use, simply
-run it like this:
-
- % python yply.py [-nocode] inputfile.y >myparser.py
-
-The output of this program is Python code. In the output,
-any C code in the original file is included, but is commented out.
-If you use the -nocode option, then all of the C code in the
-original file is just discarded.
-
-To use the resulting grammer with PLY, you'll need to edit the
-myparser.py file. Within this file, some stub code is included that
-can be used to test the construction of the parsing tables. However,
-you'll need to do more editing to make a workable parser.
-
-Disclaimer: This just an example I threw together in an afternoon.
-It might have some bugs. However, it worked when I tried it on
-a yacc-specified C++ parser containing 442 rules and 855 parsing
-states.
-
-Comments:
-
-1. This example does not parse specification files meant for lex/flex.
- You'll need to specify the tokenizer on your own.
-
-2. This example shows a number of interesting PLY features including
-
- - Parsing of literal text delimited by nested parentheses
- - Some interaction between the parser and the lexer.
- - Use of literals in the grammar specification
- - One pass compilation. The program just emits the result,
- there is no intermediate parse tree.
-
-3. This program could probably be cleaned up and enhanced a lot.
- It would be great if someone wanted to work on this (hint).
-
--Dave
-
diff --git a/components/script/dom/bindings/codegen/ply/example/yply/ylex.py b/components/script/dom/bindings/codegen/ply/example/yply/ylex.py
deleted file mode 100644
index 16410e250ee..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/yply/ylex.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# lexer for yacc-grammars
-#
-# Author: David Beazley (dave@dabeaz.com)
-# Date : October 2, 2006
-
-import sys
-sys.path.append("../..")
-
-from ply import *
-
-tokens = (
- 'LITERAL', 'SECTION', 'TOKEN', 'LEFT', 'RIGHT', 'PREC', 'START', 'TYPE', 'NONASSOC', 'UNION', 'CODE',
- 'ID', 'QLITERAL', 'NUMBER',
-)
-
-states = (('code', 'exclusive'),)
-
-literals = [';', ',', '<', '>', '|', ':']
-t_ignore = ' \t'
-
-t_TOKEN = r'%token'
-t_LEFT = r'%left'
-t_RIGHT = r'%right'
-t_NONASSOC = r'%nonassoc'
-t_PREC = r'%prec'
-t_START = r'%start'
-t_TYPE = r'%type'
-t_UNION = r'%union'
-t_ID = r'[a-zA-Z_][a-zA-Z_0-9]*'
-t_QLITERAL = r'''(?P<quote>['"]).*?(?P=quote)'''
-t_NUMBER = r'\d+'
-
-
-def t_SECTION(t):
- r'%%'
- if getattr(t.lexer, "lastsection", 0):
- t.value = t.lexer.lexdata[t.lexpos + 2:]
- t.lexer.lexpos = len(t.lexer.lexdata)
- else:
- t.lexer.lastsection = 0
- return t
-
-# Comments
-
-
-def t_ccomment(t):
- r'/\*(.|\n)*?\*/'
- t.lexer.lineno += t.value.count('\n')
-
-t_ignore_cppcomment = r'//.*'
-
-
-def t_LITERAL(t):
- r'%\{(.|\n)*?%\}'
- t.lexer.lineno += t.value.count("\n")
- return t
-
-
-def t_NEWLINE(t):
- r'\n'
- t.lexer.lineno += 1
-
-
-def t_code(t):
- r'\{'
- t.lexer.codestart = t.lexpos
- t.lexer.level = 1
- t.lexer.begin('code')
-
-
-def t_code_ignore_string(t):
- r'\"([^\\\n]|(\\.))*?\"'
-
-
-def t_code_ignore_char(t):
- r'\'([^\\\n]|(\\.))*?\''
-
-
-def t_code_ignore_comment(t):
- r'/\*(.|\n)*?\*/'
-
-
-def t_code_ignore_cppcom(t):
- r'//.*'
-
-
-def t_code_lbrace(t):
- r'\{'
- t.lexer.level += 1
-
-
-def t_code_rbrace(t):
- r'\}'
- t.lexer.level -= 1
- if t.lexer.level == 0:
- t.type = 'CODE'
- t.value = t.lexer.lexdata[t.lexer.codestart:t.lexpos + 1]
- t.lexer.begin('INITIAL')
- t.lexer.lineno += t.value.count('\n')
- return t
-
-t_code_ignore_nonspace = r'[^\s\}\'\"\{]+'
-t_code_ignore_whitespace = r'\s+'
-t_code_ignore = ""
-
-
-def t_code_error(t):
- raise RuntimeError
-
-
-def t_error(t):
- print("%d: Illegal character '%s'" % (t.lexer.lineno, t.value[0]))
- print(t.value)
- t.lexer.skip(1)
-
-lex.lex()
-
-if __name__ == '__main__':
- lex.runmain()
diff --git a/components/script/dom/bindings/codegen/ply/example/yply/yparse.py b/components/script/dom/bindings/codegen/ply/example/yply/yparse.py
deleted file mode 100644
index 1f2e8d0922c..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/yply/yparse.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# parser for Unix yacc-based grammars
-#
-# Author: David Beazley (dave@dabeaz.com)
-# Date : October 2, 2006
-
-import ylex
-tokens = ylex.tokens
-
-from ply import *
-
-tokenlist = []
-preclist = []
-
-emit_code = 1
-
-
-def p_yacc(p):
- '''yacc : defsection rulesection'''
-
-
-def p_defsection(p):
- '''defsection : definitions SECTION
- | SECTION'''
- p.lexer.lastsection = 1
- print("tokens = ", repr(tokenlist))
- print()
- print("precedence = ", repr(preclist))
- print()
- print("# -------------- RULES ----------------")
- print()
-
-
-def p_rulesection(p):
- '''rulesection : rules SECTION'''
-
- print("# -------------- RULES END ----------------")
- print_code(p[2], 0)
-
-
-def p_definitions(p):
- '''definitions : definitions definition
- | definition'''
-
-
-def p_definition_literal(p):
- '''definition : LITERAL'''
- print_code(p[1], 0)
-
-
-def p_definition_start(p):
- '''definition : START ID'''
- print("start = '%s'" % p[2])
-
-
-def p_definition_token(p):
- '''definition : toktype opttype idlist optsemi '''
- for i in p[3]:
- if i[0] not in "'\"":
- tokenlist.append(i)
- if p[1] == '%left':
- preclist.append(('left',) + tuple(p[3]))
- elif p[1] == '%right':
- preclist.append(('right',) + tuple(p[3]))
- elif p[1] == '%nonassoc':
- preclist.append(('nonassoc',) + tuple(p[3]))
-
-
-def p_toktype(p):
- '''toktype : TOKEN
- | LEFT
- | RIGHT
- | NONASSOC'''
- p[0] = p[1]
-
-
-def p_opttype(p):
- '''opttype : '<' ID '>'
- | empty'''
-
-
-def p_idlist(p):
- '''idlist : idlist optcomma tokenid
- | tokenid'''
- if len(p) == 2:
- p[0] = [p[1]]
- else:
- p[0] = p[1]
- p[1].append(p[3])
-
-
-def p_tokenid(p):
- '''tokenid : ID
- | ID NUMBER
- | QLITERAL
- | QLITERAL NUMBER'''
- p[0] = p[1]
-
-
-def p_optsemi(p):
- '''optsemi : ';'
- | empty'''
-
-
-def p_optcomma(p):
- '''optcomma : ','
- | empty'''
-
-
-def p_definition_type(p):
- '''definition : TYPE '<' ID '>' namelist optsemi'''
- # type declarations are ignored
-
-
-def p_namelist(p):
- '''namelist : namelist optcomma ID
- | ID'''
-
-
-def p_definition_union(p):
- '''definition : UNION CODE optsemi'''
- # Union declarations are ignored
-
-
-def p_rules(p):
- '''rules : rules rule
- | rule'''
- if len(p) == 2:
- rule = p[1]
- else:
- rule = p[2]
-
- # Print out a Python equivalent of this rule
-
- embedded = [] # Embedded actions (a mess)
- embed_count = 0
-
- rulename = rule[0]
- rulecount = 1
- for r in rule[1]:
- # r contains one of the rule possibilities
- print("def p_%s_%d(p):" % (rulename, rulecount))
- prod = []
- prodcode = ""
- for i in range(len(r)):
- item = r[i]
- if item[0] == '{': # A code block
- if i == len(r) - 1:
- prodcode = item
- break
- else:
- # an embedded action
- embed_name = "_embed%d_%s" % (embed_count, rulename)
- prod.append(embed_name)
- embedded.append((embed_name, item))
- embed_count += 1
- else:
- prod.append(item)
- print(" '''%s : %s'''" % (rulename, " ".join(prod)))
- # Emit code
- print_code(prodcode, 4)
- print()
- rulecount += 1
-
- for e, code in embedded:
- print("def p_%s(p):" % e)
- print(" '''%s : '''" % e)
- print_code(code, 4)
- print()
-
-
-def p_rule(p):
- '''rule : ID ':' rulelist ';' '''
- p[0] = (p[1], [p[3]])
-
-
-def p_rule2(p):
- '''rule : ID ':' rulelist morerules ';' '''
- p[4].insert(0, p[3])
- p[0] = (p[1], p[4])
-
-
-def p_rule_empty(p):
- '''rule : ID ':' ';' '''
- p[0] = (p[1], [[]])
-
-
-def p_rule_empty2(p):
- '''rule : ID ':' morerules ';' '''
-
- p[3].insert(0, [])
- p[0] = (p[1], p[3])
-
-
-def p_morerules(p):
- '''morerules : morerules '|' rulelist
- | '|' rulelist
- | '|' '''
-
- if len(p) == 2:
- p[0] = [[]]
- elif len(p) == 3:
- p[0] = [p[2]]
- else:
- p[0] = p[1]
- p[0].append(p[3])
-
-# print("morerules", len(p), p[0])
-
-
-def p_rulelist(p):
- '''rulelist : rulelist ruleitem
- | ruleitem'''
-
- if len(p) == 2:
- p[0] = [p[1]]
- else:
- p[0] = p[1]
- p[1].append(p[2])
-
-
-def p_ruleitem(p):
- '''ruleitem : ID
- | QLITERAL
- | CODE
- | PREC'''
- p[0] = p[1]
-
-
-def p_empty(p):
- '''empty : '''
-
-
-def p_error(p):
- pass
-
-yacc.yacc(debug=0)
-
-
-def print_code(code, indent):
- if not emit_code:
- return
- codelines = code.splitlines()
- for c in codelines:
- print("%s# %s" % (" " * indent, c))
diff --git a/components/script/dom/bindings/codegen/ply/example/yply/yply.py b/components/script/dom/bindings/codegen/ply/example/yply/yply.py
deleted file mode 100755
index e24616c831c..00000000000
--- a/components/script/dom/bindings/codegen/ply/example/yply/yply.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/local/bin/python
-# yply.py
-#
-# Author: David Beazley (dave@dabeaz.com)
-# Date : October 2, 2006
-#
-# Converts a UNIX-yacc specification file into a PLY-compatible
-# specification. To use, simply do this:
-#
-# % python yply.py [-nocode] inputfile.y >myparser.py
-#
-# The output of this program is Python code. In the output,
-# any C code in the original file is included, but is commented.
-# If you use the -nocode option, then all of the C code in the
-# original file is discarded.
-#
-# Disclaimer: This just an example I threw together in an afternoon.
-# It might have some bugs. However, it worked when I tried it on
-# a yacc-specified C++ parser containing 442 rules and 855 parsing
-# states.
-#
-
-import sys
-sys.path.insert(0, "../..")
-
-import ylex
-import yparse
-
-from ply import *
-
-if len(sys.argv) == 1:
- print("usage : yply.py [-nocode] inputfile")
- raise SystemExit
-
-if len(sys.argv) == 3:
- if sys.argv[1] == '-nocode':
- yparse.emit_code = 0
- else:
- print("Unknown option '%s'" % sys.argv[1])
- raise SystemExit
- filename = sys.argv[2]
-else:
- filename = sys.argv[1]
-
-yacc.parse(open(filename).read())
-
-print("""
-if __name__ == '__main__':
- from ply import *
- yacc.yacc()
-""")
diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO b/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO
deleted file mode 100644
index 6eedf425953..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply.egg-info/PKG-INFO
+++ /dev/null
@@ -1,22 +0,0 @@
-Metadata-Version: 1.1
-Name: ply
-Version: 3.10
-Summary: Python Lex & Yacc
-Home-page: http://www.dabeaz.com/ply/
-Author: David Beazley
-Author-email: dave@dabeaz.com
-License: BSD
-Description:
- PLY is yet another implementation of lex and yacc for Python. Some notable
- features include the fact that its implemented entirely in Python and it
- uses LALR(1) parsing which is efficient and well suited for larger grammars.
-
- PLY provides most of the standard lex/yacc features including support for empty
- productions, precedence rules, error recovery, and support for ambiguous grammars.
-
- PLY is extremely easy to use and provides very extensive error checking.
- It is compatible with both Python 2 and Python 3.
-
-Platform: UNKNOWN
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 2
diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt
deleted file mode 100644
index 2dff7dd29b8..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply.egg-info/SOURCES.txt
+++ /dev/null
@@ -1,172 +0,0 @@
-ANNOUNCE
-CHANGES
-MANIFEST.in
-README.md
-TODO
-setup.cfg
-setup.py
-doc/internal.html
-doc/makedoc.py
-doc/ply.html
-example/README
-example/cleanup.sh
-example/BASIC/README
-example/BASIC/basic.py
-example/BASIC/basiclex.py
-example/BASIC/basiclog.py
-example/BASIC/basinterp.py
-example/BASIC/basparse.py
-example/BASIC/dim.bas
-example/BASIC/func.bas
-example/BASIC/gcd.bas
-example/BASIC/gosub.bas
-example/BASIC/hello.bas
-example/BASIC/linear.bas
-example/BASIC/maxsin.bas
-example/BASIC/powers.bas
-example/BASIC/rand.bas
-example/BASIC/sales.bas
-example/BASIC/sears.bas
-example/BASIC/sqrt1.bas
-example/BASIC/sqrt2.bas
-example/GardenSnake/GardenSnake.py
-example/GardenSnake/README
-example/ansic/README
-example/ansic/clex.py
-example/ansic/cparse.py
-example/calc/calc.py
-example/calcdebug/calc.py
-example/calceof/calc.py
-example/classcalc/calc.py
-example/closurecalc/calc.py
-example/hedit/hedit.py
-example/newclasscalc/calc.py
-example/optcalc/README
-example/optcalc/calc.py
-example/unicalc/calc.py
-example/yply/README
-example/yply/ylex.py
-example/yply/yparse.py
-example/yply/yply.py
-ply/__init__.py
-ply/cpp.py
-ply/ctokens.py
-ply/lex.py
-ply/yacc.py
-ply/ygen.py
-ply.egg-info/PKG-INFO
-ply.egg-info/SOURCES.txt
-ply.egg-info/dependency_links.txt
-ply.egg-info/top_level.txt
-test/README
-test/calclex.py
-test/cleanup.sh
-test/lex_closure.py
-test/lex_doc1.py
-test/lex_dup1.py
-test/lex_dup2.py
-test/lex_dup3.py
-test/lex_empty.py
-test/lex_error1.py
-test/lex_error2.py
-test/lex_error3.py
-test/lex_error4.py
-test/lex_hedit.py
-test/lex_ignore.py
-test/lex_ignore2.py
-test/lex_literal1.py
-test/lex_literal2.py
-test/lex_literal3.py
-test/lex_many_tokens.py
-test/lex_module.py
-test/lex_module_import.py
-test/lex_object.py
-test/lex_opt_alias.py
-test/lex_optimize.py
-test/lex_optimize2.py
-test/lex_optimize3.py
-test/lex_re1.py
-test/lex_re2.py
-test/lex_re3.py
-test/lex_rule1.py
-test/lex_rule2.py
-test/lex_rule3.py
-test/lex_state1.py
-test/lex_state2.py
-test/lex_state3.py
-test/lex_state4.py
-test/lex_state5.py
-test/lex_state_noerror.py
-test/lex_state_norule.py
-test/lex_state_try.py
-test/lex_token1.py
-test/lex_token2.py
-test/lex_token3.py
-test/lex_token4.py
-test/lex_token5.py
-test/lex_token_dup.py
-test/testlex.py
-test/testyacc.py
-test/yacc_badargs.py
-test/yacc_badid.py
-test/yacc_badprec.py
-test/yacc_badprec2.py
-test/yacc_badprec3.py
-test/yacc_badrule.py
-test/yacc_badtok.py
-test/yacc_dup.py
-test/yacc_error1.py
-test/yacc_error2.py
-test/yacc_error3.py
-test/yacc_error4.py
-test/yacc_error5.py
-test/yacc_error6.py
-test/yacc_error7.py
-test/yacc_inf.py
-test/yacc_literal.py
-test/yacc_misplaced.py
-test/yacc_missing1.py
-test/yacc_nested.py
-test/yacc_nodoc.py
-test/yacc_noerror.py
-test/yacc_nop.py
-test/yacc_notfunc.py
-test/yacc_notok.py
-test/yacc_prec1.py
-test/yacc_rr.py
-test/yacc_rr_unused.py
-test/yacc_simple.py
-test/yacc_sr.py
-test/yacc_term1.py
-test/yacc_unicode_literals.py
-test/yacc_unused.py
-test/yacc_unused_rule.py
-test/yacc_uprec.py
-test/yacc_uprec2.py
-test/pkg_test1/__init__.py
-test/pkg_test1/parsing/__init__.py
-test/pkg_test1/parsing/calclex.py
-test/pkg_test1/parsing/calcparse.py
-test/pkg_test2/__init__.py
-test/pkg_test2/parsing/__init__.py
-test/pkg_test2/parsing/calclex.py
-test/pkg_test2/parsing/calcparse.py
-test/pkg_test3/__init__.py
-test/pkg_test3/generated/__init__.py
-test/pkg_test3/parsing/__init__.py
-test/pkg_test3/parsing/calclex.py
-test/pkg_test3/parsing/calcparse.py
-test/pkg_test4/__init__.py
-test/pkg_test4/parsing/__init__.py
-test/pkg_test4/parsing/calclex.py
-test/pkg_test4/parsing/calcparse.py
-test/pkg_test5/__init__.py
-test/pkg_test5/parsing/__init__.py
-test/pkg_test5/parsing/calclex.py
-test/pkg_test5/parsing/calcparse.py
-test/pkg_test6/__init__.py
-test/pkg_test6/parsing/__init__.py
-test/pkg_test6/parsing/calclex.py
-test/pkg_test6/parsing/calcparse.py
-test/pkg_test6/parsing/expression.py
-test/pkg_test6/parsing/statement.py \ No newline at end of file
diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt
deleted file mode 100644
index 8b137891791..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply.egg-info/dependency_links.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt b/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt
deleted file mode 100644
index 90412f06833..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply.egg-info/top_level.txt
+++ /dev/null
@@ -1 +0,0 @@
-ply
diff --git a/components/script/dom/bindings/codegen/ply/ply/__init__.py b/components/script/dom/bindings/codegen/ply/ply/__init__.py
deleted file mode 100644
index 6e53cddcf67..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# PLY package
-# Author: David Beazley (dave@dabeaz.com)
-
-__version__ = '3.9'
-__all__ = ['lex','yacc']
diff --git a/components/script/dom/bindings/codegen/ply/ply/cpp.py b/components/script/dom/bindings/codegen/ply/ply/cpp.py
deleted file mode 100644
index b6bfc69614b..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/cpp.py
+++ /dev/null
@@ -1,918 +0,0 @@
-# -----------------------------------------------------------------------------
-# cpp.py
-#
-# Author: David Beazley (http://www.dabeaz.com)
-# Copyright (C) 2007
-# All rights reserved
-#
-# This module implements an ANSI-C style lexical preprocessor for PLY.
-# -----------------------------------------------------------------------------
-from __future__ import generators
-
-import sys
-
-# Some Python 3 compatibility shims
-if sys.version_info.major < 3:
- STRING_TYPES = (str, unicode)
-else:
- STRING_TYPES = str
- xrange = range
-
-# -----------------------------------------------------------------------------
-# Default preprocessor lexer definitions. These tokens are enough to get
-# a basic preprocessor working. Other modules may import these if they want
-# -----------------------------------------------------------------------------
-
-tokens = (
- 'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT1', 'CPP_COMMENT2', 'CPP_POUND','CPP_DPOUND'
-)
-
-literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
-
-# Whitespace
-def t_CPP_WS(t):
- r'\s+'
- t.lexer.lineno += t.value.count("\n")
- return t
-
-t_CPP_POUND = r'\#'
-t_CPP_DPOUND = r'\#\#'
-
-# Identifier
-t_CPP_ID = r'[A-Za-z_][\w_]*'
-
-# Integer literal
-def CPP_INTEGER(t):
- r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU][lL]|[lL][uU]|[uU]|[lL])?)'
- return t
-
-t_CPP_INTEGER = CPP_INTEGER
-
-# Floating literal
-t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
-
-# String literal
-def t_CPP_STRING(t):
- r'\"([^\\\n]|(\\(.|\n)))*?\"'
- t.lexer.lineno += t.value.count("\n")
- return t
-
-# Character constant 'c' or L'c'
-def t_CPP_CHAR(t):
- r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
- t.lexer.lineno += t.value.count("\n")
- return t
-
-# Comment
-def t_CPP_COMMENT1(t):
- r'(/\*(.|\n)*?\*/)'
- ncr = t.value.count("\n")
- t.lexer.lineno += ncr
- # replace with one space or a number of '\n'
- t.type = 'CPP_WS'; t.value = '\n' * ncr if ncr else ' '
- return t
-
-# Line comment
-def t_CPP_COMMENT2(t):
- r'(//.*?(\n|$))'
- # replace with '/n'
- t.type = 'CPP_WS'; t.value = '\n'
- return t
-
-def t_error(t):
- t.type = t.value[0]
- t.value = t.value[0]
- t.lexer.skip(1)
- return t
-
-import re
-import copy
-import time
-import os.path
-
-# -----------------------------------------------------------------------------
-# trigraph()
-#
-# Given an input string, this function replaces all trigraph sequences.
-# The following mapping is used:
-#
-# ??= #
-# ??/ \
-# ??' ^
-# ??( [
-# ??) ]
-# ??! |
-# ??< {
-# ??> }
-# ??- ~
-# -----------------------------------------------------------------------------
-
-_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
-_trigraph_rep = {
- '=':'#',
- '/':'\\',
- "'":'^',
- '(':'[',
- ')':']',
- '!':'|',
- '<':'{',
- '>':'}',
- '-':'~'
-}
-
-def trigraph(input):
- return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
-
-# ------------------------------------------------------------------
-# Macro object
-#
-# This object holds information about preprocessor macros
-#
-# .name - Macro name (string)
-# .value - Macro value (a list of tokens)
-# .arglist - List of argument names
-# .variadic - Boolean indicating whether or not variadic macro
-# .vararg - Name of the variadic parameter
-#
-# When a macro is created, the macro replacement token sequence is
-# pre-scanned and used to create patch lists that are later used
-# during macro expansion
-# ------------------------------------------------------------------
-
-class Macro(object):
- def __init__(self,name,value,arglist=None,variadic=False):
- self.name = name
- self.value = value
- self.arglist = arglist
- self.variadic = variadic
- if variadic:
- self.vararg = arglist[-1]
- self.source = None
-
-# ------------------------------------------------------------------
-# Preprocessor object
-#
-# Object representing a preprocessor. Contains macro definitions,
-# include directories, and other information
-# ------------------------------------------------------------------
-
-class Preprocessor(object):
- def __init__(self,lexer=None):
- if lexer is None:
- lexer = lex.lexer
- self.lexer = lexer
- self.macros = { }
- self.path = []
- self.temp_path = []
-
- # Probe the lexer for selected tokens
- self.lexprobe()
-
- tm = time.localtime()
- self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
- self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
- self.parser = None
-
- # -----------------------------------------------------------------------------
- # tokenize()
- #
- # Utility function. Given a string of text, tokenize into a list of tokens
- # -----------------------------------------------------------------------------
-
- def tokenize(self,text):
- tokens = []
- self.lexer.input(text)
- while True:
- tok = self.lexer.token()
- if not tok: break
- tokens.append(tok)
- return tokens
-
- # ---------------------------------------------------------------------
- # error()
- #
- # Report a preprocessor error/warning of some kind
- # ----------------------------------------------------------------------
-
- def error(self,file,line,msg):
- print("%s:%d %s" % (file,line,msg))
-
- # ----------------------------------------------------------------------
- # lexprobe()
- #
- # This method probes the preprocessor lexer object to discover
- # the token types of symbols that are important to the preprocessor.
- # If this works right, the preprocessor will simply "work"
- # with any suitable lexer regardless of how tokens have been named.
- # ----------------------------------------------------------------------
-
- def lexprobe(self):
-
- # Determine the token type for identifiers
- self.lexer.input("identifier")
- tok = self.lexer.token()
- if not tok or tok.value != "identifier":
- print("Couldn't determine identifier type")
- else:
- self.t_ID = tok.type
-
- # Determine the token type for integers
- self.lexer.input("12345")
- tok = self.lexer.token()
- if not tok or int(tok.value) != 12345:
- print("Couldn't determine integer type")
- else:
- self.t_INTEGER = tok.type
- self.t_INTEGER_TYPE = type(tok.value)
-
- # Determine the token type for strings enclosed in double quotes
- self.lexer.input("\"filename\"")
- tok = self.lexer.token()
- if not tok or tok.value != "\"filename\"":
- print("Couldn't determine string type")
- else:
- self.t_STRING = tok.type
-
- # Determine the token type for whitespace--if any
- self.lexer.input(" ")
- tok = self.lexer.token()
- if not tok or tok.value != " ":
- self.t_SPACE = None
- else:
- self.t_SPACE = tok.type
-
- # Determine the token type for newlines
- self.lexer.input("\n")
- tok = self.lexer.token()
- if not tok or tok.value != "\n":
- self.t_NEWLINE = None
- print("Couldn't determine token for newlines")
- else:
- self.t_NEWLINE = tok.type
-
- self.t_WS = (self.t_SPACE, self.t_NEWLINE)
-
- # Check for other characters used by the preprocessor
- chars = [ '<','>','#','##','\\','(',')',',','.']
- for c in chars:
- self.lexer.input(c)
- tok = self.lexer.token()
- if not tok or tok.value != c:
- print("Unable to lex '%s' required for preprocessor" % c)
-
- # ----------------------------------------------------------------------
- # add_path()
- #
- # Adds a search path to the preprocessor.
- # ----------------------------------------------------------------------
-
- def add_path(self,path):
- self.path.append(path)
-
- # ----------------------------------------------------------------------
- # group_lines()
- #
- # Given an input string, this function splits it into lines. Trailing whitespace
- # is removed. Any line ending with \ is grouped with the next line. This
- # function forms the lowest level of the preprocessor---grouping into text into
- # a line-by-line format.
- # ----------------------------------------------------------------------
-
- def group_lines(self,input):
- lex = self.lexer.clone()
- lines = [x.rstrip() for x in input.splitlines()]
- for i in xrange(len(lines)):
- j = i+1
- while lines[i].endswith('\\') and (j < len(lines)):
- lines[i] = lines[i][:-1]+lines[j]
- lines[j] = ""
- j += 1
-
- input = "\n".join(lines)
- lex.input(input)
- lex.lineno = 1
-
- current_line = []
- while True:
- tok = lex.token()
- if not tok:
- break
- current_line.append(tok)
- if tok.type in self.t_WS and '\n' in tok.value:
- yield current_line
- current_line = []
-
- if current_line:
- yield current_line
-
- # ----------------------------------------------------------------------
- # tokenstrip()
- #
- # Remove leading/trailing whitespace tokens from a token list
- # ----------------------------------------------------------------------
-
- def tokenstrip(self,tokens):
- i = 0
- while i < len(tokens) and tokens[i].type in self.t_WS:
- i += 1
- del tokens[:i]
- i = len(tokens)-1
- while i >= 0 and tokens[i].type in self.t_WS:
- i -= 1
- del tokens[i+1:]
- return tokens
-
-
- # ----------------------------------------------------------------------
- # collect_args()
- #
- # Collects comma separated arguments from a list of tokens. The arguments
- # must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions)
- # where tokencount is the number of tokens consumed, args is a list of arguments,
- # and positions is a list of integers containing the starting index of each
- # argument. Each argument is represented by a list of tokens.
- #
- # When collecting arguments, leading and trailing whitespace is removed
- # from each argument.
- #
- # This function properly handles nested parenthesis and commas---these do not
- # define new arguments.
- # ----------------------------------------------------------------------
-
- def collect_args(self,tokenlist):
- args = []
- positions = []
- current_arg = []
- nesting = 1
- tokenlen = len(tokenlist)
-
- # Search for the opening '('.
- i = 0
- while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
- i += 1
-
- if (i < tokenlen) and (tokenlist[i].value == '('):
- positions.append(i+1)
- else:
- self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
- return 0, [], []
-
- i += 1
-
- while i < tokenlen:
- t = tokenlist[i]
- if t.value == '(':
- current_arg.append(t)
- nesting += 1
- elif t.value == ')':
- nesting -= 1
- if nesting == 0:
- if current_arg:
- args.append(self.tokenstrip(current_arg))
- positions.append(i)
- return i+1,args,positions
- current_arg.append(t)
- elif t.value == ',' and nesting == 1:
- args.append(self.tokenstrip(current_arg))
- positions.append(i+1)
- current_arg = []
- else:
- current_arg.append(t)
- i += 1
-
- # Missing end argument
- self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
- return 0, [],[]
-
- # ----------------------------------------------------------------------
- # macro_prescan()
- #
- # Examine the macro value (token sequence) and identify patch points
- # This is used to speed up macro expansion later on---we'll know
- # right away where to apply patches to the value to form the expansion
- # ----------------------------------------------------------------------
-
- def macro_prescan(self,macro):
- macro.patch = [] # Standard macro arguments
- macro.str_patch = [] # String conversion expansion
- macro.var_comma_patch = [] # Variadic macro comma patch
- i = 0
- while i < len(macro.value):
- if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
- argnum = macro.arglist.index(macro.value[i].value)
- # Conversion of argument to a string
- if i > 0 and macro.value[i-1].value == '#':
- macro.value[i] = copy.copy(macro.value[i])
- macro.value[i].type = self.t_STRING
- del macro.value[i-1]
- macro.str_patch.append((argnum,i-1))
- continue
- # Concatenation
- elif (i > 0 and macro.value[i-1].value == '##'):
- macro.patch.append(('c',argnum,i-1))
- del macro.value[i-1]
- continue
- elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
- macro.patch.append(('c',argnum,i))
- i += 1
- continue
- # Standard expansion
- else:
- macro.patch.append(('e',argnum,i))
- elif macro.value[i].value == '##':
- if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
- ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
- (macro.value[i+1].value == macro.vararg):
- macro.var_comma_patch.append(i-1)
- i += 1
- macro.patch.sort(key=lambda x: x[2],reverse=True)
-
- # ----------------------------------------------------------------------
- # macro_expand_args()
- #
- # Given a Macro and list of arguments (each a token list), this method
- # returns an expanded version of a macro. The return value is a token sequence
- # representing the replacement macro tokens
- # ----------------------------------------------------------------------
-
- def macro_expand_args(self,macro,args):
- # Make a copy of the macro token sequence
- rep = [copy.copy(_x) for _x in macro.value]
-
- # Make string expansion patches. These do not alter the length of the replacement sequence
-
- str_expansion = {}
- for argnum, i in macro.str_patch:
- if argnum not in str_expansion:
- str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
- rep[i] = copy.copy(rep[i])
- rep[i].value = str_expansion[argnum]
-
- # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid
- comma_patch = False
- if macro.variadic and not args[-1]:
- for i in macro.var_comma_patch:
- rep[i] = None
- comma_patch = True
-
- # Make all other patches. The order of these matters. It is assumed that the patch list
- # has been sorted in reverse order of patch location since replacements will cause the
- # size of the replacement sequence to expand from the patch point.
-
- expanded = { }
- for ptype, argnum, i in macro.patch:
- # Concatenation. Argument is left unexpanded
- if ptype == 'c':
- rep[i:i+1] = args[argnum]
- # Normal expansion. Argument is macro expanded first
- elif ptype == 'e':
- if argnum not in expanded:
- expanded[argnum] = self.expand_macros(args[argnum])
- rep[i:i+1] = expanded[argnum]
-
- # Get rid of removed comma if necessary
- if comma_patch:
- rep = [_i for _i in rep if _i]
-
- return rep
-
-
- # ----------------------------------------------------------------------
- # expand_macros()
- #
- # Given a list of tokens, this function performs macro expansion.
- # The expanded argument is a dictionary that contains macros already
- # expanded. This is used to prevent infinite recursion.
- # ----------------------------------------------------------------------
-
- def expand_macros(self,tokens,expanded=None):
- if expanded is None:
- expanded = {}
- i = 0
- while i < len(tokens):
- t = tokens[i]
- if t.type == self.t_ID:
- if t.value in self.macros and t.value not in expanded:
- # Yes, we found a macro match
- expanded[t.value] = True
-
- m = self.macros[t.value]
- if not m.arglist:
- # A simple macro
- ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
- for e in ex:
- e.lineno = t.lineno
- tokens[i:i+1] = ex
- i += len(ex)
- else:
- # A macro with arguments
- j = i + 1
- while j < len(tokens) and tokens[j].type in self.t_WS:
- j += 1
- if tokens[j].value == '(':
- tokcount,args,positions = self.collect_args(tokens[j:])
- if not m.variadic and len(args) != len(m.arglist):
- self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
- i = j + tokcount
- elif m.variadic and len(args) < len(m.arglist)-1:
- if len(m.arglist) > 2:
- self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
- else:
- self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
- i = j + tokcount
- else:
- if m.variadic:
- if len(args) == len(m.arglist)-1:
- args.append([])
- else:
- args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
- del args[len(m.arglist):]
-
- # Get macro replacement text
- rep = self.macro_expand_args(m,args)
- rep = self.expand_macros(rep,expanded)
- for r in rep:
- r.lineno = t.lineno
- tokens[i:j+tokcount] = rep
- i += len(rep)
- del expanded[t.value]
- continue
- elif t.value == '__LINE__':
- t.type = self.t_INTEGER
- t.value = self.t_INTEGER_TYPE(t.lineno)
-
- i += 1
- return tokens
-
- # ----------------------------------------------------------------------
- # evalexpr()
- #
- # Evaluate an expression token sequence for the purposes of evaluating
- # integral expressions.
- # ----------------------------------------------------------------------
-
- def evalexpr(self,tokens):
- # tokens = tokenize(line)
- # Search for defined macros
- i = 0
- while i < len(tokens):
- if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
- j = i + 1
- needparen = False
- result = "0L"
- while j < len(tokens):
- if tokens[j].type in self.t_WS:
- j += 1
- continue
- elif tokens[j].type == self.t_ID:
- if tokens[j].value in self.macros:
- result = "1L"
- else:
- result = "0L"
- if not needparen: break
- elif tokens[j].value == '(':
- needparen = True
- elif tokens[j].value == ')':
- break
- else:
- self.error(self.source,tokens[i].lineno,"Malformed defined()")
- j += 1
- tokens[i].type = self.t_INTEGER
- tokens[i].value = self.t_INTEGER_TYPE(result)
- del tokens[i+1:j+1]
- i += 1
- tokens = self.expand_macros(tokens)
- for i,t in enumerate(tokens):
- if t.type == self.t_ID:
- tokens[i] = copy.copy(t)
- tokens[i].type = self.t_INTEGER
- tokens[i].value = self.t_INTEGER_TYPE("0L")
- elif t.type == self.t_INTEGER:
- tokens[i] = copy.copy(t)
- # Strip off any trailing suffixes
- tokens[i].value = str(tokens[i].value)
- while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
- tokens[i].value = tokens[i].value[:-1]
-
- expr = "".join([str(x.value) for x in tokens])
- expr = expr.replace("&&"," and ")
- expr = expr.replace("||"," or ")
- expr = expr.replace("!"," not ")
- try:
- result = eval(expr)
- except Exception:
- self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
- result = 0
- return result
-
- # ----------------------------------------------------------------------
- # parsegen()
- #
- # Parse an input string/
- # ----------------------------------------------------------------------
- def parsegen(self,input,source=None):
-
- # Replace trigraph sequences
- t = trigraph(input)
- lines = self.group_lines(t)
-
- if not source:
- source = ""
-
- self.define("__FILE__ \"%s\"" % source)
-
- self.source = source
- chunk = []
- enable = True
- iftrigger = False
- ifstack = []
-
- for x in lines:
- for i,tok in enumerate(x):
- if tok.type not in self.t_WS: break
- if tok.value == '#':
- # Preprocessor directive
-
- # insert necessary whitespace instead of eaten tokens
- for tok in x:
- if tok.type in self.t_WS and '\n' in tok.value:
- chunk.append(tok)
-
- dirtokens = self.tokenstrip(x[i+1:])
- if dirtokens:
- name = dirtokens[0].value
- args = self.tokenstrip(dirtokens[1:])
- else:
- name = ""
- args = []
-
- if name == 'define':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- self.define(args)
- elif name == 'include':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- oldfile = self.macros['__FILE__']
- for tok in self.include(args):
- yield tok
- self.macros['__FILE__'] = oldfile
- self.source = source
- elif name == 'undef':
- if enable:
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
- self.undef(args)
- elif name == 'ifdef':
- ifstack.append((enable,iftrigger))
- if enable:
- if not args[0].value in self.macros:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'ifndef':
- ifstack.append((enable,iftrigger))
- if enable:
- if args[0].value in self.macros:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'if':
- ifstack.append((enable,iftrigger))
- if enable:
- result = self.evalexpr(args)
- if not result:
- enable = False
- iftrigger = False
- else:
- iftrigger = True
- elif name == 'elif':
- if ifstack:
- if ifstack[-1][0]: # We only pay attention if outer "if" allows this
- if enable: # If already true, we flip enable False
- enable = False
- elif not iftrigger: # If False, but not triggered yet, we'll check expression
- result = self.evalexpr(args)
- if result:
- enable = True
- iftrigger = True
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
-
- elif name == 'else':
- if ifstack:
- if ifstack[-1][0]:
- if enable:
- enable = False
- elif not iftrigger:
- enable = True
- iftrigger = True
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
-
- elif name == 'endif':
- if ifstack:
- enable,iftrigger = ifstack.pop()
- else:
- self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
- else:
- # Unknown preprocessor directive
- pass
-
- else:
- # Normal text
- if enable:
- chunk.extend(x)
-
- for tok in self.expand_macros(chunk):
- yield tok
- chunk = []
-
- # ----------------------------------------------------------------------
- # include()
- #
- # Implementation of file-inclusion
- # ----------------------------------------------------------------------
-
- def include(self,tokens):
- # Try to extract the filename and then process an include file
- if not tokens:
- return
- if tokens:
- if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
- tokens = self.expand_macros(tokens)
-
- if tokens[0].value == '<':
- # Include <...>
- i = 1
- while i < len(tokens):
- if tokens[i].value == '>':
- break
- i += 1
- else:
- print("Malformed #include <...>")
- return
- filename = "".join([x.value for x in tokens[1:i]])
- path = self.path + [""] + self.temp_path
- elif tokens[0].type == self.t_STRING:
- filename = tokens[0].value[1:-1]
- path = self.temp_path + [""] + self.path
- else:
- print("Malformed #include statement")
- return
- for p in path:
- iname = os.path.join(p,filename)
- try:
- data = open(iname,"r").read()
- dname = os.path.dirname(iname)
- if dname:
- self.temp_path.insert(0,dname)
- for tok in self.parsegen(data,filename):
- yield tok
- if dname:
- del self.temp_path[0]
- break
- except IOError:
- pass
- else:
- print("Couldn't find '%s'" % filename)
-
- # ----------------------------------------------------------------------
- # define()
- #
- # Define a new macro
- # ----------------------------------------------------------------------
-
- def define(self,tokens):
- if isinstance(tokens,STRING_TYPES):
- tokens = self.tokenize(tokens)
-
- linetok = tokens
- try:
- name = linetok[0]
- if len(linetok) > 1:
- mtype = linetok[1]
- else:
- mtype = None
- if not mtype:
- m = Macro(name.value,[])
- self.macros[name.value] = m
- elif mtype.type in self.t_WS:
- # A normal macro
- m = Macro(name.value,self.tokenstrip(linetok[2:]))
- self.macros[name.value] = m
- elif mtype.value == '(':
- # A macro with arguments
- tokcount, args, positions = self.collect_args(linetok[1:])
- variadic = False
- for a in args:
- if variadic:
- print("No more arguments may follow a variadic argument")
- break
- astr = "".join([str(_i.value) for _i in a])
- if astr == "...":
- variadic = True
- a[0].type = self.t_ID
- a[0].value = '__VA_ARGS__'
- variadic = True
- del a[1:]
- continue
- elif astr[-3:] == "..." and a[0].type == self.t_ID:
- variadic = True
- del a[1:]
- # If, for some reason, "." is part of the identifier, strip off the name for the purposes
- # of macro expansion
- if a[0].value[-3:] == '...':
- a[0].value = a[0].value[:-3]
- continue
- if len(a) > 1 or a[0].type != self.t_ID:
- print("Invalid macro argument")
- break
- else:
- mvalue = self.tokenstrip(linetok[1+tokcount:])
- i = 0
- while i < len(mvalue):
- if i+1 < len(mvalue):
- if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
- del mvalue[i]
- continue
- elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
- del mvalue[i+1]
- i += 1
- m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
- self.macro_prescan(m)
- self.macros[name.value] = m
- else:
- print("Bad macro definition")
- except LookupError:
- print("Bad macro definition")
-
- # ----------------------------------------------------------------------
- # undef()
- #
- # Undefine a macro
- # ----------------------------------------------------------------------
-
- def undef(self,tokens):
- id = tokens[0].value
- try:
- del self.macros[id]
- except LookupError:
- pass
-
- # ----------------------------------------------------------------------
- # parse()
- #
- # Parse input text.
- # ----------------------------------------------------------------------
- def parse(self,input,source=None,ignore={}):
- self.ignore = ignore
- self.parser = self.parsegen(input,source)
-
- # ----------------------------------------------------------------------
- # token()
- #
- # Method to return individual tokens
- # ----------------------------------------------------------------------
- def token(self):
- try:
- while True:
- tok = next(self.parser)
- if tok.type not in self.ignore: return tok
- except StopIteration:
- self.parser = None
- return None
-
-if __name__ == '__main__':
- import ply.lex as lex
- lexer = lex.lex()
-
- # Run a preprocessor
- import sys
- f = open(sys.argv[1])
- input = f.read()
-
- p = Preprocessor(lexer)
- p.parse(input,sys.argv[1])
- while True:
- tok = p.token()
- if not tok: break
- print(p.source, tok)
-
-
-
-
-
-
-
-
-
-
-
diff --git a/components/script/dom/bindings/codegen/ply/ply/ctokens.py b/components/script/dom/bindings/codegen/ply/ply/ctokens.py
deleted file mode 100644
index f6f6952d605..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/ctokens.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# ----------------------------------------------------------------------
-# ctokens.py
-#
-# Token specifications for symbols in ANSI C and C++. This file is
-# meant to be used as a library in other tokenizers.
-# ----------------------------------------------------------------------
-
-# Reserved words
-
-tokens = [
- # Literals (identifier, integer constant, float constant, string constant, char const)
- 'ID', 'TYPEID', 'INTEGER', 'FLOAT', 'STRING', 'CHARACTER',
-
- # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
- 'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MODULO',
- 'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
- 'LOR', 'LAND', 'LNOT',
- 'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
-
- # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
- 'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
- 'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
-
- # Increment/decrement (++,--)
- 'INCREMENT', 'DECREMENT',
-
- # Structure dereference (->)
- 'ARROW',
-
- # Ternary operator (?)
- 'TERNARY',
-
- # Delimeters ( ) [ ] { } , . ; :
- 'LPAREN', 'RPAREN',
- 'LBRACKET', 'RBRACKET',
- 'LBRACE', 'RBRACE',
- 'COMMA', 'PERIOD', 'SEMI', 'COLON',
-
- # Ellipsis (...)
- 'ELLIPSIS',
-]
-
-# Operators
-t_PLUS = r'\+'
-t_MINUS = r'-'
-t_TIMES = r'\*'
-t_DIVIDE = r'/'
-t_MODULO = r'%'
-t_OR = r'\|'
-t_AND = r'&'
-t_NOT = r'~'
-t_XOR = r'\^'
-t_LSHIFT = r'<<'
-t_RSHIFT = r'>>'
-t_LOR = r'\|\|'
-t_LAND = r'&&'
-t_LNOT = r'!'
-t_LT = r'<'
-t_GT = r'>'
-t_LE = r'<='
-t_GE = r'>='
-t_EQ = r'=='
-t_NE = r'!='
-
-# Assignment operators
-
-t_EQUALS = r'='
-t_TIMESEQUAL = r'\*='
-t_DIVEQUAL = r'/='
-t_MODEQUAL = r'%='
-t_PLUSEQUAL = r'\+='
-t_MINUSEQUAL = r'-='
-t_LSHIFTEQUAL = r'<<='
-t_RSHIFTEQUAL = r'>>='
-t_ANDEQUAL = r'&='
-t_OREQUAL = r'\|='
-t_XOREQUAL = r'\^='
-
-# Increment/decrement
-t_INCREMENT = r'\+\+'
-t_DECREMENT = r'--'
-
-# ->
-t_ARROW = r'->'
-
-# ?
-t_TERNARY = r'\?'
-
-# Delimeters
-t_LPAREN = r'\('
-t_RPAREN = r'\)'
-t_LBRACKET = r'\['
-t_RBRACKET = r'\]'
-t_LBRACE = r'\{'
-t_RBRACE = r'\}'
-t_COMMA = r','
-t_PERIOD = r'\.'
-t_SEMI = r';'
-t_COLON = r':'
-t_ELLIPSIS = r'\.\.\.'
-
-# Identifiers
-t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
-
-# Integer literal
-t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
-
-# Floating literal
-t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
-
-# String literal
-t_STRING = r'\"([^\\\n]|(\\.))*?\"'
-
-# Character constant 'c' or L'c'
-t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
-
-# Comment (C-Style)
-def t_COMMENT(t):
- r'/\*(.|\n)*?\*/'
- t.lexer.lineno += t.value.count('\n')
- return t
-
-# Comment (C++-Style)
-def t_CPPCOMMENT(t):
- r'//.*\n'
- t.lexer.lineno += 1
- return t
-
-
-
-
-
-
diff --git a/components/script/dom/bindings/codegen/ply/ply/lex.py b/components/script/dom/bindings/codegen/ply/ply/lex.py
deleted file mode 100644
index 3e240d1aa20..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/lex.py
+++ /dev/null
@@ -1,1100 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: lex.py
-#
-# Copyright (C) 2001-2017
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of the David Beazley or Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-
-__version__ = '3.10'
-__tabversion__ = '3.10'
-
-import re
-import sys
-import types
-import copy
-import os
-import inspect
-
-# This tuple contains known string types
-try:
- # Python 2.6
- StringTypes = (types.StringType, types.UnicodeType)
-except AttributeError:
- # Python 3.0
- StringTypes = (str, bytes)
-
-# This regular expression is used to match valid token names
-_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
-
-# Exception thrown when invalid token encountered and no default error
-# handler is defined.
-class LexError(Exception):
- def __init__(self, message, s):
- self.args = (message,)
- self.text = s
-
-
-# Token class. This class is used to represent the tokens produced.
-class LexToken(object):
- def __str__(self):
- return 'LexToken(%s,%r,%d,%d)' % (self.type, self.value, self.lineno, self.lexpos)
-
- def __repr__(self):
- return str(self)
-
-
-# This object is a stand-in for a logging object created by the
-# logging module.
-
-class PlyLogger(object):
- def __init__(self, f):
- self.f = f
-
- def critical(self, msg, *args, **kwargs):
- self.f.write((msg % args) + '\n')
-
- def warning(self, msg, *args, **kwargs):
- self.f.write('WARNING: ' + (msg % args) + '\n')
-
- def error(self, msg, *args, **kwargs):
- self.f.write('ERROR: ' + (msg % args) + '\n')
-
- info = critical
- debug = critical
-
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self, name):
- return self
-
- def __call__(self, *args, **kwargs):
- return self
-
-
-# -----------------------------------------------------------------------------
-# === Lexing Engine ===
-#
-# The following Lexer class implements the lexer runtime. There are only
-# a few public methods and attributes:
-#
-# input() - Store a new string in the lexer
-# token() - Get the next token
-# clone() - Clone the lexer
-#
-# lineno - Current line number
-# lexpos - Current position in the input string
-# -----------------------------------------------------------------------------
-
-class Lexer:
- def __init__(self):
- self.lexre = None # Master regular expression. This is a list of
- # tuples (re, findex) where re is a compiled
- # regular expression and findex is a list
- # mapping regex group numbers to rules
- self.lexretext = None # Current regular expression strings
- self.lexstatere = {} # Dictionary mapping lexer states to master regexs
- self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
- self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
- self.lexstate = 'INITIAL' # Current lexer state
- self.lexstatestack = [] # Stack of lexer states
- self.lexstateinfo = None # State information
- self.lexstateignore = {} # Dictionary of ignored characters for each state
- self.lexstateerrorf = {} # Dictionary of error functions for each state
- self.lexstateeoff = {} # Dictionary of eof functions for each state
- self.lexreflags = 0 # Optional re compile flags
- self.lexdata = None # Actual input data (as a string)
- self.lexpos = 0 # Current position in input text
- self.lexlen = 0 # Length of the input text
- self.lexerrorf = None # Error rule (if any)
- self.lexeoff = None # EOF rule (if any)
- self.lextokens = None # List of valid tokens
- self.lexignore = '' # Ignored characters
- self.lexliterals = '' # Literal characters that can be passed through
- self.lexmodule = None # Module
- self.lineno = 1 # Current line number
- self.lexoptimize = False # Optimized mode
-
- def clone(self, object=None):
- c = copy.copy(self)
-
- # If the object parameter has been supplied, it means we are attaching the
- # lexer to a new object. In this case, we have to rebind all methods in
- # the lexstatere and lexstateerrorf tables.
-
- if object:
- newtab = {}
- for key, ritem in self.lexstatere.items():
- newre = []
- for cre, findex in ritem:
- newfindex = []
- for f in findex:
- if not f or not f[0]:
- newfindex.append(f)
- continue
- newfindex.append((getattr(object, f[0].__name__), f[1]))
- newre.append((cre, newfindex))
- newtab[key] = newre
- c.lexstatere = newtab
- c.lexstateerrorf = {}
- for key, ef in self.lexstateerrorf.items():
- c.lexstateerrorf[key] = getattr(object, ef.__name__)
- c.lexmodule = object
- return c
-
- # ------------------------------------------------------------
- # writetab() - Write lexer information to a table file
- # ------------------------------------------------------------
- def writetab(self, lextab, outputdir=''):
- if isinstance(lextab, types.ModuleType):
- raise IOError("Won't overwrite existing lextab module")
- basetabmodule = lextab.split('.')[-1]
- filename = os.path.join(outputdir, basetabmodule) + '.py'
- with open(filename, 'w') as tf:
- tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
- tf.write('_tabversion = %s\n' % repr(__tabversion__))
- tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
- tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
- tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
- tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
-
- # Rewrite the lexstatere table, replacing function objects with function names
- tabre = {}
- for statename, lre in self.lexstatere.items():
- titem = []
- for (pat, func), retext, renames in zip(lre, self.lexstateretext[statename], self.lexstaterenames[statename]):
- titem.append((retext, _funcs_to_names(func, renames)))
- tabre[statename] = titem
-
- tf.write('_lexstatere = %s\n' % repr(tabre))
- tf.write('_lexstateignore = %s\n' % repr(self.lexstateignore))
-
- taberr = {}
- for statename, ef in self.lexstateerrorf.items():
- taberr[statename] = ef.__name__ if ef else None
- tf.write('_lexstateerrorf = %s\n' % repr(taberr))
-
- tabeof = {}
- for statename, ef in self.lexstateeoff.items():
- tabeof[statename] = ef.__name__ if ef else None
- tf.write('_lexstateeoff = %s\n' % repr(tabeof))
-
- # ------------------------------------------------------------
- # readtab() - Read lexer information from a tab file
- # ------------------------------------------------------------
- def readtab(self, tabfile, fdict):
- if isinstance(tabfile, types.ModuleType):
- lextab = tabfile
- else:
- exec('import %s' % tabfile)
- lextab = sys.modules[tabfile]
-
- if getattr(lextab, '_tabversion', '0.0') != __tabversion__:
- raise ImportError('Inconsistent PLY version')
-
- self.lextokens = lextab._lextokens
- self.lexreflags = lextab._lexreflags
- self.lexliterals = lextab._lexliterals
- self.lextokens_all = self.lextokens | set(self.lexliterals)
- self.lexstateinfo = lextab._lexstateinfo
- self.lexstateignore = lextab._lexstateignore
- self.lexstatere = {}
- self.lexstateretext = {}
- for statename, lre in lextab._lexstatere.items():
- titem = []
- txtitem = []
- for pat, func_name in lre:
- titem.append((re.compile(pat, lextab._lexreflags), _names_to_funcs(func_name, fdict)))
-
- self.lexstatere[statename] = titem
- self.lexstateretext[statename] = txtitem
-
- self.lexstateerrorf = {}
- for statename, ef in lextab._lexstateerrorf.items():
- self.lexstateerrorf[statename] = fdict[ef]
-
- self.lexstateeoff = {}
- for statename, ef in lextab._lexstateeoff.items():
- self.lexstateeoff[statename] = fdict[ef]
-
- self.begin('INITIAL')
-
- # ------------------------------------------------------------
- # input() - Push a new string into the lexer
- # ------------------------------------------------------------
- def input(self, s):
- # Pull off the first character to see if s looks like a string
- c = s[:1]
- if not isinstance(c, StringTypes):
- raise ValueError('Expected a string')
- self.lexdata = s
- self.lexpos = 0
- self.lexlen = len(s)
-
- # ------------------------------------------------------------
- # begin() - Changes the lexing state
- # ------------------------------------------------------------
- def begin(self, state):
- if state not in self.lexstatere:
- raise ValueError('Undefined state')
- self.lexre = self.lexstatere[state]
- self.lexretext = self.lexstateretext[state]
- self.lexignore = self.lexstateignore.get(state, '')
- self.lexerrorf = self.lexstateerrorf.get(state, None)
- self.lexeoff = self.lexstateeoff.get(state, None)
- self.lexstate = state
-
- # ------------------------------------------------------------
- # push_state() - Changes the lexing state and saves old on stack
- # ------------------------------------------------------------
- def push_state(self, state):
- self.lexstatestack.append(self.lexstate)
- self.begin(state)
-
- # ------------------------------------------------------------
- # pop_state() - Restores the previous state
- # ------------------------------------------------------------
- def pop_state(self):
- self.begin(self.lexstatestack.pop())
-
- # ------------------------------------------------------------
- # current_state() - Returns the current lexing state
- # ------------------------------------------------------------
- def current_state(self):
- return self.lexstate
-
- # ------------------------------------------------------------
- # skip() - Skip ahead n characters
- # ------------------------------------------------------------
- def skip(self, n):
- self.lexpos += n
-
- # ------------------------------------------------------------
- # opttoken() - Return the next token from the Lexer
- #
- # Note: This function has been carefully implemented to be as fast
- # as possible. Don't make changes unless you really know what
- # you are doing
- # ------------------------------------------------------------
- def token(self):
- # Make local copies of frequently referenced attributes
- lexpos = self.lexpos
- lexlen = self.lexlen
- lexignore = self.lexignore
- lexdata = self.lexdata
-
- while lexpos < lexlen:
- # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
- if lexdata[lexpos] in lexignore:
- lexpos += 1
- continue
-
- # Look for a regular expression match
- for lexre, lexindexfunc in self.lexre:
- m = lexre.match(lexdata, lexpos)
- if not m:
- continue
-
- # Create a token for return
- tok = LexToken()
- tok.value = m.group()
- tok.lineno = self.lineno
- tok.lexpos = lexpos
-
- i = m.lastindex
- func, tok.type = lexindexfunc[i]
-
- if not func:
- # If no token type was set, it's an ignored token
- if tok.type:
- self.lexpos = m.end()
- return tok
- else:
- lexpos = m.end()
- break
-
- lexpos = m.end()
-
- # If token is processed by a function, call it
-
- tok.lexer = self # Set additional attributes useful in token rules
- self.lexmatch = m
- self.lexpos = lexpos
-
- newtok = func(tok)
-
- # Every function must return a token, if nothing, we just move to next token
- if not newtok:
- lexpos = self.lexpos # This is here in case user has updated lexpos.
- lexignore = self.lexignore # This is here in case there was a state change
- break
-
- # Verify type of the token. If not in the token map, raise an error
- if not self.lexoptimize:
- if newtok.type not in self.lextokens_all:
- raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
- func.__code__.co_filename, func.__code__.co_firstlineno,
- func.__name__, newtok.type), lexdata[lexpos:])
-
- return newtok
- else:
- # No match, see if in literals
- if lexdata[lexpos] in self.lexliterals:
- tok = LexToken()
- tok.value = lexdata[lexpos]
- tok.lineno = self.lineno
- tok.type = tok.value
- tok.lexpos = lexpos
- self.lexpos = lexpos + 1
- return tok
-
- # No match. Call t_error() if defined.
- if self.lexerrorf:
- tok = LexToken()
- tok.value = self.lexdata[lexpos:]
- tok.lineno = self.lineno
- tok.type = 'error'
- tok.lexer = self
- tok.lexpos = lexpos
- self.lexpos = lexpos
- newtok = self.lexerrorf(tok)
- if lexpos == self.lexpos:
- # Error method didn't change text position at all. This is an error.
- raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
- lexpos = self.lexpos
- if not newtok:
- continue
- return newtok
-
- self.lexpos = lexpos
- raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos], lexpos), lexdata[lexpos:])
-
- if self.lexeoff:
- tok = LexToken()
- tok.type = 'eof'
- tok.value = ''
- tok.lineno = self.lineno
- tok.lexpos = lexpos
- tok.lexer = self
- self.lexpos = lexpos
- newtok = self.lexeoff(tok)
- return newtok
-
- self.lexpos = lexpos + 1
- if self.lexdata is None:
- raise RuntimeError('No input string given with input()')
- return None
-
- # Iterator interface
- def __iter__(self):
- return self
-
- def next(self):
- t = self.token()
- if t is None:
- raise StopIteration
- return t
-
- __next__ = next
-
-# -----------------------------------------------------------------------------
-# ==== Lex Builder ===
-#
-# The functions and classes below are used to collect lexing information
-# and build a Lexer object from it.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# _get_regex(func)
-#
-# Returns the regular expression assigned to a function either as a doc string
-# or as a .regex attribute attached by the @TOKEN decorator.
-# -----------------------------------------------------------------------------
-def _get_regex(func):
- return getattr(func, 'regex', func.__doc__)
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the yacc() call if none was provided.
-# -----------------------------------------------------------------------------
-def get_caller_module_dict(levels):
- f = sys._getframe(levels)
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
- return ldict
-
-# -----------------------------------------------------------------------------
-# _funcs_to_names()
-#
-# Given a list of regular expression functions, this converts it to a list
-# suitable for output to a table file
-# -----------------------------------------------------------------------------
-def _funcs_to_names(funclist, namelist):
- result = []
- for f, name in zip(funclist, namelist):
- if f and f[0]:
- result.append((name, f[1]))
- else:
- result.append(f)
- return result
-
-# -----------------------------------------------------------------------------
-# _names_to_funcs()
-#
-# Given a list of regular expression function names, this converts it back to
-# functions.
-# -----------------------------------------------------------------------------
-def _names_to_funcs(namelist, fdict):
- result = []
- for n in namelist:
- if n and n[0]:
- result.append((fdict[n[0]], n[1]))
- else:
- result.append(n)
- return result
-
-# -----------------------------------------------------------------------------
-# _form_master_re()
-#
-# This function takes a list of all of the regex components and attempts to
-# form the master regular expression. Given limitations in the Python re
-# module, it may be necessary to break the master regex into separate expressions.
-# -----------------------------------------------------------------------------
-def _form_master_re(relist, reflags, ldict, toknames):
- if not relist:
- return []
- regex = '|'.join(relist)
- try:
- lexre = re.compile(regex, reflags)
-
- # Build the index to function map for the matching engine
- lexindexfunc = [None] * (max(lexre.groupindex.values()) + 1)
- lexindexnames = lexindexfunc[:]
-
- for f, i in lexre.groupindex.items():
- handle = ldict.get(f, None)
- if type(handle) in (types.FunctionType, types.MethodType):
- lexindexfunc[i] = (handle, toknames[f])
- lexindexnames[i] = f
- elif handle is not None:
- lexindexnames[i] = f
- if f.find('ignore_') > 0:
- lexindexfunc[i] = (None, None)
- else:
- lexindexfunc[i] = (None, toknames[f])
-
- return [(lexre, lexindexfunc)], [regex], [lexindexnames]
- except Exception:
- m = int(len(relist)/2)
- if m == 0:
- m = 1
- llist, lre, lnames = _form_master_re(relist[:m], reflags, ldict, toknames)
- rlist, rre, rnames = _form_master_re(relist[m:], reflags, ldict, toknames)
- return (llist+rlist), (lre+rre), (lnames+rnames)
-
-# -----------------------------------------------------------------------------
-# def _statetoken(s,names)
-#
-# Given a declaration name s of the form "t_" and a dictionary whose keys are
-# state names, this function returns a tuple (states,tokenname) where states
-# is a tuple of state names and tokenname is the name of the token. For example,
-# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
-# -----------------------------------------------------------------------------
-def _statetoken(s, names):
- nonstate = 1
- parts = s.split('_')
- for i, part in enumerate(parts[1:], 1):
- if part not in names and part != 'ANY':
- break
-
- if i > 1:
- states = tuple(parts[1:i])
- else:
- states = ('INITIAL',)
-
- if 'ANY' in states:
- states = tuple(names)
-
- tokenname = '_'.join(parts[i:])
- return (states, tokenname)
-
-
-# -----------------------------------------------------------------------------
-# LexerReflect()
-#
-# This class represents information needed to build a lexer as extracted from a
-# user's input file.
-# -----------------------------------------------------------------------------
-class LexerReflect(object):
- def __init__(self, ldict, log=None, reflags=0):
- self.ldict = ldict
- self.error_func = None
- self.tokens = []
- self.reflags = reflags
- self.stateinfo = {'INITIAL': 'inclusive'}
- self.modules = set()
- self.error = False
- self.log = PlyLogger(sys.stderr) if log is None else log
-
- # Get all of the basic information
- def get_all(self):
- self.get_tokens()
- self.get_literals()
- self.get_states()
- self.get_rules()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_tokens()
- self.validate_literals()
- self.validate_rules()
- return self.error
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.ldict.get('tokens', None)
- if not tokens:
- self.log.error('No token list is defined')
- self.error = True
- return
-
- if not isinstance(tokens, (list, tuple)):
- self.log.error('tokens must be a list or tuple')
- self.error = True
- return
-
- if not tokens:
- self.log.error('tokens is empty')
- self.error = True
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- terminals = {}
- for n in self.tokens:
- if not _is_identifier.match(n):
- self.log.error("Bad token name '%s'", n)
- self.error = True
- if n in terminals:
- self.log.warning("Token '%s' multiply defined", n)
- terminals[n] = 1
-
- # Get the literals specifier
- def get_literals(self):
- self.literals = self.ldict.get('literals', '')
- if not self.literals:
- self.literals = ''
-
- # Validate literals
- def validate_literals(self):
- try:
- for c in self.literals:
- if not isinstance(c, StringTypes) or len(c) > 1:
- self.log.error('Invalid literal %s. Must be a single character', repr(c))
- self.error = True
-
- except TypeError:
- self.log.error('Invalid literals specification. literals must be a sequence of characters')
- self.error = True
-
- def get_states(self):
- self.states = self.ldict.get('states', None)
- # Build statemap
- if self.states:
- if not isinstance(self.states, (tuple, list)):
- self.log.error('states must be defined as a tuple or list')
- self.error = True
- else:
- for s in self.states:
- if not isinstance(s, tuple) or len(s) != 2:
- self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')", repr(s))
- self.error = True
- continue
- name, statetype = s
- if not isinstance(name, StringTypes):
- self.log.error('State name %s must be a string', repr(name))
- self.error = True
- continue
- if not (statetype == 'inclusive' or statetype == 'exclusive'):
- self.log.error("State type for state %s must be 'inclusive' or 'exclusive'", name)
- self.error = True
- continue
- if name in self.stateinfo:
- self.log.error("State '%s' already defined", name)
- self.error = True
- continue
- self.stateinfo[name] = statetype
-
- # Get all of the symbols with a t_ prefix and sort them into various
- # categories (functions, strings, error functions, and ignore characters)
-
- def get_rules(self):
- tsymbols = [f for f in self.ldict if f[:2] == 't_']
-
- # Now build up a list of functions and a list of strings
- self.toknames = {} # Mapping of symbols to token names
- self.funcsym = {} # Symbols defined as functions
- self.strsym = {} # Symbols defined as strings
- self.ignore = {} # Ignore strings by state
- self.errorf = {} # Error functions by state
- self.eoff = {} # EOF functions by state
-
- for s in self.stateinfo:
- self.funcsym[s] = []
- self.strsym[s] = []
-
- if len(tsymbols) == 0:
- self.log.error('No rules of the form t_rulename are defined')
- self.error = True
- return
-
- for f in tsymbols:
- t = self.ldict[f]
- states, tokname = _statetoken(f, self.stateinfo)
- self.toknames[f] = tokname
-
- if hasattr(t, '__call__'):
- if tokname == 'error':
- for s in states:
- self.errorf[s] = t
- elif tokname == 'eof':
- for s in states:
- self.eoff[s] = t
- elif tokname == 'ignore':
- line = t.__code__.co_firstlineno
- file = t.__code__.co_filename
- self.log.error("%s:%d: Rule '%s' must be defined as a string", file, line, t.__name__)
- self.error = True
- else:
- for s in states:
- self.funcsym[s].append((f, t))
- elif isinstance(t, StringTypes):
- if tokname == 'ignore':
- for s in states:
- self.ignore[s] = t
- if '\\' in t:
- self.log.warning("%s contains a literal backslash '\\'", f)
-
- elif tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", f)
- self.error = True
- else:
- for s in states:
- self.strsym[s].append((f, t))
- else:
- self.log.error('%s not defined as a function or string', f)
- self.error = True
-
- # Sort the functions by line number
- for f in self.funcsym.values():
- f.sort(key=lambda x: x[1].__code__.co_firstlineno)
-
- # Sort the strings by regular expression length
- for s in self.strsym.values():
- s.sort(key=lambda x: len(x[1]), reverse=True)
-
- # Validate all of the t_rules collected
- def validate_rules(self):
- for state in self.stateinfo:
- # Validate all rules defined by functions
-
- for fname, f in self.funcsym[state]:
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- module = inspect.getmodule(f)
- self.modules.add(module)
-
- tokname = self.toknames[fname]
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = f.__code__.co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
- self.error = True
- continue
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
- self.error = True
- continue
-
- if not _get_regex(f):
- self.log.error("%s:%d: No regular expression defined for rule '%s'", file, line, f.__name__)
- self.error = True
- continue
-
- try:
- c = re.compile('(?P<%s>%s)' % (fname, _get_regex(f)), self.reflags)
- if c.match(''):
- self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file, line, f.__name__)
- self.error = True
- except re.error as e:
- self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file, line, f.__name__, e)
- if '#' in _get_regex(f):
- self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'", file, line, f.__name__)
- self.error = True
-
- # Validate all rules defined by strings
- for name, r in self.strsym[state]:
- tokname = self.toknames[name]
- if tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", name)
- self.error = True
- continue
-
- if tokname not in self.tokens and tokname.find('ignore_') < 0:
- self.log.error("Rule '%s' defined for an unspecified token %s", name, tokname)
- self.error = True
- continue
-
- try:
- c = re.compile('(?P<%s>%s)' % (name, r), self.reflags)
- if (c.match('')):
- self.log.error("Regular expression for rule '%s' matches empty string", name)
- self.error = True
- except re.error as e:
- self.log.error("Invalid regular expression for rule '%s'. %s", name, e)
- if '#' in r:
- self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'", name)
- self.error = True
-
- if not self.funcsym[state] and not self.strsym[state]:
- self.log.error("No rules defined for state '%s'", state)
- self.error = True
-
- # Validate the error function
- efunc = self.errorf.get(state, None)
- if efunc:
- f = efunc
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- module = inspect.getmodule(f)
- self.modules.add(module)
-
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = f.__code__.co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments", file, line, f.__name__)
- self.error = True
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file, line, f.__name__)
- self.error = True
-
- for module in self.modules:
- self.validate_module(module)
-
- # -----------------------------------------------------------------------------
- # validate_module()
- #
- # This checks to see if there are duplicated t_rulename() functions or strings
- # in the parser input file. This is done using a simple regular expression
- # match on each line in the source code of the given module.
- # -----------------------------------------------------------------------------
-
- def validate_module(self, module):
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- return
-
- fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
- sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
-
- counthash = {}
- linen += 1
- for line in lines:
- m = fre.match(line)
- if not m:
- m = sre.match(line)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- filename = inspect.getsourcefile(module)
- self.log.error('%s:%d: Rule %s redefined. Previously defined on line %d', filename, linen, name, prev)
- self.error = True
- linen += 1
-
-# -----------------------------------------------------------------------------
-# lex(module)
-#
-# Build all of the regular expression rules from definitions in the supplied module
-# -----------------------------------------------------------------------------
-def lex(module=None, object=None, debug=False, optimize=False, lextab='lextab',
- reflags=int(re.VERBOSE), nowarn=False, outputdir=None, debuglog=None, errorlog=None):
-
- if lextab is None:
- lextab = 'lextab'
-
- global lexer
-
- ldict = None
- stateinfo = {'INITIAL': 'inclusive'}
- lexobj = Lexer()
- lexobj.lexoptimize = optimize
- global token, input
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- if debug:
- if debuglog is None:
- debuglog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the lexer
- if object:
- module = object
-
- # Get the module dictionary used for the parser
- if module:
- _items = [(k, getattr(module, k)) for k in dir(module)]
- ldict = dict(_items)
- # If no __file__ attribute is available, try to obtain it from the __module__ instead
- if '__file__' not in ldict:
- ldict['__file__'] = sys.modules[ldict['__module__']].__file__
- else:
- ldict = get_caller_module_dict(2)
-
- # Determine if the module is package of a package or not.
- # If so, fix the tabmodule setting so that tables load correctly
- pkg = ldict.get('__package__')
- if pkg and isinstance(lextab, str):
- if '.' not in lextab:
- lextab = pkg + '.' + lextab
-
- # Collect parser information from the dictionary
- linfo = LexerReflect(ldict, log=errorlog, reflags=reflags)
- linfo.get_all()
- if not optimize:
- if linfo.validate_all():
- raise SyntaxError("Can't build lexer")
-
- if optimize and lextab:
- try:
- lexobj.readtab(lextab, ldict)
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
- return lexobj
-
- except ImportError:
- pass
-
- # Dump some basic debugging information
- if debug:
- debuglog.info('lex: tokens = %r', linfo.tokens)
- debuglog.info('lex: literals = %r', linfo.literals)
- debuglog.info('lex: states = %r', linfo.stateinfo)
-
- # Build a dictionary of valid token names
- lexobj.lextokens = set()
- for n in linfo.tokens:
- lexobj.lextokens.add(n)
-
- # Get literals specification
- if isinstance(linfo.literals, (list, tuple)):
- lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
- else:
- lexobj.lexliterals = linfo.literals
-
- lexobj.lextokens_all = lexobj.lextokens | set(lexobj.lexliterals)
-
- # Get the stateinfo dictionary
- stateinfo = linfo.stateinfo
-
- regexs = {}
- # Build the master regular expressions
- for state in stateinfo:
- regex_list = []
-
- # Add rules defined by functions first
- for fname, f in linfo.funcsym[state]:
- line = f.__code__.co_firstlineno
- file = f.__code__.co_filename
- regex_list.append('(?P<%s>%s)' % (fname, _get_regex(f)))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", fname, _get_regex(f), state)
-
- # Now add all of the simple rules
- for name, r in linfo.strsym[state]:
- regex_list.append('(?P<%s>%s)' % (name, r))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')", name, r, state)
-
- regexs[state] = regex_list
-
- # Build the master regular expressions
-
- if debug:
- debuglog.info('lex: ==== MASTER REGEXS FOLLOW ====')
-
- for state in regexs:
- lexre, re_text, re_names = _form_master_re(regexs[state], reflags, ldict, linfo.toknames)
- lexobj.lexstatere[state] = lexre
- lexobj.lexstateretext[state] = re_text
- lexobj.lexstaterenames[state] = re_names
- if debug:
- for i, text in enumerate(re_text):
- debuglog.info("lex: state '%s' : regex[%d] = '%s'", state, i, text)
-
- # For inclusive states, we need to add the regular expressions from the INITIAL state
- for state, stype in stateinfo.items():
- if state != 'INITIAL' and stype == 'inclusive':
- lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
- lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
- lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
-
- lexobj.lexstateinfo = stateinfo
- lexobj.lexre = lexobj.lexstatere['INITIAL']
- lexobj.lexretext = lexobj.lexstateretext['INITIAL']
- lexobj.lexreflags = reflags
-
- # Set up ignore variables
- lexobj.lexstateignore = linfo.ignore
- lexobj.lexignore = lexobj.lexstateignore.get('INITIAL', '')
-
- # Set up error functions
- lexobj.lexstateerrorf = linfo.errorf
- lexobj.lexerrorf = linfo.errorf.get('INITIAL', None)
- if not lexobj.lexerrorf:
- errorlog.warning('No t_error rule is defined')
-
- # Set up eof functions
- lexobj.lexstateeoff = linfo.eoff
- lexobj.lexeoff = linfo.eoff.get('INITIAL', None)
-
- # Check state information for ignore and error rules
- for s, stype in stateinfo.items():
- if stype == 'exclusive':
- if s not in linfo.errorf:
- errorlog.warning("No error rule is defined for exclusive state '%s'", s)
- if s not in linfo.ignore and lexobj.lexignore:
- errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
- elif stype == 'inclusive':
- if s not in linfo.errorf:
- linfo.errorf[s] = linfo.errorf.get('INITIAL', None)
- if s not in linfo.ignore:
- linfo.ignore[s] = linfo.ignore.get('INITIAL', '')
-
- # Create global versions of the token() and input() functions
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
-
- # If in optimize mode, we write the lextab
- if lextab and optimize:
- if outputdir is None:
- # If no output directory is set, the location of the output files
- # is determined according to the following rules:
- # - If lextab specifies a package, files go into that package directory
- # - Otherwise, files go in the same directory as the specifying module
- if isinstance(lextab, types.ModuleType):
- srcfile = lextab.__file__
- else:
- if '.' not in lextab:
- srcfile = ldict['__file__']
- else:
- parts = lextab.split('.')
- pkgname = '.'.join(parts[:-1])
- exec('import %s' % pkgname)
- srcfile = getattr(sys.modules[pkgname], '__file__', '')
- outputdir = os.path.dirname(srcfile)
- try:
- lexobj.writetab(lextab, outputdir)
- except IOError as e:
- errorlog.warning("Couldn't write lextab module %r. %s" % (lextab, e))
-
- return lexobj
-
-# -----------------------------------------------------------------------------
-# runmain()
-#
-# This runs the lexer as a main program
-# -----------------------------------------------------------------------------
-
-def runmain(lexer=None, data=None):
- if not data:
- try:
- filename = sys.argv[1]
- f = open(filename)
- data = f.read()
- f.close()
- except IndexError:
- sys.stdout.write('Reading from standard input (type EOF to end):\n')
- data = sys.stdin.read()
-
- if lexer:
- _input = lexer.input
- else:
- _input = input
- _input(data)
- if lexer:
- _token = lexer.token
- else:
- _token = token
-
- while True:
- tok = _token()
- if not tok:
- break
- sys.stdout.write('(%s,%r,%d,%d)\n' % (tok.type, tok.value, tok.lineno, tok.lexpos))
-
-# -----------------------------------------------------------------------------
-# @TOKEN(regex)
-#
-# This decorator function can be used to set the regex expression on a function
-# when its docstring might need to be set in an alternative way
-# -----------------------------------------------------------------------------
-
-def TOKEN(r):
- def set_regex(f):
- if hasattr(r, '__call__'):
- f.regex = _get_regex(r)
- else:
- f.regex = r
- return f
- return set_regex
-
-# Alternative spelling of the TOKEN decorator
-Token = TOKEN
-
diff --git a/components/script/dom/bindings/codegen/ply/ply/yacc.py b/components/script/dom/bindings/codegen/ply/ply/yacc.py
deleted file mode 100644
index 03bd86ee078..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/yacc.py
+++ /dev/null
@@ -1,3494 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: yacc.py
-#
-# Copyright (C) 2001-2017
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of the David Beazley or Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-#
-# This implements an LR parser that is constructed from grammar rules defined
-# as Python functions. The grammer is specified by supplying the BNF inside
-# Python documentation strings. The inspiration for this technique was borrowed
-# from John Aycock's Spark parsing system. PLY might be viewed as cross between
-# Spark and the GNU bison utility.
-#
-# The current implementation is only somewhat object-oriented. The
-# LR parser itself is defined in terms of an object (which allows multiple
-# parsers to co-exist). However, most of the variables used during table
-# construction are defined in terms of global variables. Users shouldn't
-# notice unless they are trying to define multiple parsers at the same
-# time using threads (in which case they should have their head examined).
-#
-# This implementation supports both SLR and LALR(1) parsing. LALR(1)
-# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
-# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
-# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
-# by the more efficient DeRemer and Pennello algorithm.
-#
-# :::::::: WARNING :::::::
-#
-# Construction of LR parsing tables is fairly complicated and expensive.
-# To make this module run fast, a *LOT* of work has been put into
-# optimization---often at the expensive of readability and what might
-# consider to be good Python "coding style." Modify the code at your
-# own risk!
-# ----------------------------------------------------------------------------
-
-import re
-import types
-import sys
-import os.path
-import inspect
-import base64
-import warnings
-
-__version__ = '3.10'
-__tabversion__ = '3.10'
-
-#-----------------------------------------------------------------------------
-# === User configurable parameters ===
-#
-# Change these to modify the default behavior of yacc (if you wish)
-#-----------------------------------------------------------------------------
-
-yaccdebug = True # Debugging mode. If set, yacc generates a
- # a 'parser.out' file in the current directory
-
-debug_file = 'parser.out' # Default name of the debugging file
-tab_module = 'parsetab' # Default name of the table module
-default_lr = 'LALR' # Default LR table generation method
-
-error_count = 3 # Number of symbols that must be shifted to leave recovery mode
-
-yaccdevel = False # Set to True if developing yacc. This turns off optimized
- # implementations of certain functions.
-
-resultlimit = 40 # Size limit of results when running in debug mode.
-
-pickle_protocol = 0 # Protocol to use when writing pickle files
-
-# String type-checking compatibility
-if sys.version_info[0] < 3:
- string_types = basestring
-else:
- string_types = str
-
-MAXINT = sys.maxsize
-
-# This object is a stand-in for a logging object created by the
-# logging module. PLY will use this by default to create things
-# such as the parser.out file. If a user wants more detailed
-# information, they can create their own logging object and pass
-# it into PLY.
-
-class PlyLogger(object):
- def __init__(self, f):
- self.f = f
-
- def debug(self, msg, *args, **kwargs):
- self.f.write((msg % args) + '\n')
-
- info = debug
-
- def warning(self, msg, *args, **kwargs):
- self.f.write('WARNING: ' + (msg % args) + '\n')
-
- def error(self, msg, *args, **kwargs):
- self.f.write('ERROR: ' + (msg % args) + '\n')
-
- critical = debug
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self, name):
- return self
-
- def __call__(self, *args, **kwargs):
- return self
-
-# Exception raised for yacc-related errors
-class YaccError(Exception):
- pass
-
-# Format the result message that the parser produces when running in debug mode.
-def format_result(r):
- repr_str = repr(r)
- if '\n' in repr_str:
- repr_str = repr(repr_str)
- if len(repr_str) > resultlimit:
- repr_str = repr_str[:resultlimit] + ' ...'
- result = '<%s @ 0x%x> (%s)' % (type(r).__name__, id(r), repr_str)
- return result
-
-# Format stack entries when the parser is running in debug mode
-def format_stack_entry(r):
- repr_str = repr(r)
- if '\n' in repr_str:
- repr_str = repr(repr_str)
- if len(repr_str) < 16:
- return repr_str
- else:
- return '<%s @ 0x%x>' % (type(r).__name__, id(r))
-
-# Panic mode error recovery support. This feature is being reworked--much of the
-# code here is to offer a deprecation/backwards compatible transition
-
-_errok = None
-_token = None
-_restart = None
-_warnmsg = '''PLY: Don't use global functions errok(), token(), and restart() in p_error().
-Instead, invoke the methods on the associated parser instance:
-
- def p_error(p):
- ...
- # Use parser.errok(), parser.token(), parser.restart()
- ...
-
- parser = yacc.yacc()
-'''
-
-def errok():
- warnings.warn(_warnmsg)
- return _errok()
-
-def restart():
- warnings.warn(_warnmsg)
- return _restart()
-
-def token():
- warnings.warn(_warnmsg)
- return _token()
-
-# Utility function to call the p_error() function with some deprecation hacks
-def call_errorfunc(errorfunc, token, parser):
- global _errok, _token, _restart
- _errok = parser.errok
- _token = parser.token
- _restart = parser.restart
- r = errorfunc(token)
- try:
- del _errok, _token, _restart
- except NameError:
- pass
- return r
-
-#-----------------------------------------------------------------------------
-# === LR Parsing Engine ===
-#
-# The following classes are used for the LR parser itself. These are not
-# used during table construction and are independent of the actual LR
-# table generation algorithm
-#-----------------------------------------------------------------------------
-
-# This class is used to hold non-terminal grammar symbols during parsing.
-# It normally has the following attributes set:
-# .type = Grammar symbol type
-# .value = Symbol value
-# .lineno = Starting line number
-# .endlineno = Ending line number (optional, set automatically)
-# .lexpos = Starting lex position
-# .endlexpos = Ending lex position (optional, set automatically)
-
-class YaccSymbol:
- def __str__(self):
- return self.type
-
- def __repr__(self):
- return str(self)
-
-# This class is a wrapper around the objects actually passed to each
-# grammar rule. Index lookup and assignment actually assign the
-# .value attribute of the underlying YaccSymbol object.
-# The lineno() method returns the line number of a given
-# item (or 0 if not defined). The linespan() method returns
-# a tuple of (startline,endline) representing the range of lines
-# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
-# representing the range of positional information for a symbol.
-
-class YaccProduction:
- def __init__(self, s, stack=None):
- self.slice = s
- self.stack = stack
- self.lexer = None
- self.parser = None
-
- def __getitem__(self, n):
- if isinstance(n, slice):
- return [s.value for s in self.slice[n]]
- elif n >= 0:
- return self.slice[n].value
- else:
- return self.stack[n].value
-
- def __setitem__(self, n, v):
- self.slice[n].value = v
-
- def __getslice__(self, i, j):
- return [s.value for s in self.slice[i:j]]
-
- def __len__(self):
- return len(self.slice)
-
- def lineno(self, n):
- return getattr(self.slice[n], 'lineno', 0)
-
- def set_lineno(self, n, lineno):
- self.slice[n].lineno = lineno
-
- def linespan(self, n):
- startline = getattr(self.slice[n], 'lineno', 0)
- endline = getattr(self.slice[n], 'endlineno', startline)
- return startline, endline
-
- def lexpos(self, n):
- return getattr(self.slice[n], 'lexpos', 0)
-
- def lexspan(self, n):
- startpos = getattr(self.slice[n], 'lexpos', 0)
- endpos = getattr(self.slice[n], 'endlexpos', startpos)
- return startpos, endpos
-
- def error(self):
- raise SyntaxError
-
-# -----------------------------------------------------------------------------
-# == LRParser ==
-#
-# The LR Parsing engine.
-# -----------------------------------------------------------------------------
-
-class LRParser:
- def __init__(self, lrtab, errorf):
- self.productions = lrtab.lr_productions
- self.action = lrtab.lr_action
- self.goto = lrtab.lr_goto
- self.errorfunc = errorf
- self.set_defaulted_states()
- self.errorok = True
-
- def errok(self):
- self.errorok = True
-
- def restart(self):
- del self.statestack[:]
- del self.symstack[:]
- sym = YaccSymbol()
- sym.type = '$end'
- self.symstack.append(sym)
- self.statestack.append(0)
-
- # Defaulted state support.
- # This method identifies parser states where there is only one possible reduction action.
- # For such states, the parser can make a choose to make a rule reduction without consuming
- # the next look-ahead token. This delayed invocation of the tokenizer can be useful in
- # certain kinds of advanced parsing situations where the lexer and parser interact with
- # each other or change states (i.e., manipulation of scope, lexer states, etc.).
- #
- # See: http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
- def set_defaulted_states(self):
- self.defaulted_states = {}
- for state, actions in self.action.items():
- rules = list(actions.values())
- if len(rules) == 1 and rules[0] < 0:
- self.defaulted_states[state] = rules[0]
-
- def disable_defaulted_states(self):
- self.defaulted_states = {}
-
- def parse(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- if debug or yaccdevel:
- if isinstance(debug, int):
- debug = PlyLogger(sys.stderr)
- return self.parsedebug(input, lexer, debug, tracking, tokenfunc)
- elif tracking:
- return self.parseopt(input, lexer, debug, tracking, tokenfunc)
- else:
- return self.parseopt_notrack(input, lexer, debug, tracking, tokenfunc)
-
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parsedebug().
- #
- # This is the debugging enabled version of parse(). All changes made to the
- # parsing engine should be made here. Optimized versions of this function
- # are automatically created by the ply/ygen.py script. This script cuts out
- # sections enclosed in markers such as this:
- #
- # #--! DEBUG
- # statements
- # #--! DEBUG
- #
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parsedebug(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parsedebug-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
- #--! DEBUG
- debug.info('PLY: PARSE DEBUG START')
- #--! DEBUG
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
- #--! DEBUG
- debug.debug('')
- debug.debug('State : %s', state)
- #--! DEBUG
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
- #--! DEBUG
- debug.debug('Defaulted state %s: Reduce using %d', state, -t)
- #--! DEBUG
-
- #--! DEBUG
- debug.debug('Stack : %s',
- ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- #--! DEBUG
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
- #--! DEBUG
- debug.debug('Action : Shift and goto state %s', t)
- #--! DEBUG
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
- #--! DEBUG
- if plen:
- debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str,
- '['+','.join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+']',
- goto[statestack[-1-plen]][pname])
- else:
- debug.info('Action : Reduce rule [%s] with %s and goto state %d', p.str, [],
- goto[statestack[-1]][pname])
-
- #--! DEBUG
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- #--! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
- sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
- #--! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- #--! DEBUG
- debug.info('Result : %s', format_result(pslice[0]))
- #--! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- #--! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- #--! TRACKING
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- #--! DEBUG
- debug.info('Result : %s', format_result(pslice[0]))
- #--! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- #--! DEBUG
- debug.info('Done : Returning %s', format_result(result))
- debug.info('PLY: PARSE DEBUG END')
- #--! DEBUG
- return result
-
- if t is None:
-
- #--! DEBUG
- debug.error('Error : %s',
- ('%s . %s' % (' '.join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- #--! DEBUG
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- #--! TRACKING
- if tracking:
- sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
- sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
- #--! TRACKING
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- #--! TRACKING
- if tracking:
- lookahead.lineno = sym.lineno
- lookahead.lexpos = sym.lexpos
- #--! TRACKING
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parsedebug-end
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt().
- #
- # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY!
- # This code is automatically generated by the ply/ygen.py script. Make
- # changes to the parsedebug() method instead.
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parseopt(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parseopt-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
-
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- #--! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1, 'endlineno', t1.lineno)
- sym.endlexpos = getattr(t1, 'endlexpos', t1.lexpos)
- #--! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- #--! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- #--! TRACKING
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- return result
-
- if t is None:
-
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- #--! TRACKING
- if tracking:
- sym.endlineno = getattr(lookahead, 'lineno', sym.lineno)
- sym.endlexpos = getattr(lookahead, 'lexpos', sym.lexpos)
- #--! TRACKING
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- #--! TRACKING
- if tracking:
- lookahead.lineno = sym.lineno
- lookahead.lexpos = sym.lexpos
- #--! TRACKING
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parseopt-end
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt_notrack().
- #
- # Optimized version of parseopt() with line number tracking removed.
- # DO NOT EDIT THIS CODE DIRECTLY. This code is automatically generated
- # by the ply/ygen.py script. Make changes to the parsedebug() method instead.
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parseopt_notrack(self, input=None, lexer=None, debug=False, tracking=False, tokenfunc=None):
- #--! parseopt-notrack-start
- lookahead = None # Current lookahead symbol
- lookaheadstack = [] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- defaulted_states = self.defaulted_states # Local reference to defaulted states
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- from . import lex
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set the parser() token method (sometimes used in error recovery)
- self.token = get_token
-
- # Set up the state and symbol stacks
-
- statestack = [] # Stack of parsing states
- self.statestack = statestack
- symstack = [] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while True:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
-
- if state not in defaulted_states:
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
- else:
- t = defaulted_states[state]
-
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount:
- errorcount -= 1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- self.state = state
- p.callable(pslice)
- del statestack[-plen:]
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- symstack.extend(targ[1:-1]) # Put the production slice back on the stack
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
-
- targ = [sym]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- self.state = state
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead) # Save the current lookahead token
- statestack.pop() # Pop back one state (before the reduce)
- state = statestack[-1]
- sym.type = 'error'
- sym.value = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = False
-
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n, 'value', None)
- return result
-
- if t is None:
-
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = False
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- if errtoken and not hasattr(errtoken, 'lexer'):
- errtoken.lexer = lexer
- self.state = state
- tok = call_errorfunc(self.errorfunc, errtoken, self)
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken, 'lineno'):
- lineno = lookahead.lineno
- else:
- lineno = 0
- if lineno:
- sys.stderr.write('yacc: Syntax error at line %d, token=%s\n' % (lineno, errtoken.type))
- else:
- sys.stderr.write('yacc: Syntax error, token=%s' % errtoken.type)
- else:
- sys.stderr.write('yacc: Parse error in input. EOF\n')
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- lookahead = None
- continue
-
- # Create the error symbol for the first time and make it the new lookahead symbol
- t = YaccSymbol()
- t.type = 'error'
-
- if hasattr(lookahead, 'lineno'):
- t.lineno = t.endlineno = lookahead.lineno
- if hasattr(lookahead, 'lexpos'):
- t.lexpos = t.endlexpos = lookahead.lexpos
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- sym = symstack.pop()
- statestack.pop()
- state = statestack[-1]
-
- continue
-
- # Call an error function here
- raise RuntimeError('yacc: internal parser error!!!\n')
-
- #--! parseopt-notrack-end
-
-# -----------------------------------------------------------------------------
-# === Grammar Representation ===
-#
-# The following functions, classes, and variables are used to represent and
-# manipulate the rules that make up a grammar.
-# -----------------------------------------------------------------------------
-
# regex matching identifiers; '-' is deliberately permitted in addition
# to the usual word characters
_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
-
-# -----------------------------------------------------------------------------
-# class Production:
-#
-# This class stores the raw information about a single production or grammar rule.
-# A grammar rule refers to a specification such as this:
-#
-# expr : expr PLUS term
-#
-# Here are the basic attributes defined on all productions
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','PLUS','term']
-# prec - Production precedence level
-# number - Production number.
-# func - Function that executes on reduce
-# file - File where production function is defined
-# lineno - Line number where production function is defined
-#
-# The following attributes are defined or optional.
-#
-# len - Length of the production (number of symbols on right hand side)
-# usyms - Set of unique symbols found in the production
-# -----------------------------------------------------------------------------
-
class Production(object):
    '''Raw information about a single production (grammar rule) such as
    "expr : expr PLUS term".

    Key attributes: name (left-hand side), prod (tuple of right-hand-side
    symbols), number, prec (precedence tuple), func (name of the reduce
    action function), and file/line where that function was defined.
    '''
    reduced = 0

    def __init__(self, number, name, prod, precedence=('right', 0), func=None, file='', line=0):
        self.name = name
        self.prod = tuple(prod)
        self.number = number
        self.func = func
        self.callable = None
        self.file = file
        self.line = line
        self.prec = precedence

        # Internal settings used during table construction
        self.len = len(self.prod)   # Number of right-hand-side symbols

        # Unique right-hand-side symbols, kept in first-seen order
        self.usyms = []
        for symbol in self.prod:
            if symbol not in self.usyms:
                self.usyms.append(symbol)

        # List of all LR items for the production
        self.lr_items = []
        self.lr_next = None

        # Cached display string, e.g. "expr -> expr PLUS term"
        self.str = ('%s -> %s' % (self.name, ' '.join(self.prod))
                    if self.prod else '%s -> <empty>' % self.name)

    def __str__(self):
        return self.str

    def __repr__(self):
        return 'Production(%s)' % self

    def __len__(self):
        return len(self.prod)

    def __nonzero__(self):
        # Python 2 truth protocol: a production is always truthy.
        return 1

    def __getitem__(self, index):
        return self.prod[index]

    def lr_item(self, n):
        '''Return the LR item with the dot at position n, or None past the end.'''
        if n > len(self.prod):
            return None
        item = LRItem(self, n)
        # Precompute the list of productions immediately following.
        # NOTE(review): Prodnames here is a module global, not an attribute;
        # presumably leftover from an older API — confirm before relying on it.
        try:
            item.lr_after = Prodnames[item.prod[n+1]]
        except (IndexError, KeyError):
            item.lr_after = []
        try:
            item.lr_before = item.prod[n-1]
        except IndexError:
            item.lr_before = None
        return item

    def bind(self, pdict):
        '''Resolve the stored action-function name to a callable from pdict.'''
        if self.func:
            self.callable = pdict[self.func]
-
-# This class serves as a minimal standin for Production objects when
-# reading table data from files. It only contains information
-# actually used by the LR parsing engine, plus some additional
-# debugging information.
class MiniProduction(object):
    '''Minimal stand-in for Production when tables are read from files.

    Carries only what the LR parsing engine needs, plus some debugging
    information.  The str/len parameter names intentionally mirror the
    field order written by the table writer.
    '''

    def __init__(self, str, name, len, func, file, line):
        self.name = name
        self.len = len
        self.func = func
        self.callable = None
        self.file = file
        self.line = line
        self.str = str

    def __str__(self):
        return self.str

    def __repr__(self):
        return 'MiniProduction(%s)' % self.str

    def bind(self, pdict):
        '''Resolve the stored action-function name to a callable from pdict.'''
        if self.func:
            self.callable = pdict[self.func]
-
-
-# -----------------------------------------------------------------------------
-# class LRItem
-#
-# This class represents a specific stage of parsing a production rule. For
-# example:
-#
-# expr : expr . PLUS term
-#
# In the above, the "." represents the current location of the parse. Here
# are the basic attributes:
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
-# number - Production number.
-#
-# lr_next Next LR item. Example, if we are ' expr -> expr . PLUS term'
-# then lr_next refers to 'expr -> expr PLUS . term'
-# lr_index - LR item index (location of the ".") in the prod list.
-# lookaheads - LALR lookahead symbols for this item
-# len - Length of the production (number of symbols on right hand side)
-# lr_after - List of all productions that immediately follow
-# lr_before - Grammar symbol immediately before
-# -----------------------------------------------------------------------------
-
class LRItem(object):
    '''A production together with a parse position, e.g. "expr -> expr . PLUS term".

    The position marker is stored as a literal '.' entry in self.prod at
    index lr_index, so self.len counts the dot as well as the symbols.
    lookaheads is filled in later by the LALR lookahead computation.
    '''

    def __init__(self, p, n):
        self.name = p.name
        dotted = list(p.prod)
        dotted.insert(n, '.')
        self.prod = tuple(dotted)
        self.number = p.number
        self.lr_index = n
        self.lookaheads = {}
        self.len = len(self.prod)
        self.usyms = p.usyms

    def __str__(self):
        if not self.prod:
            return '%s -> <empty>' % self.name
        return '%s -> %s' % (self.name, ' '.join(self.prod))

    def __repr__(self):
        return 'LRItem(%s)' % self
-
-# -----------------------------------------------------------------------------
-# rightmost_terminal()
-#
-# Return the rightmost terminal from a list of symbols. Used in add_production()
-# -----------------------------------------------------------------------------
def rightmost_terminal(symbols, terminals):
    '''Return the rightmost symbol in *symbols* that is a terminal, or None.

    Used by add_production() to derive a rule's default precedence.
    '''
    for sym in reversed(symbols):
        if sym in terminals:
            return sym
    return None
-
-# -----------------------------------------------------------------------------
-# === GRAMMAR CLASS ===
-#
-# The following class represents the contents of the specified grammar along
-# with various computed properties such as first sets, follow sets, LR items, etc.
-# This data is used for critical parts of the table generation process later.
-# -----------------------------------------------------------------------------
-
class GrammarError(YaccError):
    '''Raised for errors in the grammar specification itself (illegal rule
    names, duplicate rules, unknown precedence, bad %prec usage, etc.).'''
    pass
-
class Grammar(object):
    '''Contents of a grammar together with computed properties such as
    FIRST sets, FOLLOW sets, and LR items, used during table generation.'''

    def __init__(self, terminals):
        self.Productions = [None] # A list of all of the productions. The first
                                  # entry is always reserved for the purpose of
                                  # building an augmented grammar

        self.Prodnames = {} # A dictionary mapping the names of nonterminals to a list of all
                            # productions of that nonterminal.

        self.Prodmap = {} # A dictionary that is only used to detect duplicate
                          # productions.

        self.Terminals = {} # A dictionary mapping the names of terminal symbols to a
                            # list of the rules where they are used.

        for term in terminals:
            self.Terminals[term] = []

        # 'error' is always a valid terminal (used by error recovery rules)
        self.Terminals['error'] = []

        self.Nonterminals = {} # A dictionary mapping names of nonterminals to a list
                               # of rule numbers where they are used.

        self.First = {} # A dictionary of precomputed FIRST(x) symbols

        self.Follow = {} # A dictionary of precomputed FOLLOW(x) symbols

        self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
                             # form ('right',level) or ('nonassoc', level) or ('left',level)

        self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
                                    # This is only used to provide error checking and to generate
                                    # a warning about unused precedence rules.

        self.Start = None # Starting symbol for the grammar


    def __len__(self):
        '''Return the number of productions (including the reserved slot 0).'''
        return len(self.Productions)

    def __getitem__(self, index):
        '''Return production number *index*.'''
        return self.Productions[index]

    # -----------------------------------------------------------------------------
    # set_precedence()
    #
    # Sets the precedence for a given terminal. assoc is the associativity such as
    # 'left','right', or 'nonassoc'. level is a numeric level.
    #
    # -----------------------------------------------------------------------------

    def set_precedence(self, term, assoc, level):
        '''Record the precedence/associativity of terminal *term*.

        Must be called before any add_production(); raises GrammarError on
        duplicate terms or an unknown associativity.
        '''
        assert self.Productions == [None], 'Must call set_precedence() before add_production()'
        if term in self.Precedence:
            raise GrammarError('Precedence already specified for terminal %r' % term)
        if assoc not in ['left', 'right', 'nonassoc']:
            raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
        self.Precedence[term] = (assoc, level)

    # -----------------------------------------------------------------------------
    # add_production()
    #
    # Given an action function, this function assembles a production rule and
    # computes its precedence level.
    #
    # The production rule is supplied as a list of symbols. For example,
    # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
    # symbols ['expr','PLUS','term'].
    #
    # Precedence is determined by the precedence of the right-most non-terminal
    # or the precedence of a terminal specified by %prec.
    #
    # A variety of error checks are performed to make sure production symbols
    # are valid and that %prec is used correctly.
    # -----------------------------------------------------------------------------

    def add_production(self, prodname, syms, func=None, file='', line=0):
        '''Add one production *prodname* -> *syms*; raises GrammarError on
        invalid names, bad %prec usage, or duplicate rules.  Note: *syms*
        is modified in place (literals normalized, %prec stripped).'''

        if prodname in self.Terminals:
            raise GrammarError('%s:%d: Illegal rule name %r. Already defined as a token' % (file, line, prodname))
        if prodname == 'error':
            raise GrammarError('%s:%d: Illegal rule name %r. error is a reserved word' % (file, line, prodname))
        if not _is_identifier.match(prodname):
            raise GrammarError('%s:%d: Illegal rule name %r' % (file, line, prodname))

        # Look for literal tokens
        for n, s in enumerate(syms):
            if s[0] in "'\"":
                try:
                    # eval() is used only to decode a quoted literal from the
                    # grammar specification itself, not external input
                    c = eval(s)
                    if (len(c) > 1):
                        raise GrammarError('%s:%d: Literal token %s in rule %r may only be a single character' %
                                           (file, line, s, prodname))
                    if c not in self.Terminals:
                        self.Terminals[c] = []
                    syms[n] = c
                    continue
                except SyntaxError:
                    pass
            if not _is_identifier.match(s) and s != '%prec':
                raise GrammarError('%s:%d: Illegal name %r in rule %r' % (file, line, s, prodname))

        # Determine the precedence level
        if '%prec' in syms:
            if syms[-1] == '%prec':
                raise GrammarError('%s:%d: Syntax error. Nothing follows %%prec' % (file, line))
            if syms[-2] != '%prec':
                raise GrammarError('%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule' %
                                   (file, line))
            precname = syms[-1]
            prodprec = self.Precedence.get(precname)
            if not prodprec:
                raise GrammarError('%s:%d: Nothing known about the precedence of %r' % (file, line, precname))
            else:
                self.UsedPrecedence.add(precname)
            del syms[-2:] # Drop %prec from the rule
        else:
            # If no %prec, precedence is determined by the rightmost terminal symbol
            precname = rightmost_terminal(syms, self.Terminals)
            prodprec = self.Precedence.get(precname, ('right', 0))

        # See if the rule is already in the rulemap
        # (note: the local name 'map' shadows the builtin within this method)
        map = '%s -> %s' % (prodname, syms)
        if map in self.Prodmap:
            m = self.Prodmap[map]
            raise GrammarError('%s:%d: Duplicate rule %s. ' % (file, line, m) +
                               'Previous definition at %s:%d' % (m.file, m.line))

        # From this point on, everything is valid. Create a new Production instance
        pnumber = len(self.Productions)
        if prodname not in self.Nonterminals:
            self.Nonterminals[prodname] = []

        # Add the production number to Terminals and Nonterminals
        for t in syms:
            if t in self.Terminals:
                self.Terminals[t].append(pnumber)
            else:
                if t not in self.Nonterminals:
                    self.Nonterminals[t] = []
                self.Nonterminals[t].append(pnumber)

        # Create a production and add it to the list of productions
        p = Production(pnumber, prodname, syms, prodprec, func, file, line)
        self.Productions.append(p)
        self.Prodmap[map] = p

        # Add to the global productions list
        try:
            self.Prodnames[prodname].append(p)
        except KeyError:
            self.Prodnames[prodname] = [p]

    # -----------------------------------------------------------------------------
    # set_start()
    #
    # Sets the starting symbol and creates the augmented grammar. Production
    # rule 0 is S' -> start where start is the start symbol.
    # -----------------------------------------------------------------------------

    def set_start(self, start=None):
        '''Set the start symbol (default: LHS of the first rule) and install
        the augmented production S' -> start in slot 0.'''
        if not start:
            start = self.Productions[1].name
        if start not in self.Nonterminals:
            raise GrammarError('start symbol %s undefined' % start)
        self.Productions[0] = Production(0, "S'", [start])
        self.Nonterminals[start].append(0)
        self.Start = start

    # -----------------------------------------------------------------------------
    # find_unreachable()
    #
    # Find all of the nonterminal symbols that can't be reached from the starting
    # symbol. Returns a list of nonterminals that can't be reached.
    # -----------------------------------------------------------------------------

    def find_unreachable(self):
        '''Return nonterminals unreachable from the start symbol.'''

        # Mark all symbols that are reachable from a symbol s
        # (recursive; presumably grammars stay well below the recursion limit)
        def mark_reachable_from(s):
            if s in reachable:
                return
            reachable.add(s)
            for p in self.Prodnames.get(s, []):
                for r in p.prod:
                    mark_reachable_from(r)

        reachable = set()
        mark_reachable_from(self.Productions[0].prod[0])
        return [s for s in self.Nonterminals if s not in reachable]

    # -----------------------------------------------------------------------------
    # infinite_cycles()
    #
    # This function looks at the various parsing rules and tries to detect
    # infinite recursion cycles (grammar rules where there is no possible way
    # to derive a string of only terminals).
    # -----------------------------------------------------------------------------

    def infinite_cycles(self):
        '''Return symbols that can never derive a string of only terminals.'''
        terminates = {}

        # Terminals:
        for t in self.Terminals:
            terminates[t] = True

        terminates['$end'] = True

        # Nonterminals:

        # Initialize to false:
        for n in self.Nonterminals:
            terminates[n] = False

        # Then propagate termination until no change (fixpoint iteration):
        while True:
            some_change = False
            for (n, pl) in self.Prodnames.items():
                # Nonterminal n terminates iff any of its productions terminates.
                for p in pl:
                    # Production p terminates iff all of its rhs symbols terminate.
                    for s in p.prod:
                        if not terminates[s]:
                            # The symbol s does not terminate,
                            # so production p does not terminate.
                            p_terminates = False
                            break
                    else:
                        # didn't break from the loop,
                        # so every symbol s terminates
                        # so production p terminates.
                        p_terminates = True

                    if p_terminates:
                        # symbol n terminates!
                        if not terminates[n]:
                            terminates[n] = True
                            some_change = True
                        # Don't need to consider any more productions for this n.
                        break

            if not some_change:
                break

        infinite = []
        for (s, term) in terminates.items():
            if not term:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    # s is used-but-not-defined, and we've already warned of that,
                    # so it would be overkill to say that it's also non-terminating.
                    pass
                else:
                    infinite.append(s)

        return infinite

    # -----------------------------------------------------------------------------
    # undefined_symbols()
    #
    # Find all symbols that were used in the grammar, but not defined as tokens or
    # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
    # and prod is the production where the symbol was used.
    # -----------------------------------------------------------------------------
    def undefined_symbols(self):
        '''Return (symbol, production) pairs for used-but-undefined symbols.'''
        result = []
        for p in self.Productions:
            if not p:
                continue

            for s in p.prod:
                if s not in self.Prodnames and s not in self.Terminals and s != 'error':
                    result.append((s, p))
        return result

    # -----------------------------------------------------------------------------
    # unused_terminals()
    #
    # Find all terminals that were defined, but not used by the grammar. Returns
    # a list of all symbols.
    # -----------------------------------------------------------------------------
    def unused_terminals(self):
        '''Return terminals that appear in no production (excluding 'error').'''
        unused_tok = []
        for s, v in self.Terminals.items():
            if s != 'error' and not v:
                unused_tok.append(s)

        return unused_tok

    # ------------------------------------------------------------------------------
    # unused_rules()
    #
    # Find all grammar rules that were defined, but not used (maybe not reachable)
    # Returns a list of productions.
    # ------------------------------------------------------------------------------

    def unused_rules(self):
        '''Return one Production per nonterminal that is never referenced.'''
        unused_prod = []
        for s, v in self.Nonterminals.items():
            if not v:
                p = self.Prodnames[s][0]
                unused_prod.append(p)
        return unused_prod

    # -----------------------------------------------------------------------------
    # unused_precedence()
    #
    # Returns a list of tuples (term,precedence) corresponding to precedence
    # rules that were never used by the grammar. term is the name of the terminal
    # on which precedence was applied and precedence is a string such as 'left' or
    # 'right' corresponding to the type of precedence.
    # -----------------------------------------------------------------------------

    def unused_precedence(self):
        '''Return (terminal, associativity) pairs for unused precedence rules.'''
        unused = []
        for termname in self.Precedence:
            if not (termname in self.Terminals or termname in self.UsedPrecedence):
                unused.append((termname, self.Precedence[termname][0]))

        return unused

    # -------------------------------------------------------------------------
    # _first()
    #
    # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
    #
    # During execution of compute_first1, the result may be incomplete.
    # Afterward (e.g., when called from compute_follow()), it will be complete.
    # -------------------------------------------------------------------------
    def _first(self, beta):
        '''Return FIRST1(beta) as a list; may include the marker '<empty>'.'''

        # We are computing First(x1,x2,x3,...,xn)
        result = []
        for x in beta:
            x_produces_empty = False

            # Add all the non-<empty> symbols of First[x] to the result.
            for f in self.First[x]:
                if f == '<empty>':
                    x_produces_empty = True
                else:
                    if f not in result:
                        result.append(f)

            if x_produces_empty:
                # We have to consider the next x in beta,
                # i.e. stay in the loop.
                pass
            else:
                # We don't have to consider any further symbols in beta.
                break
        else:
            # There was no 'break' from the loop,
            # so x_produces_empty was true for all x in beta,
            # so beta produces empty as well.
            result.append('<empty>')

        return result

    # -------------------------------------------------------------------------
    # compute_first()
    #
    # Compute the value of FIRST1(X) for all symbols
    # -------------------------------------------------------------------------
    def compute_first(self):
        '''Compute (and cache in self.First) FIRST sets for all symbols.'''
        if self.First:
            return self.First

        # Terminals:
        for t in self.Terminals:
            self.First[t] = [t]

        self.First['$end'] = ['$end']

        # Nonterminals:

        # Initialize to the empty set:
        for n in self.Nonterminals:
            self.First[n] = []

        # Then propagate symbols until no change (fixpoint iteration):
        while True:
            some_change = False
            for n in self.Nonterminals:
                for p in self.Prodnames[n]:
                    for f in self._first(p.prod):
                        if f not in self.First[n]:
                            self.First[n].append(f)
                            some_change = True
            if not some_change:
                break

        return self.First

    # ---------------------------------------------------------------------
    # compute_follow()
    #
    # Computes all of the follow sets for every non-terminal symbol. The
    # follow set is the set of all symbols that might follow a given
    # non-terminal. See the Dragon book, 2nd Ed. p. 189.
    # ---------------------------------------------------------------------
    def compute_follow(self, start=None):
        '''Compute (and cache in self.Follow) FOLLOW sets for all nonterminals.'''
        # If already computed, return the result
        if self.Follow:
            return self.Follow

        # If first sets not computed yet, do that first.
        if not self.First:
            self.compute_first()

        # Add '$end' to the follow list of the start symbol
        for k in self.Nonterminals:
            self.Follow[k] = []

        if not start:
            start = self.Productions[1].name

        self.Follow[start] = ['$end']

        while True:
            didadd = False
            for p in self.Productions[1:]:
                # Here is the production set
                for i, B in enumerate(p.prod):
                    if B in self.Nonterminals:
                        # Okay. We got a non-terminal in a production
                        fst = self._first(p.prod[i+1:])
                        hasempty = False
                        for f in fst:
                            if f != '<empty>' and f not in self.Follow[B]:
                                self.Follow[B].append(f)
                                didadd = True
                            if f == '<empty>':
                                hasempty = True
                        if hasempty or i == (len(p.prod)-1):
                            # Add elements of follow(a) to follow(b)
                            for f in self.Follow[p.name]:
                                if f not in self.Follow[B]:
                                    self.Follow[B].append(f)
                                    didadd = True
            if not didadd:
                break
        return self.Follow


    # -----------------------------------------------------------------------------
    # build_lritems()
    #
    # This function walks the list of productions and builds a complete set of the
    # LR items. The LR items are stored in two ways: First, they are uniquely
    # numbered and placed in the list _lritems. Second, a linked list of LR items
    # is built for each production. For example:
    #
    # E -> E PLUS E
    #
    # Creates the list
    #
    # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
    # -----------------------------------------------------------------------------

    def build_lritems(self):
        '''Build, for every production, its list of LR items and the
        lr_next linked list chaining them together.'''
        for p in self.Productions:
            lastlri = p
            i = 0
            lr_items = []
            while True:
                if i > len(p):
                    # Past the end of the production: terminate the chain
                    lri = None
                else:
                    lri = LRItem(p, i)
                    # Precompute the list of productions immediately following
                    try:
                        lri.lr_after = self.Prodnames[lri.prod[i+1]]
                    except (IndexError, KeyError):
                        lri.lr_after = []
                    try:
                        lri.lr_before = lri.prod[i-1]
                    except IndexError:
                        lri.lr_before = None

                lastlri.lr_next = lri
                if not lri:
                    break
                lr_items.append(lri)
                lastlri = lri
                i += 1
            p.lr_items = lr_items
-
-# -----------------------------------------------------------------------------
-# == Class LRTable ==
-#
-# This basic class represents a basic table of LR parsing information.
-# Methods for generating the tables are not defined here. They are defined
-# in the derived class LRGeneratedTable.
-# -----------------------------------------------------------------------------
-
class VersionError(YaccError):
    '''Raised when a saved parser table file was generated by an
    incompatible version (its _tabversion does not match __tabversion__).'''
    pass
-
class LRTable(object):
    '''Basic container for LR parsing information (action table, goto
    table, productions, and construction method).

    Tables can be loaded from a generated Python module (read_table) or
    from a pickle file (read_pickle); generation itself lives in the
    derived class LRGeneratedTable.
    '''
    def __init__(self):
        self.lr_action = None       # Action table
        self.lr_goto = None         # Goto table
        self.lr_productions = None  # List of MiniProduction objects
        self.lr_method = None       # Table construction method used

    def read_table(self, module):
        '''Load tables from a parsetab module (object or importable name).

        Returns the table signature.  Raises VersionError if the table
        file is out of date.
        '''
        if isinstance(module, types.ModuleType):
            parsetab = module
        else:
            # NOTE: the module name is interpolated into an import statement;
            # callers must not pass untrusted strings here.
            exec('import %s' % module)
            parsetab = sys.modules[module]

        if parsetab._tabversion != __tabversion__:
            raise VersionError('yacc table file version is out of date')

        self.lr_action = parsetab._lr_action
        self.lr_goto = parsetab._lr_goto

        self.lr_productions = []
        for p in parsetab._lr_productions:
            self.lr_productions.append(MiniProduction(*p))

        self.lr_method = parsetab._lr_method
        return parsetab._lr_signature

    def read_pickle(self, filename):
        '''Load tables from a pickle file written by yacc().

        Returns the table signature.  Raises ImportError if the file does
        not exist and VersionError if it is out of date.
        '''
        try:
            import cPickle as pickle    # Python 2 C-accelerated pickle
        except ImportError:
            import pickle

        if not os.path.exists(filename):
            raise ImportError

        # BUGFIX: use a context manager so the file is closed even when
        # pickle.load() or the version check raises; the original leaked
        # the open file handle on those error paths.
        with open(filename, 'rb') as in_f:
            tabversion = pickle.load(in_f)
            if tabversion != __tabversion__:
                raise VersionError('yacc table file version is out of date')
            self.lr_method = pickle.load(in_f)
            signature = pickle.load(in_f)
            self.lr_action = pickle.load(in_f)
            self.lr_goto = pickle.load(in_f)
            productions = pickle.load(in_f)

        self.lr_productions = []
        for p in productions:
            self.lr_productions.append(MiniProduction(*p))

        return signature

    # Bind all production function names to callable objects in pdict
    def bind_callables(self, pdict):
        for p in self.lr_productions:
            p.bind(pdict)
-
-
-# -----------------------------------------------------------------------------
-# === LR Generator ===
-#
-# The following classes and functions are used to generate LR parsing tables on
-# a grammar.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# digraph()
-# traverse()
-#
-# The following two functions are used to compute set valued functions
-# of the form:
-#
-# F(x) = F'(x) U U{F(y) | x R y}
-#
-# This is used to compute the values of Read() sets as well as FOLLOW sets
-# in LALR(1) generation.
-#
-# Inputs: X - An input set
-# R - A relation
-# FP - Set-valued function
-# ------------------------------------------------------------------------------
-
def digraph(X, R, FP):
    '''Compute the set-valued function F(x) = FP(x) U U{F(y) | x R y}
    for every x in X, via the traverse() strongly-connected-component walk.

    X is the input set, R the relation, FP the base set-valued function.
    Returns the dictionary F.
    '''
    counters = dict.fromkeys(X, 0)
    visit_stack = []
    F = {}
    for node in X:
        if counters[node] == 0:
            traverse(node, counters, visit_stack, F, X, R, FP)
    return F
-
def traverse(x, N, stack, F, X, R, FP):
    '''Depth-first step of digraph(): Tarjan-style SCC traversal.

    N[x] holds the DFS depth while x is on the stack and MAXINT once its
    component is finished; F[x] accumulates the union of FP over
    everything reachable from x.  All members of a strongly connected
    component end up sharing the same F value.
    '''
    stack.append(x)
    d = len(stack)
    N[x] = d
    F[x] = FP(x) # F(X) <- F'(x)

    rel = R(x) # Get y's related to x
    for y in rel:
        if N[y] == 0:
            traverse(y, N, stack, F, X, R, FP)
        N[x] = min(N[x], N[y])
        for a in F.get(y, []):
            if a not in F[x]:
                F[x].append(a)
    if N[x] == d:
        # x is the root of a strongly connected component: pop the whole
        # component off the stack, marking each member done (MAXINT) and
        # sharing x's F value with it.
        N[stack[-1]] = MAXINT
        F[stack[-1]] = F[x]
        element = stack.pop()
        while element != x:
            N[stack[-1]] = MAXINT
            F[stack[-1]] = F[x]
            element = stack.pop()
-
class LALRError(YaccError):
    '''Raised for problems encountered during LR table generation.'''
    pass
-
-# -----------------------------------------------------------------------------
-# == LRGeneratedTable ==
-#
-# This class implements the LR table generation algorithm. There are no
-# public methods except for write()
-# -----------------------------------------------------------------------------
-
-class LRGeneratedTable(LRTable):
    def __init__(self, grammar, method='LALR', log=None):
        '''Build LR parsing tables for *grammar* using 'SLR' or 'LALR'.

        Raises LALRError for any other method.  Conflict diagnostics are
        written to *log* (a NullLogger is substituted when none is given).
        '''
        if method not in ['SLR', 'LALR']:
            raise LALRError('Unsupported method %s' % method)

        self.grammar = grammar
        self.lr_method = method

        # Set up the logger
        if not log:
            log = NullLogger()
        self.log = log

        # Internal attributes
        self.lr_action = {} # Action table
        self.lr_goto = {} # Goto table
        self.lr_productions = grammar.Productions # Copy of grammar Production array
        self.lr_goto_cache = {} # Cache of computed gotos
        self.lr0_cidhash = {} # Cache of closures

        self._add_count = 0 # Internal counter used to detect cycles

        # Diagnostic information filled in by the table generator
        self.sr_conflict = 0
        self.rr_conflict = 0
        self.conflicts = [] # List of conflicts

        self.sr_conflicts = []
        self.rr_conflicts = []

        # Build the tables
        self.grammar.build_lritems()
        self.grammar.compute_first()
        self.grammar.compute_follow()
        self.lr_parse_table()
-
- # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
-
    def lr0_closure(self, I):
        '''Return the LR(0) closure of the item set I as a list.'''
        # Bump the generation counter so lr0_added marks from previous
        # closure computations are ignored.
        self._add_count += 1

        # Add everything in I to J
        J = I[:]
        didadd = True
        while didadd:
            didadd = False
            # J intentionally grows while being iterated; items appended
            # during the pass are visited by the same for loop.
            for j in J:
                for x in j.lr_after:
                    if getattr(x, 'lr0_added', 0) == self._add_count:
                        continue
                    # Add B --> .G to J
                    J.append(x.lr_next)
                    x.lr0_added = self._add_count
                    didadd = True

        return J
-
- # Compute the LR(0) goto function goto(I,X) where I is a set
- # of LR(0) items and X is a grammar symbol. This function is written
- # in a way that guarantees uniqueness of the generated goto sets
- # (i.e. the same goto set will never be returned as two different Python
- # objects). With uniqueness, we can later do fast set comparisons using
- # id(obj) instead of element-wise comparison.
-
- def lr0_goto(self, I, x):
- # First we look for a previously cached entry
- g = self.lr_goto_cache.get((id(I), x))
- if g:
- return g
-
- # Now we generate the goto set in a way that guarantees uniqueness
- # of the result
-
- s = self.lr_goto_cache.get(x)
- if not s:
- s = {}
- self.lr_goto_cache[x] = s
-
- gs = []
- for p in I:
- n = p.lr_next
- if n and n.lr_before == x:
- s1 = s.get(id(n))
- if not s1:
- s1 = {}
- s[id(n)] = s1
- gs.append(n)
- s = s1
- g = s.get('$end')
- if not g:
- if gs:
- g = self.lr0_closure(gs)
- s['$end'] = g
- else:
- s['$end'] = gs
- self.lr_goto_cache[(id(I), x)] = g
- return g
-
- # Compute the LR(0) sets of item function
- def lr0_items(self):
- C = [self.lr0_closure([self.grammar.Productions[0].lr_next])]
- i = 0
- for I in C:
- self.lr0_cidhash[id(I)] = i
- i += 1
-
- # Loop over the items in C and each grammar symbols
- i = 0
- while i < len(C):
- I = C[i]
- i += 1
-
- # Collect all of the symbols that could possibly be in the goto(I,X) sets
- asyms = {}
- for ii in I:
- for s in ii.usyms:
- asyms[s] = None
-
- for x in asyms:
- g = self.lr0_goto(I, x)
- if not g or id(g) in self.lr0_cidhash:
- continue
- self.lr0_cidhash[id(g)] = len(C)
- C.append(g)
-
- return C
-
- # -----------------------------------------------------------------------------
- # ==== LALR(1) Parsing ====
- #
- # LALR(1) parsing is almost exactly the same as SLR except that instead of
- # relying upon Follow() sets when performing reductions, a more selective
- # lookahead set that incorporates the state of the LR(0) machine is utilized.
- # Thus, we mainly just have to focus on calculating the lookahead sets.
- #
- # The method used here is due to DeRemer and Pennelo (1982).
- #
- # DeRemer, F. L., and T. J. Pennelo: "Efficient Computation of LALR(1)
- # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
- # Vol. 4, No. 4, Oct. 1982, pp. 615-649
- #
- # Further details can also be found in:
- #
- # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
- # McGraw-Hill Book Company, (1985).
- #
- # -----------------------------------------------------------------------------
-
- # -----------------------------------------------------------------------------
- # compute_nullable_nonterminals()
- #
- # Creates a dictionary containing all of the non-terminals that might produce
- # an empty production.
- # -----------------------------------------------------------------------------
-
- def compute_nullable_nonterminals(self):
- nullable = set()
- num_nullable = 0
- while True:
- for p in self.grammar.Productions[1:]:
- if p.len == 0:
- nullable.add(p.name)
- continue
- for t in p.prod:
- if t not in nullable:
- break
- else:
- nullable.add(p.name)
- if len(nullable) == num_nullable:
- break
- num_nullable = len(nullable)
- return nullable
-
- # -----------------------------------------------------------------------------
- # find_nonterminal_trans(C)
- #
- # Given a set of LR(0) items, this functions finds all of the non-terminal
- # transitions. These are transitions in which a dot appears immediately before
- # a non-terminal. Returns a list of tuples of the form (state,N) where state
- # is the state number and N is the nonterminal symbol.
- #
- # The input C is the set of LR(0) items.
- # -----------------------------------------------------------------------------
-
- def find_nonterminal_transitions(self, C):
- trans = []
- for stateno, state in enumerate(C):
- for p in state:
- if p.lr_index < p.len - 1:
- t = (stateno, p.prod[p.lr_index+1])
- if t[1] in self.grammar.Nonterminals:
- if t not in trans:
- trans.append(t)
- return trans
-
- # -----------------------------------------------------------------------------
- # dr_relation()
- #
- # Computes the DR(p,A) relationships for non-terminal transitions. The input
- # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
- #
- # Returns a list of terminals.
- # -----------------------------------------------------------------------------
-
- def dr_relation(self, C, trans, nullable):
- dr_set = {}
- state, N = trans
- terms = []
-
- g = self.lr0_goto(C[state], N)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index+1]
- if a in self.grammar.Terminals:
- if a not in terms:
- terms.append(a)
-
- # This extra bit is to handle the start state
- if state == 0 and N == self.grammar.Productions[0].prod[0]:
- terms.append('$end')
-
- return terms
-
- # -----------------------------------------------------------------------------
- # reads_relation()
- #
- # Computes the READS() relation (p,A) READS (t,C).
- # -----------------------------------------------------------------------------
-
- def reads_relation(self, C, trans, empty):
- # Look for empty transitions
- rel = []
- state, N = trans
-
- g = self.lr0_goto(C[state], N)
- j = self.lr0_cidhash.get(id(g), -1)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index + 1]
- if a in empty:
- rel.append((j, a))
-
- return rel
-
- # -----------------------------------------------------------------------------
- # compute_lookback_includes()
- #
- # Determines the lookback and includes relations
- #
- # LOOKBACK:
- #
- # This relation is determined by running the LR(0) state machine forward.
- # For example, starting with a production "N : . A B C", we run it forward
- # to obtain "N : A B C ." We then build a relationship between this final
- # state and the starting state. These relationships are stored in a dictionary
- # lookdict.
- #
- # INCLUDES:
- #
- # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
- #
- # This relation is used to determine non-terminal transitions that occur
- # inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
- # if the following holds:
- #
- # B -> LAT, where T -> epsilon and p' -L-> p
- #
- # L is essentially a prefix (which may be empty), T is a suffix that must be
- # able to derive an empty string. State p' must lead to state p with the string L.
- #
- # -----------------------------------------------------------------------------
-
- def compute_lookback_includes(self, C, trans, nullable):
- lookdict = {} # Dictionary of lookback relations
- includedict = {} # Dictionary of include relations
-
- # Make a dictionary of non-terminal transitions
- dtrans = {}
- for t in trans:
- dtrans[t] = 1
-
- # Loop over all transitions and compute lookbacks and includes
- for state, N in trans:
- lookb = []
- includes = []
- for p in C[state]:
- if p.name != N:
- continue
-
- # Okay, we have a name match. We now follow the production all the way
- # through the state machine until we get the . on the right hand side
-
- lr_index = p.lr_index
- j = state
- while lr_index < p.len - 1:
- lr_index = lr_index + 1
- t = p.prod[lr_index]
-
- # Check to see if this symbol and state are a non-terminal transition
- if (j, t) in dtrans:
- # Yes. Okay, there is some chance that this is an includes relation
- # the only way to know for certain is whether the rest of the
- # production derives empty
-
- li = lr_index + 1
- while li < p.len:
- if p.prod[li] in self.grammar.Terminals:
- break # No forget it
- if p.prod[li] not in nullable:
- break
- li = li + 1
- else:
- # Appears to be a relation between (j,t) and (state,N)
- includes.append((j, t))
-
- g = self.lr0_goto(C[j], t) # Go to next set
- j = self.lr0_cidhash.get(id(g), -1) # Go to next state
-
- # When we get here, j is the final state, now we have to locate the production
- for r in C[j]:
- if r.name != p.name:
- continue
- if r.len != p.len:
- continue
- i = 0
- # This look is comparing a production ". A B C" with "A B C ."
- while i < r.lr_index:
- if r.prod[i] != p.prod[i+1]:
- break
- i = i + 1
- else:
- lookb.append((j, r))
- for i in includes:
- if i not in includedict:
- includedict[i] = []
- includedict[i].append((state, N))
- lookdict[(state, N)] = lookb
-
- return lookdict, includedict
-
- # -----------------------------------------------------------------------------
- # compute_read_sets()
- #
- # Given a set of LR(0) items, this function computes the read sets.
- #
- # Inputs: C = Set of LR(0) items
- # ntrans = Set of nonterminal transitions
- # nullable = Set of empty transitions
- #
- # Returns a set containing the read sets
- # -----------------------------------------------------------------------------
-
- def compute_read_sets(self, C, ntrans, nullable):
- FP = lambda x: self.dr_relation(C, x, nullable)
- R = lambda x: self.reads_relation(C, x, nullable)
- F = digraph(ntrans, R, FP)
- return F
-
- # -----------------------------------------------------------------------------
- # compute_follow_sets()
- #
- # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
- # and an include set, this function computes the follow sets
- #
- # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
- #
- # Inputs:
- # ntrans = Set of nonterminal transitions
- # readsets = Readset (previously computed)
- # inclsets = Include sets (previously computed)
- #
- # Returns a set containing the follow sets
- # -----------------------------------------------------------------------------
-
- def compute_follow_sets(self, ntrans, readsets, inclsets):
- FP = lambda x: readsets[x]
- R = lambda x: inclsets.get(x, [])
- F = digraph(ntrans, R, FP)
- return F
-
- # -----------------------------------------------------------------------------
- # add_lookaheads()
- #
- # Attaches the lookahead symbols to grammar rules.
- #
- # Inputs: lookbacks - Set of lookback relations
- # followset - Computed follow set
- #
- # This function directly attaches the lookaheads to productions contained
- # in the lookbacks set
- # -----------------------------------------------------------------------------
-
- def add_lookaheads(self, lookbacks, followset):
- for trans, lb in lookbacks.items():
- # Loop over productions in lookback
- for state, p in lb:
- if state not in p.lookaheads:
- p.lookaheads[state] = []
- f = followset.get(trans, [])
- for a in f:
- if a not in p.lookaheads[state]:
- p.lookaheads[state].append(a)
-
- # -----------------------------------------------------------------------------
- # add_lalr_lookaheads()
- #
- # This function does all of the work of adding lookahead information for use
- # with LALR parsing
- # -----------------------------------------------------------------------------
-
- def add_lalr_lookaheads(self, C):
- # Determine all of the nullable nonterminals
- nullable = self.compute_nullable_nonterminals()
-
- # Find all non-terminal transitions
- trans = self.find_nonterminal_transitions(C)
-
- # Compute read sets
- readsets = self.compute_read_sets(C, trans, nullable)
-
- # Compute lookback/includes relations
- lookd, included = self.compute_lookback_includes(C, trans, nullable)
-
- # Compute LALR FOLLOW sets
- followsets = self.compute_follow_sets(trans, readsets, included)
-
- # Add all of the lookaheads
- self.add_lookaheads(lookd, followsets)
-
- # -----------------------------------------------------------------------------
- # lr_parse_table()
- #
- # This function constructs the parse tables for SLR or LALR
- # -----------------------------------------------------------------------------
- def lr_parse_table(self):
- Productions = self.grammar.Productions
- Precedence = self.grammar.Precedence
- goto = self.lr_goto # Goto array
- action = self.lr_action # Action array
- log = self.log # Logger for output
-
- actionp = {} # Action production array (temporary)
-
- log.info('Parsing method: %s', self.lr_method)
-
- # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
- # This determines the number of states
-
- C = self.lr0_items()
-
- if self.lr_method == 'LALR':
- self.add_lalr_lookaheads(C)
-
- # Build the parser table, state by state
- st = 0
- for I in C:
- # Loop over each production in I
- actlist = [] # List of actions
- st_action = {}
- st_actionp = {}
- st_goto = {}
- log.info('')
- log.info('state %d', st)
- log.info('')
- for p in I:
- log.info(' (%d) %s', p.number, p)
- log.info('')
-
- for p in I:
- if p.len == p.lr_index + 1:
- if p.name == "S'":
- # Start symbol. Accept!
- st_action['$end'] = 0
- st_actionp['$end'] = p
- else:
- # We are at the end of a production. Reduce!
- if self.lr_method == 'LALR':
- laheads = p.lookaheads[st]
- else:
- laheads = self.grammar.Follow[p.name]
- for a in laheads:
- actlist.append((a, p, 'reduce using rule %d (%s)' % (p.number, p)))
- r = st_action.get(a)
- if r is not None:
- # Whoa. Have a shift/reduce or reduce/reduce conflict
- if r > 0:
- # Need to decide on shift or reduce here
- # By default we favor shifting. Need to add
- # some precedence rules here.
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from rule being reduced (p)
- rprec, rlevel = Productions[p.number].prec
-
- if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
- # We really need to reduce here.
- st_action[a] = -p.number
- st_actionp[a] = p
- if not slevel and not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
- self.sr_conflicts.append((st, a, 'reduce'))
- Productions[p.number].reduced += 1
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the shift
- if not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as shift', a)
- self.sr_conflicts.append((st, a, 'shift'))
- elif r < 0:
- # Reduce/reduce conflict. In this case, we favor the rule
- # that was defined first in the grammar file
- oldp = Productions[-r]
- pp = Productions[p.number]
- if oldp.line > pp.line:
- st_action[a] = -p.number
- st_actionp[a] = p
- chosenp, rejectp = pp, oldp
- Productions[p.number].reduced += 1
- Productions[oldp.number].reduced -= 1
- else:
- chosenp, rejectp = oldp, pp
- self.rr_conflicts.append((st, chosenp, rejectp))
- log.info(' ! reduce/reduce conflict for %s resolved using rule %d (%s)',
- a, st_actionp[a].number, st_actionp[a])
- else:
- raise LALRError('Unknown conflict in state %d' % st)
- else:
- st_action[a] = -p.number
- st_actionp[a] = p
- Productions[p.number].reduced += 1
- else:
- i = p.lr_index
- a = p.prod[i+1] # Get symbol right after the "."
- if a in self.grammar.Terminals:
- g = self.lr0_goto(I, a)
- j = self.lr0_cidhash.get(id(g), -1)
- if j >= 0:
- # We are in a shift state
- actlist.append((a, p, 'shift and go to state %d' % j))
- r = st_action.get(a)
- if r is not None:
- # Whoa have a shift/reduce or shift/shift conflict
- if r > 0:
- if r != j:
- raise LALRError('Shift/shift conflict in state %d' % st)
- elif r < 0:
- # Do a precedence check.
- # - if precedence of reduce rule is higher, we reduce.
- # - if precedence of reduce is same and left assoc, we reduce.
- # - otherwise we shift
-
- # Shift precedence comes from the token
- sprec, slevel = Precedence.get(a, ('right', 0))
-
- # Reduce precedence comes from the rule that could have been reduced
- rprec, rlevel = Productions[st_actionp[a].number].prec
-
- if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
- # We decide to shift here... highest precedence to shift
- Productions[st_actionp[a].number].reduced -= 1
- st_action[a] = j
- st_actionp[a] = p
- if not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as shift', a)
- self.sr_conflicts.append((st, a, 'shift'))
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the reduce
- if not slevel and not rlevel:
- log.info(' ! shift/reduce conflict for %s resolved as reduce', a)
- self.sr_conflicts.append((st, a, 'reduce'))
-
- else:
- raise LALRError('Unknown conflict in state %d' % st)
- else:
- st_action[a] = j
- st_actionp[a] = p
-
- # Print the actions associated with each terminal
- _actprint = {}
- for a, p, m in actlist:
- if a in st_action:
- if p is st_actionp[a]:
- log.info(' %-15s %s', a, m)
- _actprint[(a, m)] = 1
- log.info('')
- # Print the actions that were not used. (debugging)
- not_used = 0
- for a, p, m in actlist:
- if a in st_action:
- if p is not st_actionp[a]:
- if not (a, m) in _actprint:
- log.debug(' ! %-15s [ %s ]', a, m)
- not_used = 1
- _actprint[(a, m)] = 1
- if not_used:
- log.debug('')
-
- # Construct the goto table for this state
-
- nkeys = {}
- for ii in I:
- for s in ii.usyms:
- if s in self.grammar.Nonterminals:
- nkeys[s] = None
- for n in nkeys:
- g = self.lr0_goto(I, n)
- j = self.lr0_cidhash.get(id(g), -1)
- if j >= 0:
- st_goto[n] = j
- log.info(' %-30s shift and go to state %d', n, j)
-
- action[st] = st_action
- actionp[st] = st_actionp
- goto[st] = st_goto
- st += 1
-
- # -----------------------------------------------------------------------------
- # write()
- #
- # This function writes the LR parsing tables to a file
- # -----------------------------------------------------------------------------
-
- def write_table(self, tabmodule, outputdir='', signature=''):
- if isinstance(tabmodule, types.ModuleType):
- raise IOError("Won't overwrite existing tabmodule")
-
- basemodulename = tabmodule.split('.')[-1]
- filename = os.path.join(outputdir, basemodulename) + '.py'
- try:
- f = open(filename, 'w')
-
- f.write('''
-# %s
-# This file is automatically generated. Do not edit.
-_tabversion = %r
-
-_lr_method = %r
-
-_lr_signature = %r
- ''' % (os.path.basename(filename), __tabversion__, self.lr_method, signature))
-
- # Change smaller to 0 to go back to original tables
- smaller = 1
-
- # Factor out names to try and make smaller
- if smaller:
- items = {}
-
- for s, nd in self.lr_action.items():
- for name, v in nd.items():
- i = items.get(name)
- if not i:
- i = ([], [])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write('\n_lr_action_items = {')
- for k, v in items.items():
- f.write('%r:([' % k)
- for i in v[0]:
- f.write('%r,' % i)
- f.write('],[')
- for i in v[1]:
- f.write('%r,' % i)
-
- f.write(']),')
- f.write('}\n')
-
- f.write('''
-_lr_action = {}
-for _k, _v in _lr_action_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- if not _x in _lr_action: _lr_action[_x] = {}
- _lr_action[_x][_k] = _y
-del _lr_action_items
-''')
-
- else:
- f.write('\n_lr_action = { ')
- for k, v in self.lr_action.items():
- f.write('(%r,%r):%r,' % (k[0], k[1], v))
- f.write('}\n')
-
- if smaller:
- # Factor out names to try and make smaller
- items = {}
-
- for s, nd in self.lr_goto.items():
- for name, v in nd.items():
- i = items.get(name)
- if not i:
- i = ([], [])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write('\n_lr_goto_items = {')
- for k, v in items.items():
- f.write('%r:([' % k)
- for i in v[0]:
- f.write('%r,' % i)
- f.write('],[')
- for i in v[1]:
- f.write('%r,' % i)
-
- f.write(']),')
- f.write('}\n')
-
- f.write('''
-_lr_goto = {}
-for _k, _v in _lr_goto_items.items():
- for _x, _y in zip(_v[0], _v[1]):
- if not _x in _lr_goto: _lr_goto[_x] = {}
- _lr_goto[_x][_k] = _y
-del _lr_goto_items
-''')
- else:
- f.write('\n_lr_goto = { ')
- for k, v in self.lr_goto.items():
- f.write('(%r,%r):%r,' % (k[0], k[1], v))
- f.write('}\n')
-
- # Write production table
- f.write('_lr_productions = [\n')
- for p in self.lr_productions:
- if p.func:
- f.write(' (%r,%r,%d,%r,%r,%d),\n' % (p.str, p.name, p.len,
- p.func, os.path.basename(p.file), p.line))
- else:
- f.write(' (%r,%r,%d,None,None,None),\n' % (str(p), p.name, p.len))
- f.write(']\n')
- f.close()
-
- except IOError as e:
- raise
-
-
- # -----------------------------------------------------------------------------
- # pickle_table()
- #
- # This function pickles the LR parsing tables to a supplied file object
- # -----------------------------------------------------------------------------
-
- def pickle_table(self, filename, signature=''):
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
- with open(filename, 'wb') as outf:
- pickle.dump(__tabversion__, outf, pickle_protocol)
- pickle.dump(self.lr_method, outf, pickle_protocol)
- pickle.dump(signature, outf, pickle_protocol)
- pickle.dump(self.lr_action, outf, pickle_protocol)
- pickle.dump(self.lr_goto, outf, pickle_protocol)
-
- outp = []
- for p in self.lr_productions:
- if p.func:
- outp.append((p.str, p.name, p.len, p.func, os.path.basename(p.file), p.line))
- else:
- outp.append((str(p), p.name, p.len, None, None, None))
- pickle.dump(outp, outf, pickle_protocol)
-
-# -----------------------------------------------------------------------------
-# === INTROSPECTION ===
-#
-# The following functions and classes are used to implement the PLY
-# introspection features followed by the yacc() function itself.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the yacc() call if none was provided.
-# -----------------------------------------------------------------------------
-
-def get_caller_module_dict(levels):
- f = sys._getframe(levels)
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
- return ldict
-
-# -----------------------------------------------------------------------------
-# parse_grammar()
-#
-# This takes a raw grammar rule string and parses it into production data
-# -----------------------------------------------------------------------------
-def parse_grammar(doc, file, line):
- grammar = []
- # Split the doc string into lines
- pstrings = doc.splitlines()
- lastp = None
- dline = line
- for ps in pstrings:
- dline += 1
- p = ps.split()
- if not p:
- continue
- try:
- if p[0] == '|':
- # This is a continuation of a previous rule
- if not lastp:
- raise SyntaxError("%s:%d: Misplaced '|'" % (file, dline))
- prodname = lastp
- syms = p[1:]
- else:
- prodname = p[0]
- lastp = prodname
- syms = p[2:]
- assign = p[1]
- if assign != ':' and assign != '::=':
- raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file, dline))
-
- grammar.append((file, dline, prodname, syms))
- except SyntaxError:
- raise
- except Exception:
- raise SyntaxError('%s:%d: Syntax error in rule %r' % (file, dline, ps.strip()))
-
- return grammar
-
-# -----------------------------------------------------------------------------
-# ParserReflect()
-#
-# This class represents information extracted for building a parser including
-# start symbol, error function, tokens, precedence list, action functions,
-# etc.
-# -----------------------------------------------------------------------------
-class ParserReflect(object):
- def __init__(self, pdict, log=None):
- self.pdict = pdict
- self.start = None
- self.error_func = None
- self.tokens = None
- self.modules = set()
- self.grammar = []
- self.error = False
-
- if log is None:
- self.log = PlyLogger(sys.stderr)
- else:
- self.log = log
-
- # Get all of the basic information
- def get_all(self):
- self.get_start()
- self.get_error_func()
- self.get_tokens()
- self.get_precedence()
- self.get_pfunctions()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_start()
- self.validate_error_func()
- self.validate_tokens()
- self.validate_precedence()
- self.validate_pfunctions()
- self.validate_modules()
- return self.error
-
- # Compute a signature over the grammar
- def signature(self):
- parts = []
- try:
- if self.start:
- parts.append(self.start)
- if self.prec:
- parts.append(''.join([''.join(p) for p in self.prec]))
- if self.tokens:
- parts.append(' '.join(self.tokens))
- for f in self.pfuncs:
- if f[3]:
- parts.append(f[3])
- except (TypeError, ValueError):
- pass
- return ''.join(parts)
-
- # -----------------------------------------------------------------------------
- # validate_modules()
- #
- # This method checks to see if there are duplicated p_rulename() functions
- # in the parser module file. Without this function, it is really easy for
- # users to make mistakes by cutting and pasting code fragments (and it's a real
- # bugger to try and figure out why the resulting parser doesn't work). Therefore,
- # we just do a little regular expression pattern matching of def statements
- # to try and detect duplicates.
- # -----------------------------------------------------------------------------
-
- def validate_modules(self):
- # Match def p_funcname(
- fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
-
- for module in self.modules:
- try:
- lines, linen = inspect.getsourcelines(module)
- except IOError:
- continue
-
- counthash = {}
- for linen, line in enumerate(lines):
- linen += 1
- m = fre.match(line)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- filename = inspect.getsourcefile(module)
- self.log.warning('%s:%d: Function %s redefined. Previously defined on line %d',
- filename, linen, name, prev)
-
- # Get the start symbol
- def get_start(self):
- self.start = self.pdict.get('start')
-
- # Validate the start symbol
- def validate_start(self):
- if self.start is not None:
- if not isinstance(self.start, string_types):
- self.log.error("'start' must be a string")
-
- # Look for error handler
- def get_error_func(self):
- self.error_func = self.pdict.get('p_error')
-
- # Validate the error function
- def validate_error_func(self):
- if self.error_func:
- if isinstance(self.error_func, types.FunctionType):
- ismethod = 0
- elif isinstance(self.error_func, types.MethodType):
- ismethod = 1
- else:
- self.log.error("'p_error' defined, but is not a function or method")
- self.error = True
- return
-
- eline = self.error_func.__code__.co_firstlineno
- efile = self.error_func.__code__.co_filename
- module = inspect.getmodule(self.error_func)
- self.modules.add(module)
-
- argcount = self.error_func.__code__.co_argcount - ismethod
- if argcount != 1:
- self.log.error('%s:%d: p_error() requires 1 argument', efile, eline)
- self.error = True
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.pdict.get('tokens')
- if not tokens:
- self.log.error('No token list is defined')
- self.error = True
- return
-
- if not isinstance(tokens, (list, tuple)):
- self.log.error('tokens must be a list or tuple')
- self.error = True
- return
-
- if not tokens:
- self.log.error('tokens is empty')
- self.error = True
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- # Validate the tokens.
- if 'error' in self.tokens:
- self.log.error("Illegal token name 'error'. Is a reserved word")
- self.error = True
- return
-
- terminals = set()
- for n in self.tokens:
- if n in terminals:
- self.log.warning('Token %r multiply defined', n)
- terminals.add(n)
-
- # Get the precedence map (if any)
- def get_precedence(self):
- self.prec = self.pdict.get('precedence')
-
- # Validate and parse the precedence map
- def validate_precedence(self):
- preclist = []
- if self.prec:
- if not isinstance(self.prec, (list, tuple)):
- self.log.error('precedence must be a list or tuple')
- self.error = True
- return
- for level, p in enumerate(self.prec):
- if not isinstance(p, (list, tuple)):
- self.log.error('Bad precedence table')
- self.error = True
- return
-
- if len(p) < 2:
- self.log.error('Malformed precedence entry %s. Must be (assoc, term, ..., term)', p)
- self.error = True
- return
- assoc = p[0]
- if not isinstance(assoc, string_types):
- self.log.error('precedence associativity must be a string')
- self.error = True
- return
- for term in p[1:]:
- if not isinstance(term, string_types):
- self.log.error('precedence items must be strings')
- self.error = True
- return
- preclist.append((term, assoc, level+1))
- self.preclist = preclist
-
- # Get all p_functions from the grammar
- def get_pfunctions(self):
- p_functions = []
- for name, item in self.pdict.items():
- if not name.startswith('p_') or name == 'p_error':
- continue
- if isinstance(item, (types.FunctionType, types.MethodType)):
- line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
- module = inspect.getmodule(item)
- p_functions.append((line, module, name, item.__doc__))
-
- # Sort all of the actions by line number; make sure to stringify
- # modules to make them sortable, since `line` may not uniquely sort all
- # p functions
- p_functions.sort(key=lambda p_function: (
- p_function[0],
- str(p_function[1]),
- p_function[2],
- p_function[3]))
- self.pfuncs = p_functions
-
- # Validate all of the p_functions
- def validate_pfunctions(self):
- grammar = []
- # Check for non-empty symbols
- if len(self.pfuncs) == 0:
- self.log.error('no rules of the form p_rulename are defined')
- self.error = True
- return
-
- for line, module, name, doc in self.pfuncs:
- file = inspect.getsourcefile(module)
- func = self.pdict[name]
- if isinstance(func, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- if func.__code__.co_argcount > reqargs:
- self.log.error('%s:%d: Rule %r has too many arguments', file, line, func.__name__)
- self.error = True
- elif func.__code__.co_argcount < reqargs:
- self.log.error('%s:%d: Rule %r requires an argument', file, line, func.__name__)
- self.error = True
- elif not func.__doc__:
- self.log.warning('%s:%d: No documentation string specified in function %r (ignored)',
- file, line, func.__name__)
- else:
- try:
- parsed_g = parse_grammar(doc, file, line)
- for g in parsed_g:
- grammar.append((name, g))
- except SyntaxError as e:
- self.log.error(str(e))
- self.error = True
-
- # Looks like a valid grammar rule
- # Mark the file in which defined.
- self.modules.add(module)
-
- # Secondary validation step that looks for p_ definitions that are not functions
- # or functions that look like they might be grammar rules.
-
- for n, v in self.pdict.items():
- if n.startswith('p_') and isinstance(v, (types.FunctionType, types.MethodType)):
- continue
- if n.startswith('t_'):
- continue
- if n.startswith('p_') and n != 'p_error':
- self.log.warning('%r not defined as a function', n)
- if ((isinstance(v, types.FunctionType) and v.__code__.co_argcount == 1) or
- (isinstance(v, types.MethodType) and v.__func__.__code__.co_argcount == 2)):
- if v.__doc__:
- try:
- doc = v.__doc__.split(' ')
- if doc[1] == ':':
- self.log.warning('%s:%d: Possible grammar rule %r defined without p_ prefix',
- v.__code__.co_filename, v.__code__.co_firstlineno, n)
- except IndexError:
- pass
-
- self.grammar = grammar
-
-# -----------------------------------------------------------------------------
-# yacc(module)
-#
-# Build a parser
-# -----------------------------------------------------------------------------
-
-def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
- check_recursion=True, optimize=False, write_tables=True, debugfile=debug_file,
- outputdir=None, debuglog=None, errorlog=None, picklefile=None):
-
- if tabmodule is None:
- tabmodule = tab_module
-
- # Reference to the parsing method of the last built parser
- global parse
-
- # If pickling is enabled, table files are not created
- if picklefile:
- write_tables = 0
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the parser
- if module:
- _items = [(k, getattr(module, k)) for k in dir(module)]
- pdict = dict(_items)
- # If no __file__ attribute is available, try to obtain it from the __module__ instead
- if '__file__' not in pdict:
- pdict['__file__'] = sys.modules[pdict['__module__']].__file__
- else:
- pdict = get_caller_module_dict(2)
-
- if outputdir is None:
- # If no output directory is set, the location of the output files
- # is determined according to the following rules:
- # - If tabmodule specifies a package, files go into that package directory
- # - Otherwise, files go in the same directory as the specifying module
- if isinstance(tabmodule, types.ModuleType):
- srcfile = tabmodule.__file__
- else:
- if '.' not in tabmodule:
- srcfile = pdict['__file__']
- else:
- parts = tabmodule.split('.')
- pkgname = '.'.join(parts[:-1])
- exec('import %s' % pkgname)
- srcfile = getattr(sys.modules[pkgname], '__file__', '')
- outputdir = os.path.dirname(srcfile)
-
- # Determine if the module is package of a package or not.
- # If so, fix the tabmodule setting so that tables load correctly
- pkg = pdict.get('__package__')
- if pkg and isinstance(tabmodule, str):
- if '.' not in tabmodule:
- tabmodule = pkg + '.' + tabmodule
-
-
-
- # Set start symbol if it's specified directly using an argument
- if start is not None:
- pdict['start'] = start
-
- # Collect parser information from the dictionary
- pinfo = ParserReflect(pdict, log=errorlog)
- pinfo.get_all()
-
- if pinfo.error:
- raise YaccError('Unable to build parser')
-
- # Check signature against table files (if any)
- signature = pinfo.signature()
-
- # Read the tables
- try:
- lr = LRTable()
- if picklefile:
- read_signature = lr.read_pickle(picklefile)
- else:
- read_signature = lr.read_table(tabmodule)
- if optimize or (read_signature == signature):
- try:
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr, pinfo.error_func)
- parse = parser.parse
- return parser
- except Exception as e:
- errorlog.warning('There was a problem loading the table file: %r', e)
- except VersionError as e:
- errorlog.warning(str(e))
- except ImportError:
- pass
-
- if debuglog is None:
- if debug:
- try:
- debuglog = PlyLogger(open(os.path.join(outputdir, debugfile), 'w'))
- except IOError as e:
- errorlog.warning("Couldn't open %r. %s" % (debugfile, e))
- debuglog = NullLogger()
- else:
- debuglog = NullLogger()
-
- debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)
-
- errors = False
-
- # Validate the parser information
- if pinfo.validate_all():
- raise YaccError('Unable to build parser')
-
- if not pinfo.error_func:
- errorlog.warning('no p_error() function is defined')
-
- # Create a grammar object
- grammar = Grammar(pinfo.tokens)
-
- # Set precedence level for terminals
- for term, assoc, level in pinfo.preclist:
- try:
- grammar.set_precedence(term, assoc, level)
- except GrammarError as e:
- errorlog.warning('%s', e)
-
- # Add productions to the grammar
- for funcname, gram in pinfo.grammar:
- file, line, prodname, syms = gram
- try:
- grammar.add_production(prodname, syms, funcname, file, line)
- except GrammarError as e:
- errorlog.error('%s', e)
- errors = True
-
- # Set the grammar start symbols
- try:
- if start is None:
- grammar.set_start(pinfo.start)
- else:
- grammar.set_start(start)
- except GrammarError as e:
- errorlog.error(str(e))
- errors = True
-
- if errors:
- raise YaccError('Unable to build parser')
-
- # Verify the grammar structure
- undefined_symbols = grammar.undefined_symbols()
- for sym, prod in undefined_symbols:
- errorlog.error('%s:%d: Symbol %r used, but not defined as a token or a rule', prod.file, prod.line, sym)
- errors = True
-
- unused_terminals = grammar.unused_terminals()
- if unused_terminals:
- debuglog.info('')
- debuglog.info('Unused terminals:')
- debuglog.info('')
- for term in unused_terminals:
- errorlog.warning('Token %r defined, but not used', term)
- debuglog.info(' %s', term)
-
- # Print out all productions to the debug log
- if debug:
- debuglog.info('')
- debuglog.info('Grammar')
- debuglog.info('')
- for n, p in enumerate(grammar.Productions):
- debuglog.info('Rule %-5d %s', n, p)
-
- # Find unused non-terminals
- unused_rules = grammar.unused_rules()
- for prod in unused_rules:
- errorlog.warning('%s:%d: Rule %r defined, but not used', prod.file, prod.line, prod.name)
-
- if len(unused_terminals) == 1:
- errorlog.warning('There is 1 unused token')
- if len(unused_terminals) > 1:
- errorlog.warning('There are %d unused tokens', len(unused_terminals))
-
- if len(unused_rules) == 1:
- errorlog.warning('There is 1 unused rule')
- if len(unused_rules) > 1:
- errorlog.warning('There are %d unused rules', len(unused_rules))
-
- if debug:
- debuglog.info('')
- debuglog.info('Terminals, with rules where they appear')
- debuglog.info('')
- terms = list(grammar.Terminals)
- terms.sort()
- for term in terms:
- debuglog.info('%-20s : %s', term, ' '.join([str(s) for s in grammar.Terminals[term]]))
-
- debuglog.info('')
- debuglog.info('Nonterminals, with rules where they appear')
- debuglog.info('')
- nonterms = list(grammar.Nonterminals)
- nonterms.sort()
- for nonterm in nonterms:
- debuglog.info('%-20s : %s', nonterm, ' '.join([str(s) for s in grammar.Nonterminals[nonterm]]))
- debuglog.info('')
-
- if check_recursion:
- unreachable = grammar.find_unreachable()
- for u in unreachable:
- errorlog.warning('Symbol %r is unreachable', u)
-
- infinite = grammar.infinite_cycles()
- for inf in infinite:
- errorlog.error('Infinite recursion detected for symbol %r', inf)
- errors = True
-
- unused_prec = grammar.unused_precedence()
- for term, assoc in unused_prec:
- errorlog.error('Precedence rule %r defined for unknown symbol %r', assoc, term)
- errors = True
-
- if errors:
- raise YaccError('Unable to build parser')
-
- # Run the LRGeneratedTable on the grammar
- if debug:
- errorlog.debug('Generating %s tables', method)
-
- lr = LRGeneratedTable(grammar, method, debuglog)
-
- if debug:
- num_sr = len(lr.sr_conflicts)
-
- # Report shift/reduce and reduce/reduce conflicts
- if num_sr == 1:
- errorlog.warning('1 shift/reduce conflict')
- elif num_sr > 1:
- errorlog.warning('%d shift/reduce conflicts', num_sr)
-
- num_rr = len(lr.rr_conflicts)
- if num_rr == 1:
- errorlog.warning('1 reduce/reduce conflict')
- elif num_rr > 1:
- errorlog.warning('%d reduce/reduce conflicts', num_rr)
-
- # Write out conflicts to the output file
- if debug and (lr.sr_conflicts or lr.rr_conflicts):
- debuglog.warning('')
- debuglog.warning('Conflicts:')
- debuglog.warning('')
-
- for state, tok, resolution in lr.sr_conflicts:
- debuglog.warning('shift/reduce conflict for %s in state %d resolved as %s', tok, state, resolution)
-
- already_reported = set()
- for state, rule, rejected in lr.rr_conflicts:
- if (state, id(rule), id(rejected)) in already_reported:
- continue
- debuglog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
- debuglog.warning('rejected rule (%s) in state %d', rejected, state)
- errorlog.warning('reduce/reduce conflict in state %d resolved using rule (%s)', state, rule)
- errorlog.warning('rejected rule (%s) in state %d', rejected, state)
- already_reported.add((state, id(rule), id(rejected)))
-
- warned_never = []
- for state, rule, rejected in lr.rr_conflicts:
- if not rejected.reduced and (rejected not in warned_never):
- debuglog.warning('Rule (%s) is never reduced', rejected)
- errorlog.warning('Rule (%s) is never reduced', rejected)
- warned_never.append(rejected)
-
- # Write the table file if requested
- if write_tables:
- try:
- lr.write_table(tabmodule, outputdir, signature)
- except IOError as e:
- errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))
-
- # Write a pickled version of the tables
- if picklefile:
- try:
- lr.pickle_table(picklefile, signature)
- except IOError as e:
- errorlog.warning("Couldn't create %r. %s" % (picklefile, e))
-
- # Build the parser
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr, pinfo.error_func)
-
- parse = parser.parse
- return parser
diff --git a/components/script/dom/bindings/codegen/ply/ply/ygen.py b/components/script/dom/bindings/codegen/ply/ply/ygen.py
deleted file mode 100644
index acf5ca1a37b..00000000000
--- a/components/script/dom/bindings/codegen/ply/ply/ygen.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# ply: ygen.py
-#
-# This is a support program that auto-generates different versions of the YACC parsing
-# function with different features removed for the purposes of performance.
-#
-# Users should edit the method LParser.parsedebug() in yacc.py. The source code
-# for that method is then used to create the other methods. See the comments in
-# yacc.py for further details.
-
-import os.path
-import shutil
-
-def get_source_range(lines, tag):
- srclines = enumerate(lines)
- start_tag = '#--! %s-start' % tag
- end_tag = '#--! %s-end' % tag
-
- for start_index, line in srclines:
- if line.strip().startswith(start_tag):
- break
-
- for end_index, line in srclines:
- if line.strip().endswith(end_tag):
- break
-
- return (start_index + 1, end_index)
-
-def filter_section(lines, tag):
- filtered_lines = []
- include = True
- tag_text = '#--! %s' % tag
- for line in lines:
- if line.strip().startswith(tag_text):
- include = not include
- elif include:
- filtered_lines.append(line)
- return filtered_lines
-
-def main():
- dirname = os.path.dirname(__file__)
- shutil.copy2(os.path.join(dirname, 'yacc.py'), os.path.join(dirname, 'yacc.py.bak'))
- with open(os.path.join(dirname, 'yacc.py'), 'r') as f:
- lines = f.readlines()
-
- parse_start, parse_end = get_source_range(lines, 'parsedebug')
- parseopt_start, parseopt_end = get_source_range(lines, 'parseopt')
- parseopt_notrack_start, parseopt_notrack_end = get_source_range(lines, 'parseopt-notrack')
-
- # Get the original source
- orig_lines = lines[parse_start:parse_end]
-
- # Filter the DEBUG sections out
- parseopt_lines = filter_section(orig_lines, 'DEBUG')
-
- # Filter the TRACKING sections out
- parseopt_notrack_lines = filter_section(parseopt_lines, 'TRACKING')
-
- # Replace the parser source sections with updated versions
- lines[parseopt_notrack_start:parseopt_notrack_end] = parseopt_notrack_lines
- lines[parseopt_start:parseopt_end] = parseopt_lines
-
- lines = [line.rstrip()+'\n' for line in lines]
- with open(os.path.join(dirname, 'yacc.py'), 'w') as f:
- f.writelines(lines)
-
- print('Updated yacc.py')
-
-if __name__ == '__main__':
- main()
-
-
-
-
-
diff --git a/components/script/dom/bindings/codegen/ply/setup.cfg b/components/script/dom/bindings/codegen/ply/setup.cfg
deleted file mode 100644
index 4ec8a167da9..00000000000
--- a/components/script/dom/bindings/codegen/ply/setup.cfg
+++ /dev/null
@@ -1,11 +0,0 @@
-[bdist_wheel]
-universal = 1
-
-[metadata]
-description-file = README.md
-
-[egg_info]
-tag_build =
-tag_date = 0
-tag_svn_revision = 0
-
diff --git a/components/script/dom/bindings/codegen/ply/setup.py b/components/script/dom/bindings/codegen/ply/setup.py
deleted file mode 100644
index ee8ccd0ccf5..00000000000
--- a/components/script/dom/bindings/codegen/ply/setup.py
+++ /dev/null
@@ -1,31 +0,0 @@
-try:
- from setuptools import setup
-except ImportError:
- from distutils.core import setup
-
-setup(name = "ply",
- description="Python Lex & Yacc",
- long_description = """
-PLY is yet another implementation of lex and yacc for Python. Some notable
-features include the fact that its implemented entirely in Python and it
-uses LALR(1) parsing which is efficient and well suited for larger grammars.
-
-PLY provides most of the standard lex/yacc features including support for empty
-productions, precedence rules, error recovery, and support for ambiguous grammars.
-
-PLY is extremely easy to use and provides very extensive error checking.
-It is compatible with both Python 2 and Python 3.
-""",
- license="""BSD""",
- version = "3.10",
- author = "David Beazley",
- author_email = "dave@dabeaz.com",
- maintainer = "David Beazley",
- maintainer_email = "dave@dabeaz.com",
- url = "http://www.dabeaz.com/ply/",
- packages = ['ply'],
- classifiers = [
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 2',
- ]
- )
diff --git a/components/script/dom/bindings/codegen/run.py b/components/script/dom/bindings/codegen/run.py
index 4d8d05f0116..a632abc1d9b 100644
--- a/components/script/dom/bindings/codegen/run.py
+++ b/components/script/dom/bindings/codegen/run.py
@@ -6,14 +6,18 @@ import os
import sys
import json
+SCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))
+SERVO_ROOT = os.path.abspath(os.path.join(SCRIPT_PATH, "..", "..", "..", "..", ".."))
+
def main():
os.chdir(os.path.join(os.path.dirname(__file__)))
- sys.path[0:0] = ["./parser", "./ply"]
+ sys.path.insert(0, os.path.join(SERVO_ROOT, "third_party", "WebIDL"))
+ sys.path.insert(0, os.path.join(SERVO_ROOT, "third_party", "ply"))
css_properties_json, out_dir = sys.argv[1:]
- doc_servo = "../../../../../target/doc/servo"
- webidls_dir = "../../webidls"
+ doc_servo = os.path.join(SERVO_ROOT, "target", "doc", "servo")
+ webidls_dir = os.path.join(SCRIPT_PATH, "..", "..", "webidls")
config_file = "Bindings.conf"
import WebIDL