diff --git a/docs/conf.py b/docs/conf.py index fadd37502..0217623d3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,8 +12,8 @@ # import os import sys -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # -- Project information ----------------------------------------------------- @@ -58,7 +58,6 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for diff --git a/docs/pyffi/object_models/any_type.rst b/docs/pyffi/object_models/any_type.rst new file mode 100644 index 000000000..2352cf2f3 --- /dev/null +++ b/docs/pyffi/object_models/any_type.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.any_type diff --git a/docs/pyffi/object_models/array_type.rst b/docs/pyffi/object_models/array_type.rst new file mode 100644 index 000000000..182748fc6 --- /dev/null +++ b/docs/pyffi/object_models/array_type.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.array_type diff --git a/docs/pyffi/object_models/basic.rst b/docs/pyffi/object_models/basic.rst new file mode 100644 index 000000000..e0a4ed237 --- /dev/null +++ b/docs/pyffi/object_models/basic.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.basic diff --git a/docs/pyffi/object_models/binary_type.rst b/docs/pyffi/object_models/binary_type.rst new file mode 100644 index 000000000..ff22ce6ef --- /dev/null +++ b/docs/pyffi/object_models/binary_type.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.binary_type diff --git a/docs/pyffi/object_models/common.rst b/docs/pyffi/object_models/common.rst new file mode 100644 index 000000000..41faeef78 --- /dev/null +++ b/docs/pyffi/object_models/common.rst @@ -0,0 +1 @@ +.. 
automodule:: pyffi.object_models.common diff --git a/docs/pyffi/object_models/editable.rst b/docs/pyffi/object_models/editable.rst new file mode 100644 index 000000000..f930d37bc --- /dev/null +++ b/docs/pyffi/object_models/editable.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.editable diff --git a/docs/pyffi/object_models/expression.rst b/docs/pyffi/object_models/expression.rst new file mode 100644 index 000000000..d745808b5 --- /dev/null +++ b/docs/pyffi/object_models/expression.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.expression diff --git a/docs/pyffi/object_models.rst b/docs/pyffi/object_models/index.rst similarity index 100% rename from docs/pyffi/object_models.rst rename to docs/pyffi/object_models/index.rst diff --git a/docs/pyffi/object_models/mex/index.rst b/docs/pyffi/object_models/mex/index.rst new file mode 100644 index 000000000..0a162271e --- /dev/null +++ b/docs/pyffi/object_models/mex/index.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.mex diff --git a/docs/pyffi/object_models/niftoolsxml/array.rst b/docs/pyffi/object_models/niftoolsxml/array.rst new file mode 100644 index 000000000..be87c0cec --- /dev/null +++ b/docs/pyffi/object_models/niftoolsxml/array.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.niftoolsxml.array diff --git a/docs/pyffi/object_models/niftoolsxml/bit_struct.rst b/docs/pyffi/object_models/niftoolsxml/bit_struct.rst new file mode 100644 index 000000000..a093be2f7 --- /dev/null +++ b/docs/pyffi/object_models/niftoolsxml/bit_struct.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.niftoolsxml.bit_struct diff --git a/docs/pyffi/object_models/niftoolsxml/enum.rst b/docs/pyffi/object_models/niftoolsxml/enum.rst new file mode 100644 index 000000000..5a928c354 --- /dev/null +++ b/docs/pyffi/object_models/niftoolsxml/enum.rst @@ -0,0 +1 @@ +.. 
automodule:: pyffi.object_models.niftoolsxml.enum diff --git a/docs/pyffi/object_models/niftoolsxml/index.rst b/docs/pyffi/object_models/niftoolsxml/index.rst new file mode 100644 index 000000000..df9a57ba9 --- /dev/null +++ b/docs/pyffi/object_models/niftoolsxml/index.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.niftoolsxml diff --git a/docs/pyffi/object_models/niftoolsxml/struct.rst b/docs/pyffi/object_models/niftoolsxml/struct.rst new file mode 100644 index 000000000..26806896c --- /dev/null +++ b/docs/pyffi/object_models/niftoolsxml/struct.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.niftoolsxml.struct_ diff --git a/docs/pyffi/object_models/simple_type.rst b/docs/pyffi/object_models/simple_type.rst new file mode 100644 index 000000000..261173d58 --- /dev/null +++ b/docs/pyffi/object_models/simple_type.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.simple_type diff --git a/docs/pyffi/object_models/xml/array.rst b/docs/pyffi/object_models/xml/array.rst new file mode 100644 index 000000000..aa9389866 --- /dev/null +++ b/docs/pyffi/object_models/xml/array.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.xml.array diff --git a/docs/pyffi/object_models/xml/bit_struct.rst b/docs/pyffi/object_models/xml/bit_struct.rst new file mode 100644 index 000000000..f967b8ba4 --- /dev/null +++ b/docs/pyffi/object_models/xml/bit_struct.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.xml.bit_struct diff --git a/docs/pyffi/object_models/xml/enum.rst b/docs/pyffi/object_models/xml/enum.rst new file mode 100644 index 000000000..201db1a29 --- /dev/null +++ b/docs/pyffi/object_models/xml/enum.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.xml.enum diff --git a/docs/pyffi/object_models/xml/index.rst b/docs/pyffi/object_models/xml/index.rst new file mode 100644 index 000000000..4425859d0 --- /dev/null +++ b/docs/pyffi/object_models/xml/index.rst @@ -0,0 +1 @@ +.. 
automodule:: pyffi.object_models.xml diff --git a/docs/pyffi/object_models/xml/struct.rst b/docs/pyffi/object_models/xml/struct.rst new file mode 100644 index 000000000..41be511d0 --- /dev/null +++ b/docs/pyffi/object_models/xml/struct.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.xml.struct_ diff --git a/docs/pyffi/object_models/xsd/index.rst b/docs/pyffi/object_models/xsd/index.rst new file mode 100644 index 000000000..b68483957 --- /dev/null +++ b/docs/pyffi/object_models/xsd/index.rst @@ -0,0 +1 @@ +.. automodule:: pyffi.object_models.xsd diff --git a/examples/metaclasses/howto_generate_class_from_xml.py b/examples/metaclasses/howto_generate_class_from_xml.py index 4e85be144..83a8a7e96 100644 --- a/examples/metaclasses/howto_generate_class_from_xml.py +++ b/examples/metaclasses/howto_generate_class_from_xml.py @@ -11,6 +11,7 @@ DEBUG = False + # This metaclass checks for the presence of an _attrs and __doc__ attribute. # Used as metaclass of _Block. class _MetaBlock(type): @@ -21,6 +22,7 @@ def __init__(cls, name, bases, dct): if '__doc__' not in dct: raise TypeError(str(cls) + ': missing __doc__ attribute') + class _Block(object, metaclass=_MetaBlock): """Base class from which all file block types are derived. @@ -31,6 +33,7 @@ class _Block(object, metaclass=_MetaBlock): interface, see MetaFileFormat.__init__ for an example. """ _attrs = () + # initialize all _attrs attributes def __init__(self): self._initAttributes(self.__class__) @@ -65,6 +68,7 @@ def _strAttributes(self, cls): s += str(name) + ' : ' + str(getattr(self, name)) + '\n' return s + # The MetaFileFormat class transforms the XML description of a file format # into a bunch of classes via the "type(name, bases, dct)" factory. 
# Because its base is type, MetaFileFormat is a metaclass: each file format @@ -87,7 +91,7 @@ def __init__(cls, name, bases, dct): # of course we should read the data from file dct['xml_file_name'] # the code below is only a proof of concept block_name = 'NiObjectNET' - block_ancestor = _Block # base of all block classes + block_ancestor = _Block # base of all block classes block_dct = {} # add docstring block_dct['__doc__'] = 'Some nif object.' @@ -95,20 +99,19 @@ def __init__(cls, name, bases, dct): # is a tuple containing all attributes: their name, default, and so on # (to be extended! probably have a tuple of Attribute instances # instead of a tuple of tuples) - block_dct['_attrs'] = ( ('name', 'noname'), ) + block_dct['_attrs'] = (('name', 'noname'),) # create class cls. setattr(cls, block_name, type(block_name, (block_ancestor,), block_dct)) - if DEBUG: print('cls.NiObjectNET: ', dir(cls.NiObjectNET)) # debug + if DEBUG: print('cls.NiObjectNET: ', dir(cls.NiObjectNET)) # debug # do another one block_name = 'NiNode' block_ancestor = getattr(cls, 'NiObjectNET') block_dct = {} block_dct['__doc__'] = 'Basic node.' - block_dct['_attrs'] = ( ('translateX', 0.0), ('translateY', 0.0), ('translateZ', 0.0) ) + block_dct['_attrs'] = (('translateX', 0.0), ('translateY', 0.0), ('translateZ', 0.0)) setattr(cls, block_name, type(block_name, (block_ancestor,), block_dct)) - if DEBUG: print('cls.NiNode: ', dir(cls.NiNode)) # debug - + if DEBUG: print('cls.NiNode: ', dir(cls.NiNode)) # debug # The NifFormat class simply processes nif.xml via MetaFileFormat @@ -117,6 +120,7 @@ def __init__(cls, name, bases, dct): class NifFormat(object, metaclass=MetaFileFormat): xml_file_name = "nif.xml" + # For example, NifFormat.NiNode is now a class representing the NiNode block # type! The _Block class, from which NifFormat.NiNode is derived, takes care # of initialization of all attributes, and printing them. 
diff --git a/examples/simple/simple.py b/examples/simple/simple.py index 7f0fb9516..e5a0239cd 100644 --- a/examples/simple/simple.py +++ b/examples/simple/simple.py @@ -1,10 +1,12 @@ import os -import pyffi.object_models.xml + import pyffi.object_models.common +import pyffi.object_models.xml + class SimpleFormat(pyffi.object_models.xml.FileFormat): xml_file_name = 'simple.xml' - xml_file_path = [ os.path.dirname(__file__) ] + xml_file_path = [os.path.dirname(__file__)] # basic types @@ -26,4 +28,4 @@ class Example: def addInteger(self, x): self.numIntegers += 1 self.integers.update_size() - self.integers[self.numIntegers-1] = x + self.integers[self.numIntegers - 1] = x diff --git a/examples/simple/testread.py b/examples/simple/testread.py index 0fd4cab93..029b8de28 100644 --- a/examples/simple/testread.py +++ b/examples/simple/testread.py @@ -1,4 +1,5 @@ from simple import SimpleFormat + x = SimpleFormat.Data() f = open('somefile.simple', 'rb') x.read(f) diff --git a/examples/simple/testwrite.py b/examples/simple/testwrite.py index d3ea9e798..569bc79a2 100644 --- a/examples/simple/testwrite.py +++ b/examples/simple/testwrite.py @@ -1,4 +1,5 @@ from simple import SimpleFormat + x = SimpleFormat.Data() x.example.num_integers = 5 x.example.integers.update_size() diff --git a/pyffi/__init__.py b/pyffi/__init__.py index 41dbf0ce0..0e9f507fd 100644 --- a/pyffi/__init__.py +++ b/pyffi/__init__.py @@ -4,10 +4,11 @@ .. 
toctree:: :maxdepth: 3 + :titlesonly: formats/index spells/index - object_models + object_models/index """ diff --git a/pyffi/formats/bsa/__init__.py b/pyffi/formats/bsa/__init__.py index 68438a589..d6bfdb0c0 100644 --- a/pyffi/formats/bsa/__init__.py +++ b/pyffi/formats/bsa/__init__.py @@ -111,15 +111,14 @@ import logging -import struct import os import re +import struct -import pyffi.object_models.xml -import pyffi.object_models.common -from pyffi.object_models.xml.basic import BasicBase import pyffi.object_models -from pyffi.utils.graph import EdgeFilter +import pyffi.object_models.common +import pyffi.object_models.xml +from pyffi.object_models.basic import BasicBase class BsaFormat(pyffi.object_models.xml.FileFormat): @@ -152,7 +151,7 @@ def get_size(self, data=None): def read(self, stream, data=None): length, = struct.unpack(' CgfFormat.EPSILON - or abs(self.m_12) > CgfFormat.EPSILON - or abs(self.m_13) > CgfFormat.EPSILON - or abs(self.m_21) > CgfFormat.EPSILON - or abs(self.m_22 - 1.0) > CgfFormat.EPSILON - or abs(self.m_23) > CgfFormat.EPSILON - or abs(self.m_31) > CgfFormat.EPSILON - or abs(self.m_32) > CgfFormat.EPSILON - or abs(self.m_33 - 1.0) > CgfFormat.EPSILON): + if (abs(self.m_11 - 1.0) > CgfFormat.EPSILON + or abs(self.m_12) > CgfFormat.EPSILON + or abs(self.m_13) > CgfFormat.EPSILON + or abs(self.m_21) > CgfFormat.EPSILON + or abs(self.m_22 - 1.0) > CgfFormat.EPSILON + or abs(self.m_23) > CgfFormat.EPSILON + or abs(self.m_31) > CgfFormat.EPSILON + or abs(self.m_32) > CgfFormat.EPSILON + or abs(self.m_33 - 1.0) > CgfFormat.EPSILON): return False else: return True @@ -1145,19 +1148,19 @@ def is_rotation(self): return False scale = self.get_scale() if abs(scale.x - 1.0) > 0.01 \ - or abs(scale.y - 1.0) > 0.01 \ - or abs(scale.z - 1.0) > 0.01: + or abs(scale.y - 1.0) > 0.01 \ + or abs(scale.z - 1.0) > 0.01: return False return True def get_determinant(self): """Return determinant.""" - return (self.m_11*self.m_22*self.m_33 - 
+self.m_12*self.m_23*self.m_31 - +self.m_13*self.m_21*self.m_32 - -self.m_31*self.m_22*self.m_13 - -self.m_21*self.m_12*self.m_33 - -self.m_11*self.m_32*self.m_23) + return (self.m_11 * self.m_22 * self.m_33 + + self.m_12 * self.m_23 * self.m_31 + + self.m_13 * self.m_21 * self.m_32 + - self.m_31 * self.m_22 * self.m_13 + - self.m_21 * self.m_12 * self.m_33 + - self.m_11 * self.m_32 * self.m_23) def get_scale(self): """Gets the scale (assuming is_scale_rotation is true!).""" @@ -1224,32 +1227,31 @@ def get_scale_quat(self): if trace > CgfFormat.EPSILON: s = (trace ** 0.5) * 2 - quat.x = -( rot.m_32 - rot.m_23 ) / s - quat.y = -( rot.m_13 - rot.m_31 ) / s - quat.z = -( rot.m_21 - rot.m_12 ) / s + quat.x = -(rot.m_32 - rot.m_23) / s + quat.y = -(rot.m_13 - rot.m_31) / s + quat.z = -(rot.m_21 - rot.m_12) / s quat.w = 0.25 * s elif rot.m_11 > max((rot.m_22, rot.m_33)): - s = (( 1.0 + rot.m_11 - rot.m_22 - rot.m_33 ) ** 0.5) * 2 + s = ((1.0 + rot.m_11 - rot.m_22 - rot.m_33) ** 0.5) * 2 quat.x = 0.25 * s - quat.y = (rot.m_21 + rot.m_12 ) / s - quat.z = (rot.m_13 + rot.m_31 ) / s - quat.w = -(rot.m_32 - rot.m_23 ) / s + quat.y = (rot.m_21 + rot.m_12) / s + quat.z = (rot.m_13 + rot.m_31) / s + quat.w = -(rot.m_32 - rot.m_23) / s elif rot.m_22 > rot.m_33: - s = (( 1.0 + rot.m_22 - rot.m_11 - rot.m_33 ) ** 0.5) * 2 - quat.x = (rot.m_21 + rot.m_12 ) / s + s = ((1.0 + rot.m_22 - rot.m_11 - rot.m_33) ** 0.5) * 2 + quat.x = (rot.m_21 + rot.m_12) / s quat.y = 0.25 * s - quat.z = (rot.m_32 + rot.m_23 ) / s - quat.w = -(rot.m_13 - rot.m_31 ) / s + quat.z = (rot.m_32 + rot.m_23) / s + quat.w = -(rot.m_13 - rot.m_31) / s else: - s = (( 1.0 + rot.m_33 - rot.m_11 - rot.m_22 ) ** 0.5) * 2 - quat.x = (rot.m_13 + rot.m_31 ) / s - quat.y = (rot.m_32 + rot.m_23 ) / s + s = ((1.0 + rot.m_33 - rot.m_11 - rot.m_22) ** 0.5) * 2 + quat.x = (rot.m_13 + rot.m_31) / s + quat.y = (rot.m_32 + rot.m_23) / s quat.z = 0.25 * s - quat.w = -(rot.m_21 - rot.m_12 ) / s + quat.w = -(rot.m_21 - rot.m_12) / s 
return scale, quat - def get_inverse(self): """Get inverse (assuming is_scale_rotation is true!).""" # transpose inverts rotation but keeps the scale @@ -1296,7 +1298,7 @@ def __mul__(self, rhs): return mat else: raise TypeError( - "do not know how to multiply Matrix33 with %s"%rhs.__class__) + "do not know how to multiply Matrix33 with %s" % rhs.__class__) def __div__(self, rhs): if isinstance(rhs, (float, int)): @@ -1313,28 +1315,28 @@ def __div__(self, rhs): return mat else: raise TypeError( - "do not know how to divide Matrix33 by %s"%rhs.__class__) + "do not know how to divide Matrix33 by %s" % rhs.__class__) def __rmul__(self, lhs): if isinstance(lhs, (float, int)): - return self * lhs # commutes + return self * lhs # commutes else: raise TypeError( - "do not know how to multiply %s with Matrix33"%lhs.__class__) + "do not know how to multiply %s with Matrix33" % lhs.__class__) def __eq__(self, mat): if not isinstance(mat, CgfFormat.Matrix33): raise TypeError( - "do not know how to compare Matrix33 and %s"%mat.__class__) + "do not know how to compare Matrix33 and %s" % mat.__class__) if (abs(self.m_11 - mat.m_11) > CgfFormat.EPSILON - or abs(self.m_12 - mat.m_12) > CgfFormat.EPSILON - or abs(self.m_13 - mat.m_13) > CgfFormat.EPSILON - or abs(self.m_21 - mat.m_21) > CgfFormat.EPSILON - or abs(self.m_22 - mat.m_22) > CgfFormat.EPSILON - or abs(self.m_23 - mat.m_23) > CgfFormat.EPSILON - or abs(self.m_31 - mat.m_31) > CgfFormat.EPSILON - or abs(self.m_32 - mat.m_32) > CgfFormat.EPSILON - or abs(self.m_33 - mat.m_33) > CgfFormat.EPSILON): + or abs(self.m_12 - mat.m_12) > CgfFormat.EPSILON + or abs(self.m_13 - mat.m_13) > CgfFormat.EPSILON + or abs(self.m_21 - mat.m_21) > CgfFormat.EPSILON + or abs(self.m_22 - mat.m_22) > CgfFormat.EPSILON + or abs(self.m_23 - mat.m_23) > CgfFormat.EPSILON + or abs(self.m_31 - mat.m_31) > CgfFormat.EPSILON + or abs(self.m_32 - mat.m_32) > CgfFormat.EPSILON + or abs(self.m_33 - mat.m_33) > CgfFormat.EPSILON): return False return 
True @@ -1349,7 +1351,7 @@ def as_list(self): [self.m_21, self.m_22, self.m_23, self.m_24], [self.m_31, self.m_32, self.m_33, self.m_34], [self.m_41, self.m_42, self.m_43, self.m_44] - ] + ] def as_tuple(self): """Return matrix as 4x4 tuple.""" @@ -1358,7 +1360,7 @@ def as_tuple(self): (self.m_21, self.m_22, self.m_23, self.m_24), (self.m_31, self.m_32, self.m_33, self.m_34), (self.m_41, self.m_42, self.m_43, self.m_44) - ) + ) def set_rows(self, row0, row1, row2, row3): """Set matrix from rows.""" @@ -1368,15 +1370,15 @@ def set_rows(self, row0, row1, row2, row3): self.m_41, self.m_42, self.m_43, self.m_44 = row3 def __str__(self): - return( - '[ %6.3f %6.3f %6.3f %6.3f ]\n' - '[ %6.3f %6.3f %6.3f %6.3f ]\n' - '[ %6.3f %6.3f %6.3f %6.3f ]\n' - '[ %6.3f %6.3f %6.3f %6.3f ]\n' - % (self.m_11, self.m_12, self.m_13, self.m_14, - self.m_21, self.m_22, self.m_23, self.m_24, - self.m_31, self.m_32, self.m_33, self.m_34, - self.m_41, self.m_42, self.m_43, self.m_44)) + return ( + '[ %6.3f %6.3f %6.3f %6.3f ]\n' + '[ %6.3f %6.3f %6.3f %6.3f ]\n' + '[ %6.3f %6.3f %6.3f %6.3f ]\n' + '[ %6.3f %6.3f %6.3f %6.3f ]\n' + % (self.m_11, self.m_12, self.m_13, self.m_14, + self.m_21, self.m_22, self.m_23, self.m_24, + self.m_31, self.m_32, self.m_33, self.m_34, + self.m_41, self.m_42, self.m_43, self.m_44)) def set_identity(self): """Set to identity matrix.""" @@ -1400,21 +1402,21 @@ def set_identity(self): def is_identity(self): """Return ``True`` if the matrix is close to identity.""" if (abs(self.m_11 - 1.0) > CgfFormat.EPSILON - or abs(self.m_12) > CgfFormat.EPSILON - or abs(self.m_13) > CgfFormat.EPSILON - or abs(self.m_14) > CgfFormat.EPSILON - or abs(self.m_21) > CgfFormat.EPSILON - or abs(self.m_22 - 1.0) > CgfFormat.EPSILON - or abs(self.m_23) > CgfFormat.EPSILON - or abs(self.m_24) > CgfFormat.EPSILON - or abs(self.m_31) > CgfFormat.EPSILON - or abs(self.m_32) > CgfFormat.EPSILON - or abs(self.m_33 - 1.0) > CgfFormat.EPSILON - or abs(self.m_34) > CgfFormat.EPSILON - or 
abs(self.m_41) > CgfFormat.EPSILON - or abs(self.m_42) > CgfFormat.EPSILON - or abs(self.m_43) > CgfFormat.EPSILON - or abs(self.m_44 - 1.0) > CgfFormat.EPSILON): + or abs(self.m_12) > CgfFormat.EPSILON + or abs(self.m_13) > CgfFormat.EPSILON + or abs(self.m_14) > CgfFormat.EPSILON + or abs(self.m_21) > CgfFormat.EPSILON + or abs(self.m_22 - 1.0) > CgfFormat.EPSILON + or abs(self.m_23) > CgfFormat.EPSILON + or abs(self.m_24) > CgfFormat.EPSILON + or abs(self.m_31) > CgfFormat.EPSILON + or abs(self.m_32) > CgfFormat.EPSILON + or abs(self.m_33 - 1.0) > CgfFormat.EPSILON + or abs(self.m_34) > CgfFormat.EPSILON + or abs(self.m_41) > CgfFormat.EPSILON + or abs(self.m_42) > CgfFormat.EPSILON + or abs(self.m_43) > CgfFormat.EPSILON + or abs(self.m_44 - 1.0) > CgfFormat.EPSILON): return False else: return True @@ -1537,6 +1539,7 @@ def set_scale_rotation_translation(self, scale, rotation, translation): def get_inverse(self, fast=True): """Calculates inverse (fast assumes is_scale_rotation_translation is True).""" + def adjoint(m, ii, jj): result = [] for i, row in enumerate(m): @@ -1546,9 +1549,10 @@ def adjoint(m, ii, jj): if j == jj: continue result[-1].append(x) return result + def determinant(m): if len(m) == 2: - return m[0][0]*m[1][1] - m[1][0]*m[0][1] + return m[0][0] * m[1][1] - m[1][0] * m[0][1] result = 0.0 for i in range(len(m)): det = determinant(adjoint(m, i, 0)) @@ -1575,10 +1579,10 @@ def determinant(m): nn = [[0.0 for i in range(4)] for j in range(4)] det = determinant(m) if abs(det) < CgfFormat.EPSILON: - raise ZeroDivisionError('cannot invert matrix:\n%s'%self) + raise ZeroDivisionError('cannot invert matrix:\n%s' % self) for i in range(4): for j in range(4): - if (i+j) & 1: + if (i + j) & 1: nn[j][i] = -determinant(adjoint(m, i, j)) / det else: nn[j][i] = determinant(adjoint(m, i, j)) / det @@ -1612,25 +1616,25 @@ def __mul__(self, x): raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)") elif isinstance(x, 
CgfFormat.Matrix44): m = CgfFormat.Matrix44() - m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41 - m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42 - m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43 - m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44 - m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41 - m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42 - m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43 - m.m_24 = self.m_21 * x.m_14 + self.m_22 * x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44 - m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41 - m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42 - m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43 - m.m_34 = self.m_31 * x.m_14 + self.m_32 * x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44 - m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41 - m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42 - m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43 - m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44 + m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41 + m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42 + m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43 + m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44 + m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41 
+ m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42 + m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43 + m.m_24 = self.m_21 * x.m_14 + self.m_22 * x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44 + m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41 + m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42 + m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43 + m.m_34 = self.m_31 * x.m_14 + self.m_32 * x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44 + m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41 + m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42 + m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43 + m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44 return m else: - raise TypeError("do not know how to multiply Matrix44 with %s"%x.__class__) + raise TypeError("do not know how to multiply Matrix44 with %s" % x.__class__) def __div__(self, x): if isinstance(x, (float, int)): @@ -1653,19 +1657,19 @@ def __div__(self, x): m.m_44 = self.m_44 / x return m else: - raise TypeError("do not know how to divide Matrix44 by %s"%x.__class__) + raise TypeError("do not know how to divide Matrix44 by %s" % x.__class__) def __rmul__(self, x): if isinstance(x, (float, int)): return self * x else: - raise TypeError("do not know how to multiply %s with Matrix44"%x.__class__) + raise TypeError("do not know how to multiply %s with Matrix44" % x.__class__) def __eq__(self, m): if isinstance(m, type(None)): return False if not isinstance(m, CgfFormat.Matrix44): - raise TypeError("do not know how to compare Matrix44 and %s"%m.__class__) + raise TypeError("do not know how to compare Matrix44 and %s" % m.__class__) if abs(self.m_11 - 
m.m_11) > CgfFormat.EPSILON: return False if abs(self.m_12 - m.m_12) > CgfFormat.EPSILON: return False if abs(self.m_13 - m.m_13) > CgfFormat.EPSILON: return False @@ -1727,7 +1731,7 @@ def __add__(self, x): m.m_44 = self.m_44 + x return m else: - raise TypeError("do not know how to add Matrix44 and %s"%x.__class__) + raise TypeError("do not know how to add Matrix44 and %s" % x.__class__) def __sub__(self, x): if isinstance(x, (CgfFormat.Matrix44)): @@ -1844,7 +1848,7 @@ def get_triangles(self): elif self.indices_data: inds = self.indices_data.indices for i in range(0, len(inds), 3): - yield inds[i], inds[i+1], inds[i+2] + yield inds[i], inds[i + 1], inds[i + 2] def get_material_indices(self): """Generator for all materials (per triangle).""" @@ -1863,7 +1867,7 @@ def get_uvs(self): yield uv.u, uv.v elif self.uvs_data: for uv in self.uvs_data.uvs: - yield uv.u, 1.0 - uv.v # OpenGL fix! + yield uv.u, 1.0 - uv.v # OpenGL fix! def get_uv_triangles(self): """Generator for all uv triangles.""" @@ -1874,7 +1878,7 @@ def get_uv_triangles(self): # Crysis: UV triangles coincide with triangles inds = self.indices_data.indices for i in range(0, len(inds), 3): - yield inds[i], inds[i+1], inds[i+2] + yield inds[i], inds[i + 1], inds[i + 2] ### DEPRECATED: USE set_geometry INSTEAD ### def set_vertices_normals(self, vertices, normals): @@ -1910,8 +1914,8 @@ def set_vertices_normals(self, vertices, normals): # set vertex coordinates and normals for Crysis for cryvert, crynorm, vert, norm in zip(self.vertices_data.vertices, - self.normals_data.normals, - vertices, normals): + self.normals_data.normals, + vertices, normals): cryvert.x = vert[0] cryvert.y = vert[1] cryvert.z = vert[2] @@ -1921,9 +1925,9 @@ def set_vertices_normals(self, vertices, normals): ### STILL WIP!!! 
### def set_geometry(self, - verticeslist = None, normalslist = None, - triangleslist = None, matlist = None, - uvslist = None, colorslist = None): + verticeslist=None, normalslist=None, + triangleslist=None, matlist=None, + uvslist=None, colorslist=None): """Set geometry data. >>> from pyffi.formats.cgf import CgfFormat @@ -2519,10 +2523,10 @@ def set_geometry(self, firstvertexindex = 0 firstindicesindex = 0 for vertices, normals, triangles, mat, uvs, colors, meshsubset in zip( - verticeslist, normalslist, - triangleslist, matlist, - uvslist, colorslist, - self.mesh_subsets.mesh_subsets): + verticeslist, normalslist, + triangleslist, matlist, + uvslist, colorslist, + self.mesh_subsets.mesh_subsets): # set Crysis mesh subset info meshsubset.first_index = firstindicesindex @@ -2547,8 +2551,7 @@ def set_geometry(self, # set vertex coordinates and normals for Crysis for cryvert, crynorm, vert, norm in zip( - self.vertices_data.vertices, self.normals_data.normals, vertices, normals): - + self.vertices_data.vertices, self.normals_data.normals, vertices, normals): cryvert.x = vert[0] cryvert.y = vert[1] cryvert.z = vert[2] @@ -2581,7 +2584,7 @@ def set_geometry(self, # set Crysis uv info for cryuv, uv in zip(self.uvs_data.uvs, uvs): cryuv.u = uv[0] - cryuv.v = 1.0 - uv[1] # OpenGL fix + cryuv.v = 1.0 - uv[1] # OpenGL fix if not colors is None: # set Far Cry color info @@ -2627,17 +2630,17 @@ def update_tangent_space(self): # set Crysis tangents info tangents, binormals, orientations = pyffi.utils.tangentspace.getTangentSpace( - vertices = list((vert.x, vert.y, vert.z) - for vert in self.vertices_data.vertices), - normals = list((norm.x, norm.y, norm.z) - for norm in self.normals_data.normals), - uvs = list((uv.u, uv.v) - for uv in self.uvs_data.uvs), - triangles = list(self.get_triangles()), - orientation = True) + vertices=list((vert.x, vert.y, vert.z) + for vert in self.vertices_data.vertices), + normals=list((norm.x, norm.y, norm.z) + for norm in 
self.normals_data.normals), + uvs=list((uv.u, uv.v) + for uv in self.uvs_data.uvs), + triangles=list(self.get_triangles()), + orientation=True) for crytangent, tan, bin, orient in zip(self.tangents_data.tangents, - tangents, binormals, orientations): + tangents, binormals, orientations): if orient > 0: tangent_w = 32767 else: @@ -2690,11 +2693,11 @@ def get_name_shader_script(self): if (script_begin != -1): if (name.count("/") != 1): # must have exactly one script - raise ValueError("%s malformed, has multiple ""/"""%name) - mtlscript = name[script_begin+1:] + raise ValueError("%s malformed, has multiple ""/""" % name) + mtlscript = name[script_begin + 1:] else: mtlscript = "" - if (shader_begin != -1): # if a shader was specified + if (shader_begin != -1): # if a shader was specified mtl_end = shader_begin # must have exactly one shader if (name.count("(") != 1): @@ -2702,20 +2705,20 @@ def get_name_shader_script(self): # like in jungle_camp_sleeping_barack # here we handle that case if name[shader_begin + 1] == "(" \ - and name[shader_begin + 1:].count("(") == 1: + and name[shader_begin + 1:].count("(") == 1: shader_begin += 1 else: - raise ValueError("%s malformed, has multiple ""("""%name) + raise ValueError("%s malformed, has multiple ""(""" % name) if (name.count(")") != 1): - raise ValueError("%s malformed, has multiple "")"""%name) + raise ValueError("%s malformed, has multiple "")""" % name) # shader name should non-empty if shader_begin > shader_end: - raise ValueError("%s malformed, ""("" comes after "")"""%name) + raise ValueError("%s malformed, ""("" comes after "")""" % name) # script must be immediately followed by the material if (script_begin != -1) and (shader_end + 1 != script_begin): - raise ValueError("%s malformed, shader not followed by script"%name) + raise ValueError("%s malformed, shader not followed by script" % name) mtlname = name[:mtl_end] - mtlshader = name[shader_begin+1:shader_end] + mtlshader = name[shader_begin + 1:shader_end] 
else: if script_begin != -1: mtlname = name[:script_begin] @@ -2758,7 +2761,7 @@ class SourceInfoChunk: def get_global_display(self): """Return a name for the block.""" idx = max(self.source_file.rfind("\\"), self.source_file.rfind("/")) - return self.source_file[idx+1:] + return self.source_file[idx + 1:] class TimingChunk: def get_global_display(self): @@ -2773,12 +2776,12 @@ def as_tuple(self): return (self.x, self.y, self.z) def norm(self): - return (self.x*self.x + self.y*self.y + self.z*self.z) ** 0.5 + return (self.x * self.x + self.y * self.y + self.z * self.z) ** 0.5 def normalize(self): norm = self.norm() if norm < CgfFormat.EPSILON: - raise ZeroDivisionError('cannot normalize vector %s'%self) + raise ZeroDivisionError('cannot normalize vector %s' % self) self.x /= norm self.y /= norm self.z /= norm @@ -2791,7 +2794,7 @@ def get_copy(self): return v def __str__(self): - return "[ %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z) + return "[ %6.3f %6.3f %6.3f ]" % (self.x, self.y, self.z) def __mul__(self, x): if isinstance(x, (float, int)): @@ -2811,7 +2814,7 @@ def __mul__(self, x): elif isinstance(x, CgfFormat.Matrix44): return self * x.get_matrix_33() + x.get_translation() else: - raise TypeError("do not know how to multiply Vector3 with %s"%x.__class__) + raise TypeError("do not know how to multiply Vector3 with %s" % x.__class__) def __rmul__(self, x): if isinstance(x, (float, int)): @@ -2821,7 +2824,7 @@ def __rmul__(self, x): v.z = x * self.z return v else: - raise TypeError("do not know how to multiply %s and Vector3"%x.__class__) + raise TypeError("do not know how to multiply %s and Vector3" % x.__class__) def __div__(self, x): if isinstance(x, (float, int)): @@ -2831,7 +2834,7 @@ def __div__(self, x): v.z = self.z / x return v else: - raise TypeError("do not know how to divide Vector3 and %s"%x.__class__) + raise TypeError("do not know how to divide Vector3 and %s" % x.__class__) def __add__(self, x): if isinstance(x, (float, int)): @@ -2847,7 
+2850,7 @@ def __add__(self, x): v.z = self.z + x.z return v else: - raise TypeError("do not know how to add Vector3 and %s"%x.__class__) + raise TypeError("do not know how to add Vector3 and %s" % x.__class__) def __radd__(self, x): if isinstance(x, (float, int)): @@ -2857,7 +2860,7 @@ def __radd__(self, x): v.z = x + self.z return v else: - raise TypeError("do not know how to add %s and Vector3"%x.__class__) + raise TypeError("do not know how to add %s and Vector3" % x.__class__) def __sub__(self, x): if isinstance(x, (float, int)): @@ -2873,7 +2876,7 @@ def __sub__(self, x): v.z = self.z - x.z return v else: - raise TypeError("do not know how to substract Vector3 and %s"%x.__class__) + raise TypeError("do not know how to substract Vector3 and %s" % x.__class__) def __rsub__(self, x): if isinstance(x, (float, int)): @@ -2883,7 +2886,7 @@ def __rsub__(self, x): v.z = x - self.z return v else: - raise TypeError("do not know how to substract %s and Vector3"%x.__class__) + raise TypeError("do not know how to substract %s and Vector3" % x.__class__) def __neg__(self): v = CgfFormat.Vector3() @@ -2896,18 +2899,18 @@ def __neg__(self): def crossproduct(self, x): if isinstance(x, CgfFormat.Vector3): v = CgfFormat.Vector3() - v.x = self.y*x.z - self.z*x.y - v.y = self.z*x.x - self.x*x.z - v.z = self.x*x.y - self.y*x.x + v.x = self.y * x.z - self.z * x.y + v.y = self.z * x.x - self.x * x.z + v.z = self.x * x.y - self.y * x.x return v else: - raise TypeError("do not know how to calculate crossproduct of Vector3 and %s"%x.__class__) + raise TypeError("do not know how to calculate crossproduct of Vector3 and %s" % x.__class__) def __eq__(self, x): if isinstance(x, type(None)): return False if not isinstance(x, CgfFormat.Vector3): - raise TypeError("do not know how to compare Vector3 and %s"%x.__class__) + raise TypeError("do not know how to compare Vector3 and %s" % x.__class__) if abs(self.x - x.x) > CgfFormat.EPSILON: return False if abs(self.y - x.y) > CgfFormat.EPSILON: 
return False if abs(self.z - x.z) > CgfFormat.EPSILON: return False diff --git a/pyffi/formats/dae/__init__.py b/pyffi/formats/dae/__init__.py index a01c03c91..43297b428 100644 --- a/pyffi/formats/dae/__init__.py +++ b/pyffi/formats/dae/__init__.py @@ -108,12 +108,12 @@ # # ***** END LICENSE BLOCK ***** -import struct import os import re import pyffi.object_models.xsd + class DaeFormat(pyffi.object_models.xsd.FileFormat): """This class implements the DAE format.""" xsdFileName = 'COLLADASchema.xsd' @@ -185,4 +185,3 @@ def write(self, stream): # implementation of dae-specific basic types # TODO - diff --git a/pyffi/formats/dds/__init__.py b/pyffi/formats/dds/__init__.py index 4d295e49b..513c4dafb 100644 --- a/pyffi/formats/dds/__init__.py +++ b/pyffi/formats/dds/__init__.py @@ -104,16 +104,17 @@ # # ***** END LICENSE BLOCK ***** -import struct import os import re +import struct -import pyffi.object_models.xml -import pyffi.object_models.common -from pyffi.object_models.xml.basic import BasicBase import pyffi.object_models +import pyffi.object_models.common +import pyffi.object_models.xml +from pyffi.object_models.basic import BasicBase from pyffi.utils.graph import EdgeFilter + class DdsFormat(pyffi.object_models.xml.FileFormat): """This class implements the DDS format.""" xml_file_name = 'dds.xml' @@ -140,6 +141,7 @@ class DdsFormat(pyffi.object_models.xml.FileFormat): class HeaderString(BasicBase): """Basic type which implements the header of a DDS file.""" + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) @@ -198,6 +200,7 @@ def version_number(version_str): class Data(pyffi.object_models.FileFormat.Data): """A class to contain the actual dds data.""" + def __init__(self, version=0x09000000): self.version = version self.header = DdsFormat.Header() @@ -221,9 +224,9 @@ def inspect_quick(self, stream): raise ValueError("Not a DDS file.") size = struct.unpack(">> for vnum in sorted(NifFormat.versions.values()): +>>> for vnum in 
sorted(NifFormat.versions_num.values()): ... print('0x%08X' % vnum) # doctest: +REPORT_UDIFF 0x02030000 0x03000000 @@ -348,37 +349,37 @@ # # ***** END LICENSE BLOCK ***** -from itertools import repeat, chain -import logging -import math # math.pi +import math # math.pi import os import re import struct -import sys import warnings import weakref +from itertools import repeat, chain import pyffi.formats.bsa import pyffi.formats.dds -import pyffi.object_models.common import pyffi.object_models -from pyffi.object_models.xml import FileFormat +import pyffi.object_models.common import pyffi.utils.inertia -from pyffi.utils.mathutils import * # XXX todo get rid of from XXX import * import pyffi.utils.mopp +import pyffi.utils.quickhull import pyffi.utils.tristrip import pyffi.utils.vertex_cache -import pyffi.utils.quickhull + +from pyffi.object_models.basic import BasicBase # XXX convert the following to absolute imports from pyffi.object_models.editable import EditableBoolComboBox -from pyffi.utils.graph import EdgeFilter -from pyffi.object_models.xml.basic import BasicBase +from pyffi.object_models.niftoolsxml import NifToolsFileFormat, Version from pyffi.object_models.xml.struct_ import StructBase +from pyffi.utils import get_single +from pyffi.utils.graph import EdgeFilter +from pyffi.utils.mathutils import * # XXX todo get rid of from XXX import * - -class NifFormat(FileFormat): - """This class contains the generated classes from the xml.""" +# TODO: Things have been renamed, must figure out what! 
+class NifFormat(NifToolsFileFormat): + """This class contains the generated classes from the niftoolsxml.""" xml_file_name = 'nif.xml' # where to look for nif.xml and in what order: NIFXMLPATH env var, # or NifFormat module directory @@ -402,20 +403,26 @@ class NifFormat(FileFormat): # basic types ulittle32 = pyffi.object_models.common.ULittle32 int = pyffi.object_models.common.Int + int64 = pyffi.object_models.common.Int64 uint = pyffi.object_models.common.UInt - byte = pyffi.object_models.common.UByte # not a typo + uint64 = pyffi.object_models.common.UInt64 + byte = pyffi.object_models.common.UByte # not a typo + # normbyte = pyffi.object_models.common.NormByte # TODO: I actually don't know how a 1 byte float is implemented + normbyte = pyffi.object_models.common.UByte char = pyffi.object_models.common.Char short = pyffi.object_models.common.Short ushort = pyffi.object_models.common.UShort float = pyffi.object_models.common.Float + hfloat = pyffi.object_models.common.HFloat BlockTypeIndex = pyffi.object_models.common.UShort - StringIndex = pyffi.object_models.common.UInt + NiFixedString = pyffi.object_models.common.UInt SizedString = pyffi.object_models.common.SizedString # implementation of nif-specific basic types class StringOffset(pyffi.object_models.common.Int): """This is just an integer with -1 as default value.""" + def __init__(self, **kwargs): pyffi.object_models.common.Int.__init__(self, **kwargs) self.set_value(-1) @@ -432,6 +439,7 @@ class bool(BasicBase, EditableBoolComboBox): >>> i.get_value() True """ + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) self.set_value(False) @@ -488,6 +496,7 @@ class Ref(BasicBase): _is_template = True _has_links = True _has_refs = True + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) self._template = kwargs.get("template") @@ -500,7 +509,7 @@ def set_value(self, value): if value is None: self._value = None else: - if self._template != None: + if self._template is not None: if not 
isinstance(value, self._template): raise TypeError( 'expected an instance of %s but got instance of %s' @@ -517,7 +526,7 @@ def get_hash(self, data=None): return None def read(self, stream, data): - self.set_value(None) # fix_links will set this field + self.set_value(None) # fix_links will set this field block_index, = struct.unpack(data._byte_order + 'i', stream.read(4)) data._link_stack.append(block_index) @@ -545,11 +554,11 @@ def fix_links(self, data): block_index = data._link_stack.pop(0) # case when there's no link if data.version >= 0x0303000D: - if block_index == -1: # link by block number + if block_index == -1: # link by block number self.set_value(None) return else: - if block_index == 0: # link by pointer + if block_index == 0: # link by pointer self.set_value(None) return # other case: look up the link and check the link type @@ -557,10 +566,10 @@ def fix_links(self, data): self.set_value(block) if self._template != None: if not isinstance(block, self._template): - #raise TypeError('expected an instance of %s but got instance of %s'%(self._template, block.__class__)) + # raise TypeError('expected an instance of %s but got instance of %s'%(self._template, block.__class__)) logging.getLogger("pyffi.nif.ref").warn( - "Expected an %s but got %s: ignoring reference." - % (self._template, block.__class__)) + "Expected an %s but got %s: ignoring reference." 
+ % (self._template, block.__class__)) def get_links(self, data=None): val = self.get_value() @@ -600,7 +609,7 @@ def replace_global_node(self, oldbranch, newbranch, if self.get_value() is oldbranch: # set_value takes care of template type self.set_value(newbranch) - #print("replacing", repr(oldbranch), "->", repr(newbranch)) + # print("replacing", repr(old_branch), "->", repr(new_branch)) if self.get_value() is not None: self.get_value().replace_global_node(oldbranch, newbranch) @@ -635,7 +644,7 @@ def set_value(self, value): def __str__(self): # avoid infinite recursion - return '%s instance at 0x%08X'%(self._value.__class__, id(self._value)) + return '%s instance at 0x%08X' % (self._value.__class__, id(self._value)) def get_refs(self, data=None): return [] @@ -644,11 +653,11 @@ def get_hash(self, data=None): return None def replace_global_node(self, oldbranch, newbranch, - edge_filter=EdgeFilter()): + edge_filter=EdgeFilter()): # overridden to avoid infinite recursion if self.get_value() is oldbranch: self.set_value(newbranch) - #print("replacing", repr(oldbranch), "->", repr(newbranch)) + # print("replacing", repr(old_branch), "->", repr(new_branch)) class LineString(BasicBase): """Basic type for strings ending in a newline character (0x0a). @@ -674,6 +683,7 @@ class LineString(BasicBase): >>> str(m) 'Hi There' """ + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) self.set_value('') @@ -688,7 +698,7 @@ def __str__(self): return pyffi.object_models.common._as_str(self._value) def get_size(self, data=None): - return len(self._value) + 1 # +1 for trailing endline + return len(self._value) + 1 # +1 for trailing endline def get_hash(self, data=None): return self.get_value() @@ -758,7 +768,7 @@ def version_string(version, modification=None): 'Joymaster HS1 Object Format - (JMI), Version 20.3.0.9' """ if version == -1 or version is None: - raise ValueError('No string for version %s.'%version) + raise ValueError('No string for version %s.' 
% version) if modification == "neosteam": if version != 0x0A010000: raise ValueError("NeoSteam must have version 0x0A010000.") @@ -770,9 +780,10 @@ def version_string(version, modification=None): if version == 0x03000300: v = "3.03" elif version <= 0x03010000: - v = "%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff) + v = "%i.%i" % ((version >> 24) & 0xff, (version >> 16) & 0xff) else: - v = "%i.%i.%i.%i"%((version >> 24) & 0xff, (version >> 16) & 0xff, (version >> 8) & 0xff, version & 0xff) + v = "%i.%i.%i.%i" % ( + (version >> 24) & 0xff, (version >> 16) & 0xff, (version >> 8) & 0xff, version & 0xff) if modification == "ndoors": return "NDSNIF....@....@...., Version %s" % v elif modification == "jmihs1": @@ -789,7 +800,7 @@ def __str__(self): def read(self, stream, data): modification = data.modification - ver, = struct.unpack(' 10000: raise ValueError('string too long (0x%08X at 0x%08X)' @@ -931,6 +943,7 @@ def get_hash(self, data=None): class FilePath(string): """A file path.""" + def get_hash(self, data=None): """Returns a case insensitive hash value.""" return self.get_value().lower() @@ -938,9 +951,10 @@ def get_hash(self, data=None): class ByteArray(BasicBase): """Array (list) of bytes. Implemented as basic type to speed up reading and also to prevent data to be dumped by __str__.""" + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) - self.set_value("".encode()) # b'' for > py25 + self.set_value("".encode()) # b'' for > py25 def get_value(self): return self._value @@ -970,6 +984,7 @@ def __str__(self): class ByteMatrix(BasicBase): """Matrix of bytes. 
Implemented as basic type to speed up reading and to prevent data being dumped by __str__.""" + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) self.set_value([]) @@ -978,14 +993,14 @@ def get_value(self): return self._value def set_value(self, value): - assert(isinstance(value, list)) + assert (isinstance(value, list)) if value: size1 = len(value[0]) for x in value: # TODO fix this for py3k - #assert(isinstance(x, basestring)) - assert(len(x) == size1) - self._value = value # should be a list of strings of bytes + # assert(isinstance(x, basestring)) + assert (len(x) == size1) + self._value = value # should be a list of strings of bytes def get_size(self, data=None): if len(self._value) == 0: @@ -994,7 +1009,7 @@ def get_size(self, data=None): return len(self._value) * len(self._value[0]) + 8 def get_hash(self, data=None): - return tuple( x.__hash__() for x in self._value ) + return tuple(x.__hash__() for x in self._value) def read(self, stream, data): size1, = struct.unpack(data._byte_order + 'I', @@ -1050,12 +1065,12 @@ def version_number(version_str): try: ver_list = [int(x) for x in version_str.split('.')] except ValueError: - return -1 # version not supported (i.e. version_str '10.0.1.3a' would trigger this) + return -1 # version not supported (i.e. version_str '10.0.1.3a' would trigger this) if len(ver_list) > 4 or len(ver_list) < 1: - return -1 # version not supported + return -1 # version not supported for ver_digit in ver_list: if (ver_digit | 0xff) > 0xff: - return -1 # version not supported + return -1 # version not supported while len(ver_list) < 4: ver_list.append(0) return (ver_list[0] << 24) + (ver_list[1] << 16) + (ver_list[2] << 8) + ver_list[3] @@ -1075,8 +1090,8 @@ class Data(pyffi.object_models.FileFormat.Data): :type version: ``int`` :ivar user_version: The nif user version. :type user_version: ``int`` - :ivar user_version_2: The nif user version 2. - :type user_version_2: ``int`` + :ivar bs_version: The nif user version 2. 
+ :type bs_version: ``int`` :ivar roots: List of root blocks. :type roots: ``list`` of L{NifFormat.NiObject} :ivar header: The nif header. @@ -1087,6 +1102,7 @@ class Data(pyffi.object_models.FileFormat.Data): :type modification: ``str`` """ + logger = logging.getLogger("pyffi.nif.data") _link_stack = None _block_dct = None _string_list = None @@ -1108,7 +1124,7 @@ def __str__(self): def get_detail_display(self): return self.__str__() - def __init__(self, version=0x04000002, user_version=0, user_version_2=0): + def __init__(self, version=0x04000002, user_version=0, bs_version=0): """Initialize nif data. By default, this creates an empty nif document of the given version and user version. @@ -1122,8 +1138,8 @@ def __init__(self, version=0x04000002, user_version=0, user_version_2=0): self._version_value_.set_value(version) self._user_version_value_ = self.VersionUInt() self._user_version_value_.set_value(user_version) - self._user_version_2_value_ = self.VersionUInt() - self._user_version_2_value_.set_value(user_version_2) + self._bs_version_value_ = self.VersionUInt() + self._bs_version_value_.set_value(bs_version) # create new header self.header = NifFormat.Header() # empty list of root blocks (this encodes the footer) @@ -1135,22 +1151,44 @@ def __init__(self, version=0x04000002, user_version=0, user_version_2=0): def _getVersion(self): return self._version_value_.get_value() - def _setVersion(self, value): + + def _setVersion(self, value, skip_attrs_update=False): self._version_value_.set_value(value) - + if not skip_attrs_update: + self._update_block_attrs() + def _getUserVersion(self): return self._user_version_value_.get_value() - def _setUserVersion(self, value): + + def _setUserVersion(self, value, skip_attrs_update=False): self._user_version_value_.set_value(value) + if not skip_attrs_update: + self._update_block_attrs() + + def _getBSVersion(self): + return self._bs_version_value_.get_value() + + def _setBSVersion(self, value, skip_attrs_update=False): + 
self._bs_version_value_.set_value(value) + if not skip_attrs_update: + self._update_block_attrs() + + def _getBSHeader(self): # TODO: Why + return self.header.bs_header - def _getUserVersion2(self): - return self._user_version_2_value_.get_value() - def _setUserVersion2(self, value): - self._user_version_2_value_.set_value(value) + def set_version(self, version: Version, skip_attrs_update=False): + self._setVersion(version.num, skip_attrs_update=True) + if version.user: + self._setUserVersion(get_single(version.user), skip_attrs_update=True) + if version.bsver: + self._setBSVersion(get_single(version.bsver), skip_attrs_update=True) + if not skip_attrs_update: + self._update_block_attrs() version = property(_getVersion, _setVersion) user_version = property(_getUserVersion, _setUserVersion) - user_version_2 = property(_getUserVersion2, _setUserVersion2) + bs_version = property(_getBSVersion, _setBSVersion) + bs_header = property(_getBSHeader) # new functions @@ -1193,11 +1231,11 @@ def inspect_version_only(self, stream): ver = NifFormat.version_number(version_str) except: raise ValueError("Nif version %s not supported." % version_str) - if not ver in list(NifFormat.versions.values()): - raise ValueError("Nif version %s not supported." % version_str) + if not ver in list(NifFormat.versions_num.values()): + raise ValueError("Nif version %s not supported. (%s)" % (version_str, NifFormat.versions_num.values())) # check version integer and user version - userver = 0 - userver2 = 0 + user_version = 0 + bs_version = 0 if ver >= 0x0303000D: ver_int = None try: @@ -1227,15 +1265,15 @@ def inspect_version_only(self, stream): # big endian! 
self._byte_order = '>' if ver >= 0x0A010000: - userver, = struct.unpack('= 10: - stream.read(4) # number of blocks - userver2, = struct.unpack('= 10: + stream.read(4) # number of blocks + bs_version, = struct.unpack('= 2: + # if verbose >= 2: # print(hdr) # set up footer @@ -1496,7 +1537,7 @@ def write(self, stream): # write the file logger.debug("Writing header") - #logger.debug("%s" % self.header) + # logger.debug("%s" % self.header) self.header.write(stream, self) for block in self.blocks: # signal top level object if block is a root object @@ -1511,8 +1552,8 @@ def write(self, stream): else: # write block type string s = NifFormat.SizedString() - assert(block_type_list[block_type_dct[block]] - == block.__class__.__name__) # debug + assert (block_type_list[block_type_dct[block]] + == block.__class__.__name__) # debug s.set_value(block.__class__.__name__) s.write(stream, self) # write block index @@ -1529,7 +1570,7 @@ def write(self, stream): ftr.write(stream, self) def _makeBlockList( - self, root, block_index_dct, block_type_list, block_type_dct): + self, root, block_index_dct, block_type_list, block_type_dct): """This is a helper function for write to set up the list of all blocks, the block index map, and the block type map. @@ -1604,6 +1645,13 @@ def _blockChildBeforeParent(block): self._makeBlockList( child, block_index_dct, block_type_list, block_type_dct) + def _update_block_attrs(self): + for block in self.blocks: + try: + block.update_version(self) + except AttributeError: + self.logger.warning("%s doesn't support version updates", block) + # extensions of generated structures class Footer: @@ -1616,13 +1664,12 @@ def read(self, stream, data): raise ValueError( "Expected trailing zero byte in footer, " "but got %i instead." 
% extrabyte) - + def write(self, stream, data): StructBase.write(self, stream, data) modification = getattr(data, 'modification', None) if modification == "neosteam": stream.write("\x00".encode("ascii")) - class Header: def has_block_type(self, block_type): @@ -1662,7 +1709,7 @@ def as_list(self): [self.m_11, self.m_12, self.m_13], [self.m_21, self.m_22, self.m_23], [self.m_31, self.m_32, self.m_33] - ] + ] def as_tuple(self): """Return matrix as 3x3 tuple.""" @@ -1670,16 +1717,16 @@ def as_tuple(self): (self.m_11, self.m_12, self.m_13), (self.m_21, self.m_22, self.m_23), (self.m_31, self.m_32, self.m_33) - ) + ) def __str__(self): return ( - "[ %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f ]\n" - % (self.m_11, self.m_12, self.m_13, - self.m_21, self.m_22, self.m_23, - self.m_31, self.m_32, self.m_33)) + "[ %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f ]\n" + % (self.m_11, self.m_12, self.m_13, + self.m_21, self.m_22, self.m_23, + self.m_31, self.m_32, self.m_33)) def set_identity(self): """Set to identity matrix.""" @@ -1695,15 +1742,15 @@ def set_identity(self): def is_identity(self): """Return ``True`` if the matrix is close to identity.""" - if (abs(self.m_11 - 1.0) > NifFormat.EPSILON - or abs(self.m_12) > NifFormat.EPSILON - or abs(self.m_13) > NifFormat.EPSILON - or abs(self.m_21) > NifFormat.EPSILON - or abs(self.m_22 - 1.0) > NifFormat.EPSILON - or abs(self.m_23) > NifFormat.EPSILON - or abs(self.m_31) > NifFormat.EPSILON - or abs(self.m_32) > NifFormat.EPSILON - or abs(self.m_33 - 1.0) > NifFormat.EPSILON): + if (abs(self.m_11 - 1.0) > NifFormat.EPSILON + or abs(self.m_12) > NifFormat.EPSILON + or abs(self.m_13) > NifFormat.EPSILON + or abs(self.m_21) > NifFormat.EPSILON + or abs(self.m_22 - 1.0) > NifFormat.EPSILON + or abs(self.m_23) > NifFormat.EPSILON + or abs(self.m_31) > NifFormat.EPSILON + or abs(self.m_32) > NifFormat.EPSILON + or abs(self.m_33 - 1.0) > NifFormat.EPSILON): return False else: 
return True @@ -1773,20 +1820,20 @@ def is_rotation(self): def get_determinant(self): """Return determinant.""" - return (self.m_11*self.m_22*self.m_33 - +self.m_12*self.m_23*self.m_31 - +self.m_13*self.m_21*self.m_32 - -self.m_31*self.m_22*self.m_13 - -self.m_21*self.m_12*self.m_33 - -self.m_11*self.m_32*self.m_23) + return (self.m_11 * self.m_22 * self.m_33 + + self.m_12 * self.m_23 * self.m_31 + + self.m_13 * self.m_21 * self.m_32 + - self.m_31 * self.m_22 * self.m_13 + - self.m_21 * self.m_12 * self.m_33 + - self.m_11 * self.m_32 * self.m_23) def get_scale(self): """Gets the scale (assuming is_scale_rotation is true!).""" scale = self.get_determinant() if scale < 0: - return -((-scale)**(1.0/3.0)) + return -((-scale) ** (1.0 / 3.0)) else: - return scale**(1.0/3.0) + return scale ** (1.0 / 3.0) def get_scale_rotation(self): """Decompose the matrix into scale and rotation, where scale is a float @@ -1826,37 +1873,36 @@ def get_scale_quat(self): if trace > NifFormat.EPSILON: s = (trace ** 0.5) * 2 - quat.x = -( rot.m_32 - rot.m_23 ) / s - quat.y = -( rot.m_13 - rot.m_31 ) / s - quat.z = -( rot.m_21 - rot.m_12 ) / s + quat.x = -(rot.m_32 - rot.m_23) / s + quat.y = -(rot.m_13 - rot.m_31) / s + quat.z = -(rot.m_21 - rot.m_12) / s quat.w = 0.25 * s elif rot.m_11 > max((rot.m_22, rot.m_33)): - s = (( 1.0 + rot.m_11 - rot.m_22 - rot.m_33 ) ** 0.5) * 2 + s = ((1.0 + rot.m_11 - rot.m_22 - rot.m_33) ** 0.5) * 2 quat.x = 0.25 * s - quat.y = (rot.m_21 + rot.m_12 ) / s - quat.z = (rot.m_13 + rot.m_31 ) / s - quat.w = -(rot.m_32 - rot.m_23 ) / s + quat.y = (rot.m_21 + rot.m_12) / s + quat.z = (rot.m_13 + rot.m_31) / s + quat.w = -(rot.m_32 - rot.m_23) / s elif rot.m_22 > rot.m_33: - s = (( 1.0 + rot.m_22 - rot.m_11 - rot.m_33 ) ** 0.5) * 2 - quat.x = (rot.m_21 + rot.m_12 ) / s + s = ((1.0 + rot.m_22 - rot.m_11 - rot.m_33) ** 0.5) * 2 + quat.x = (rot.m_21 + rot.m_12) / s quat.y = 0.25 * s - quat.z = (rot.m_32 + rot.m_23 ) / s - quat.w = -(rot.m_13 - rot.m_31 ) / s + quat.z = 
(rot.m_32 + rot.m_23) / s + quat.w = -(rot.m_13 - rot.m_31) / s else: - s = (( 1.0 + rot.m_33 - rot.m_11 - rot.m_22 ) ** 0.5) * 2 - quat.x = (rot.m_13 + rot.m_31 ) / s - quat.y = (rot.m_32 + rot.m_23 ) / s + s = ((1.0 + rot.m_33 - rot.m_11 - rot.m_22) ** 0.5) * 2 + quat.x = (rot.m_13 + rot.m_31) / s + quat.y = (rot.m_32 + rot.m_23) / s quat.z = 0.25 * s - quat.w = -(rot.m_21 - rot.m_12 ) / s + quat.w = -(rot.m_21 - rot.m_12) / s return scale, quat - def get_inverse(self): """Get inverse (assuming is_scale_rotation is true!).""" # transpose inverts rotation but keeps the scale # dividing by scale^2 inverts the scale as well - return self.get_transpose() / (self.m_11**2 + self.m_12**2 + self.m_13**2) + return self.get_transpose() / (self.m_11 ** 2 + self.m_12 ** 2 + self.m_13 ** 2) def __mul__(self, rhs): if isinstance(rhs, (float, int)): @@ -1889,7 +1935,7 @@ def __mul__(self, rhs): return mat else: raise TypeError( - "do not know how to multiply Matrix33 with %s"%rhs.__class__) + "do not know how to multiply Matrix33 with %s" % rhs.__class__) def __div__(self, rhs): if isinstance(rhs, (float, int)): @@ -1906,31 +1952,31 @@ def __div__(self, rhs): return mat else: raise TypeError( - "do not know how to divide Matrix33 by %s"%rhs.__class__) + "do not know how to divide Matrix33 by %s" % rhs.__class__) # py3k __truediv__ = __div__ def __rmul__(self, lhs): if isinstance(lhs, (float, int)): - return self * lhs # commutes + return self * lhs # commutes else: raise TypeError( - "do not know how to multiply %s with Matrix33"%lhs.__class__) + "do not know how to multiply %s with Matrix33" % lhs.__class__) def __eq__(self, mat): if not isinstance(mat, NifFormat.Matrix33): raise TypeError( - "do not know how to compare Matrix33 and %s"%mat.__class__) + "do not know how to compare Matrix33 and %s" % mat.__class__) if (abs(self.m_11 - mat.m_11) > NifFormat.EPSILON - or abs(self.m_12 - mat.m_12) > NifFormat.EPSILON - or abs(self.m_13 - mat.m_13) > NifFormat.EPSILON - or 
abs(self.m_21 - mat.m_21) > NifFormat.EPSILON - or abs(self.m_22 - mat.m_22) > NifFormat.EPSILON - or abs(self.m_23 - mat.m_23) > NifFormat.EPSILON - or abs(self.m_31 - mat.m_31) > NifFormat.EPSILON - or abs(self.m_32 - mat.m_32) > NifFormat.EPSILON - or abs(self.m_33 - mat.m_33) > NifFormat.EPSILON): + or abs(self.m_12 - mat.m_12) > NifFormat.EPSILON + or abs(self.m_13 - mat.m_13) > NifFormat.EPSILON + or abs(self.m_21 - mat.m_21) > NifFormat.EPSILON + or abs(self.m_22 - mat.m_22) > NifFormat.EPSILON + or abs(self.m_23 - mat.m_23) > NifFormat.EPSILON + or abs(self.m_31 - mat.m_31) > NifFormat.EPSILON + or abs(self.m_32 - mat.m_32) > NifFormat.EPSILON + or abs(self.m_33 - mat.m_33) > NifFormat.EPSILON): return False return True @@ -1973,7 +2019,6 @@ def sup_norm(self): for row in self.as_list()) class Vector3: - def assign(self, vec): """ Set this vector to values from another object that supports iteration or x,y,z properties """ # see if it is an iterable @@ -1988,13 +2033,13 @@ def assign(self, vec): self.y = vec.y if hasattr(vec, "z"): self.z = vec.z - + def __iter__(self): # just a convenience so we can do: x,y,z = Vector3() yield self.x yield self.y yield self.z - + def as_list(self): return [self.x, self.y, self.z] @@ -2002,12 +2047,12 @@ def as_tuple(self): return (self.x, self.y, self.z) def norm(self, sqrt=math.sqrt): - return sqrt(self.x*self.x + self.y*self.y + self.z*self.z) + return sqrt(self.x * self.x + self.y * self.y + self.z * self.z) def normalize(self, ignore_error=False, sqrt=math.sqrt): # inlining norm() to reduce overhead try: - factor = 1.0 / sqrt(self.x*self.x + self.y*self.y + self.z*self.z) + factor = 1.0 / sqrt(self.x * self.x + self.y * self.y + self.z * self.z) except ZeroDivisionError: if not ignore_error: raise @@ -2031,7 +2076,7 @@ def get_copy(self): return v def __str__(self): - return "[ %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z) + return "[ %6.3f %6.3f %6.3f ]" % (self.x, self.y, self.z) def __mul__(self, x): if 
isinstance(x, (float, int)): @@ -2051,7 +2096,7 @@ def __mul__(self, x): elif isinstance(x, NifFormat.Matrix44): return self * x.get_matrix_33() + x.get_translation() else: - raise TypeError("do not know how to multiply Vector3 with %s"%x.__class__) + raise TypeError("do not know how to multiply Vector3 with %s" % x.__class__) def __rmul__(self, x): if isinstance(x, (float, int)): @@ -2061,7 +2106,7 @@ def __rmul__(self, x): v.z = x * self.z return v else: - raise TypeError("do not know how to multiply %s and Vector3"%x.__class__) + raise TypeError("do not know how to multiply %s and Vector3" % x.__class__) def __div__(self, x): if isinstance(x, (float, int)): @@ -2071,7 +2116,7 @@ def __div__(self, x): v.z = self.z / x return v else: - raise TypeError("do not know how to divide Vector3 and %s"%x.__class__) + raise TypeError("do not know how to divide Vector3 and %s" % x.__class__) # py3k __truediv__ = __div__ @@ -2090,7 +2135,7 @@ def __add__(self, x): v.z = self.z + x.z return v else: - raise TypeError("do not know how to add Vector3 and %s"%x.__class__) + raise TypeError("do not know how to add Vector3 and %s" % x.__class__) def __radd__(self, x): if isinstance(x, (float, int)): @@ -2100,7 +2145,7 @@ def __radd__(self, x): v.z = x + self.z return v else: - raise TypeError("do not know how to add %s and Vector3"%x.__class__) + raise TypeError("do not know how to add %s and Vector3" % x.__class__) def __sub__(self, x): if isinstance(x, (float, int)): @@ -2116,7 +2161,7 @@ def __sub__(self, x): v.z = self.z - x.z return v else: - raise TypeError("do not know how to substract Vector3 and %s"%x.__class__) + raise TypeError("do not know how to substract Vector3 and %s" % x.__class__) def __rsub__(self, x): if isinstance(x, (float, int)): @@ -2126,7 +2171,7 @@ def __rsub__(self, x): v.z = x - self.z return v else: - raise TypeError("do not know how to substract %s and Vector3"%x.__class__) + raise TypeError("do not know how to substract %s and Vector3" % x.__class__) 
def __neg__(self): v = NifFormat.Vector3() @@ -2139,12 +2184,12 @@ def __neg__(self): def crossproduct(self, x): if isinstance(x, NifFormat.Vector3): v = NifFormat.Vector3() - v.x = self.y*x.z - self.z*x.y - v.y = self.z*x.x - self.x*x.z - v.z = self.x*x.y - self.y*x.x + v.x = self.y * x.z - self.z * x.y + v.y = self.z * x.x - self.x * x.z + v.z = self.x * x.y - self.y * x.x return v else: - raise TypeError("do not know how to calculate crossproduct of Vector3 and %s"%x.__class__) + raise TypeError("do not know how to calculate crossproduct of Vector3 and %s" % x.__class__) def __eq__(self, x): if isinstance(x, type(None)): @@ -2208,7 +2253,7 @@ def get_vector_3(self): return v def __str__(self): - return "[ %6.3f %6.3f %6.3f %6.3f ]"%(self.x, self.y, self.z, self.w) + return "[ %6.3f %6.3f %6.3f %6.3f ]" % (self.x, self.y, self.z, self.w) def __eq__(self, rhs): if isinstance(rhs, type(None)): @@ -2252,16 +2297,16 @@ def apply_scale(self, scale): self.dimensions.x *= scale self.dimensions.y *= scale self.dimensions.z *= scale - self.minimum_size *= scale + self.minimum_size *= scale - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" # the dimensions describe half the size of the box in each dimension # so the length of a single edge is dimension.dir * 2 mass, inertia = pyffi.utils.inertia.getMassInertiaBox( (self.dimensions.x * 2, self.dimensions.y * 2, self.dimensions.z * 2), - density = density, solid = solid) - return mass, (0,0,0), inertia + density=density, solid=solid) + return mass, (0, 0, 0), inertia class bhkCapsuleShape: def apply_scale(self, scale): @@ -2277,13 +2322,13 @@ def apply_scale(self, scale): self.second_point.y *= scale self.second_point.z *= scale - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" # 
(assumes self.radius == self.radius_1 == self.radius_2) length = (self.first_point - self.second_point).norm() mass, inertia = pyffi.utils.inertia.getMassInertiaCapsule( - radius = self.radius, length = length, - density = density, solid = solid) + radius=self.radius, length=length, + density=density, solid=solid) # now fix inertia so it is expressed in the right coordinates # need a transform that maps (0,0,length/2) on (second - first) / 2 # and (0,0,-length/2) on (first - second)/2 @@ -2292,15 +2337,15 @@ def get_mass_center_inertia(self, density = 1, solid = True): index = min(enumerate(vec1), key=lambda val: abs(val[1]))[0] vec2 = vecCrossProduct(vec1, tuple((1 if i == index else 0) for i in range(3))) - vec2 = vecscalarMul(vec2, 1/vecNorm(vec2)) + vec2 = vecscalarMul(vec2, 1 / vecNorm(vec2)) # find an orthogonal vector to vec1 and vec2 vec3 = vecCrossProduct(vec1, vec2) # get transform matrix - transform_transposed = (vec2, vec3, vec1) # this is effectively the transposed of our transform + transform_transposed = (vec2, vec3, vec1) # this is effectively the transposed of our transform transform = matTransposed(transform_transposed) # check the result (debug) - assert(vecDistance(matvecMul(transform, (0,0,1)), vec1) < 0.0001) - assert(abs(matDeterminant(transform) - 1) < 0.0001) + assert (vecDistance(matvecMul(transform, (0, 0, 1)), vec1) < 0.0001) + assert (abs(matDeterminant(transform) - 1) < 0.0001) # transform the inertia tensor inertia = matMul(matMul(transform_transposed, inertia), transform) return (mass, @@ -2323,15 +2368,15 @@ def get_transform_a_b(self, parent): chainA = parent.find_chain(self.entities[0]) chainB = parent.find_chain(self.entities[1]) # validate the chains - assert(isinstance(chainA[-1], NifFormat.bhkRigidBody)) - assert(isinstance(chainA[-2], NifFormat.NiCollisionObject)) - assert(isinstance(chainA[-3], NifFormat.NiNode)) - assert(isinstance(chainB[-1], NifFormat.bhkRigidBody)) - assert(isinstance(chainB[-2], 
NifFormat.NiCollisionObject)) - assert(isinstance(chainB[-3], NifFormat.NiNode)) + assert (isinstance(chainA[-1], NifFormat.bhkRigidBody)) + assert (isinstance(chainA[-2], NifFormat.NiCollisionObject)) + assert (isinstance(chainA[-3], NifFormat.NiNode)) + assert (isinstance(chainB[-1], NifFormat.bhkRigidBody)) + assert (isinstance(chainB[-2], NifFormat.NiCollisionObject)) + assert (isinstance(chainB[-3], NifFormat.NiNode)) # return the relative transform - return (chainA[-3].get_transform(relative_to = parent) - * chainB[-3].get_transform(relative_to = parent).get_inverse()) + return (chainA[-3].get_transform(relative_to=parent) + * chainB[-3].get_transform(relative_to=parent).get_inverse()) class bhkConvexVerticesShape: def apply_scale(self, scale): @@ -2344,14 +2389,14 @@ def apply_scale(self, scale): for n in self.normals: n.w *= scale - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" # first find an enumeration of all triangles making up the convex shape vertices, triangles = pyffi.utils.quickhull.qhull3d( [vert.as_tuple() for vert in self.vertices]) # now calculate mass, center, and inertia return pyffi.utils.inertia.get_mass_center_inertia_polyhedron( - vertices, triangles, density = density, solid = solid) + vertices, triangles, density=density, solid=solid) class bhkLimitedHingeConstraint: def apply_scale(self, scale): @@ -2370,11 +2415,11 @@ def update_a_b(self, parent): self.limited_hinge.update_a_b(self.get_transform_a_b(parent)) class bhkListShape: - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return center of gravity and area.""" - subshapes_mci = [ subshape.get_mass_center_inertia(density = density, - solid = solid) - for subshape in self.sub_shapes ] + subshapes_mci = [subshape.get_mass_center_inertia(density=density, + solid=solid) + for subshape in 
self.sub_shapes] total_mass = 0 total_center = (0, 0, 0) total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0)) @@ -2392,7 +2437,7 @@ def get_mass_center_inertia(self, density = 1, solid = True): total_inertia = matAdd(total_inertia, inertia) return total_mass, total_center, total_inertia - def add_shape(self, shape, front = False): + def add_shape(self, shape, front=False): """Add shape to list.""" # check if it's already there if shape in self.sub_shapes: return @@ -2405,7 +2450,7 @@ def add_shape(self, shape, front = False): self.sub_shapes[num_shapes] = shape else: for i in range(num_shapes, 0, -1): - self.sub_shapes[i] = self.sub_shapes[i-1] + self.sub_shapes[i] = self.sub_shapes[i - 1] self.sub_shapes[0] = shape # expand list of unknown ints as well self.num_unknown_ints = num_shapes + 1 @@ -2461,10 +2506,10 @@ def update_origin_scale(self): maxx = max(v.x for v in self.shape.data.vertices) maxy = max(v.y for v in self.shape.data.vertices) maxz = max(v.z for v in self.shape.data.vertices) - self.origin.x = minx - 0.1 - self.origin.y = miny - 0.1 - self.origin.z = minz - 0.1 - self.scale = (256*256*254) / (0.2+max([maxx-minx,maxy-miny,maxz-minz])) + self.mopp_code.offset.x = minx - 0.1 + self.mopp_code.offset.y = miny - 0.1 + self.mopp_code.offset.z = minz - 0.1 + self.scale = (256 * 256 * 254) / (0.2 + max([maxx - minx, maxy - miny, maxz - minz])) def update_mopp(self): """Update the MOPP data, scale, and origin, and welding info. 
@@ -2492,14 +2537,14 @@ def update_mopp_welding(self): material_per_vertex = [] for subshape in self.shape.get_sub_shapes(): material_per_vertex += ( - [subshape.material] * subshape.num_vertices) + [subshape.material] * subshape.num_vertices) material_per_triangle = [ material_per_vertex[hktri.triangle.v_1] for hktri in self.shape.data.triangles] # compute havok info try: origin, scale, mopp, welding_infos \ - = pyffi.utils.mopp.getMopperOriginScaleCodeWelding( + = pyffi.utils.mopp.getMopperOriginScaleCodeWelding( [vert.as_tuple() for vert in self.shape.data.vertices], [(hktri.triangle.v_1, hktri.triangle.v_2, @@ -2511,9 +2556,9 @@ def update_mopp_welding(self): else: # must use calculated scale and origin self.scale = scale - self.origin.x = origin[0] - self.origin.y = origin[1] - self.origin.z = origin[2] + self.mopp_code.offset.x = origin[0] + self.mopp_code.offset.y = origin[1] + self.mopp_code.offset.z = origin[2] # if havok's mopper failed, do a simple mopp if failed: logger.exception( @@ -2531,19 +2576,19 @@ def update_mopp_welding(self): welding_infos = [] # delete mopp and replace with new data - self.mopp_data_size = len(mopp) - self.mopp_data.update_size() + self.mopp_code.data_size = len(mopp) + self.mopp_code.data.update_size() for i, b in enumerate(mopp): - self.mopp_data[i] = b + self.mopp_code.data[i] = b # update welding information for hktri, welding_info in zip(self.shape.data.triangles, welding_infos): - hktri.welding_info = welding_info + hktri.welding_info.set_value(welding_info) def _makeSimpleMopp(self): """Make a simple mopp.""" - mopp = [] # the mopp 'assembly' script - self._q = 256*256 / self.scale # quantization factor + mopp = [] # the mopp 'assembly' script + self._q = 256 * 256 / self.scale # quantization factor # opcodes BOUNDX = 0x26 @@ -2554,14 +2599,14 @@ def _makeSimpleMopp(self): TESTZ = 0x12 # add first crude bounding box checks - self._vertsceil = [ self._moppCeil(v) for v in self.shape.data.vertices ] - self._vertsfloor = [ 
self._moppFloor(v) for v in self.shape.data.vertices ] - minx = min([ v[0] for v in self._vertsfloor ]) - miny = min([ v[1] for v in self._vertsfloor ]) - minz = min([ v[2] for v in self._vertsfloor ]) - maxx = max([ v[0] for v in self._vertsceil ]) - maxy = max([ v[1] for v in self._vertsceil ]) - maxz = max([ v[2] for v in self._vertsceil ]) + self._vertsceil = [self._moppCeil(v) for v in self.shape.data.vertices] + self._vertsfloor = [self._moppFloor(v) for v in self.shape.data.vertices] + minx = min([v[0] for v in self._vertsfloor]) + miny = min([v[1] for v in self._vertsfloor]) + minz = min([v[2] for v in self._vertsfloor]) + maxx = max([v[0] for v in self._vertsceil]) + maxy = max([v[1] for v in self._vertsceil]) + maxz = max([v[2] for v in self._vertsceil]) if minx < 0 or miny < 0 or minz < 0: raise ValueError("cannot update mopp tree with invalid origin") if maxx > 255 or maxy > 255 or maxz > 255: raise ValueError("cannot update mopp tree with invalid scale") mopp.extend([BOUNDZ, minz, maxz]) @@ -2570,44 +2615,44 @@ def _makeSimpleMopp(self): # add tree using subsequent X-Y-Z splits # (slow and no noticable difference from other simple tree so deactivated) - #tris = range(len(self.shape.data.triangles)) - #tree = self.split_triangles(tris, [[minx,maxx],[miny,maxy],[minz,maxz]]) - #mopp += self.mopp_from_tree(tree) + # tris = range(len(self.shape.data.triangles)) + # tree = self.split_triangles(tris, [[minx,maxx],[miny,maxy],[minz,maxz]]) + # mopp += self.mopp_from_tree(tree) # add a trivial tree # this prevents the player of walking through the model # but arrows may still fly through numtriangles = len(self.shape.data.triangles) i = 0x30 - for t in range(numtriangles-1): - mopp.extend([TESTZ, maxz, 0, 1, i]) - i += 1 - if i == 0x50: - mopp.extend([0x09, 0x20]) # increment triangle offset - i = 0x30 + for t in range(numtriangles - 1): + mopp.extend([TESTZ, maxz, 0, 1, i]) + i += 1 + if i == 0x50: + mopp.extend([0x09, 0x20]) # increment triangle offset + i = 
0x30 mopp.extend([i]) return mopp def _moppCeil(self, v): - moppx = int((v.x + 0.1 - self.origin.x) / self._q + 0.99999999) - moppy = int((v.y + 0.1 - self.origin.y) / self._q + 0.99999999) - moppz = int((v.z + 0.1 - self.origin.z) / self._q + 0.99999999) + moppx = int((v.x + 0.1 - self.mopp_code.offset.x) / self._q + 0.99999999) + moppy = int((v.y + 0.1 - self.mopp_code.offset.y) / self._q + 0.99999999) + moppz = int((v.z + 0.1 - self.mopp_code.offset.z) / self._q + 0.99999999) return [moppx, moppy, moppz] def _moppFloor(self, v): - moppx = int((v.x - 0.1 - self.origin.x) / self._q) - moppy = int((v.y - 0.1 - self.origin.y) / self._q) - moppz = int((v.z - 0.1 - self.origin.z) / self._q) + moppx = int((v.x - 0.1 - self.mopp_code.offset.x) / self._q) + moppy = int((v.y - 0.1 - self.mopp_code.offset.y) / self._q) + moppz = int((v.z - 0.1 - self.mopp_code.offset.z) / self._q) return [moppx, moppy, moppz] def split_triangles(self, ts, bbox, dir=0): """Direction 0=X, 1=Y, 2=Z""" - btest = [] # for bounding box tests - test = [] # for branch command + btest = [] # for bounding box tests + test = [] # for branch command # check bounding box - tris = [ t.triangle for t in self.shape.data.triangles ] - tsverts = [ tris[t].v_1 for t in ts] + [ tris[t].v_2 for t in ts] + [ tris[t].v_3 for t in ts] + tris = [t.triangle for t in self.shape.data.triangles] + tsverts = [tris[t].v_1 for t in ts] + [tris[t].v_2 for t in ts] + [tris[t].v_3 for t in ts] minx = min([self._vertsfloor[v][0] for v in tsverts]) miny = min([self._vertsfloor[v][1] for v in tsverts]) minz = min([self._vertsfloor[v][2] for v in tsverts]) @@ -2616,47 +2661,48 @@ def split_triangles(self, ts, bbox, dir=0): maxz = max([self._vertsceil[v][2] for v in tsverts]) # add bounding box checks if it's reduced in a direction if (maxx - minx < bbox[0][1] - bbox[0][0]): - btest += [ 0x26, minx, maxx ] + btest += [0x26, minx, maxx] bbox[0][0] = minx bbox[0][1] = maxx if (maxy - miny < bbox[1][1] - bbox[1][0]): - btest += [ 
0x27, miny, maxy ] + btest += [0x27, miny, maxy] bbox[1][0] = miny bbox[1][1] = maxy if (maxz - minz < bbox[2][1] - bbox[2][0]): - btest += [ 0x28, minz, maxz ] + btest += [0x28, minz, maxz] bbox[2][0] = minz bbox[2][1] = maxz # if only one triangle, no further split needed if len(ts) == 1: if ts[0] < 32: - return [ btest, [ 0x30 + ts[0] ], [], [] ] + return [btest, [0x30 + ts[0]], [], []] elif ts[0] < 256: - return [ btest, [ 0x50, ts[0] ], [], [] ] + return [btest, [0x50, ts[0]], [], []] else: - return [ btest, [ 0x51, ts[0] >> 8, ts[0] & 255 ], [], [] ] + return [btest, [0x51, ts[0] >> 8, ts[0] & 255], [], []] # sort triangles in required direction - ts.sort(key = lambda t: max(self._vertsceil[tris[t].v_1][dir], self._vertsceil[tris[t].v_2][dir], self._vertsceil[tris[t].v_3][dir])) + ts.sort(key=lambda t: max(self._vertsceil[tris[t].v_1][dir], self._vertsceil[tris[t].v_2][dir], + self._vertsceil[tris[t].v_3][dir])) # split into two - ts1 = ts[:len(ts)/2] - ts2 = ts[len(ts)/2:] + ts1 = ts[:len(ts) // 2] + ts2 = ts[len(ts) // 2:] # get maximum coordinate of small group - ts1verts = [ tris[t].v_1 for t in ts1] + [ tris[t].v_2 for t in ts1] + [ tris[t].v_3 for t in ts1] - ts2verts = [ tris[t].v_1 for t in ts2] + [ tris[t].v_2 for t in ts2] + [ tris[t].v_3 for t in ts2] + ts1verts = [tris[t].v_1 for t in ts1] + [tris[t].v_2 for t in ts1] + [tris[t].v_3 for t in ts1] + ts2verts = [tris[t].v_1 for t in ts2] + [tris[t].v_2 for t in ts2] + [tris[t].v_3 for t in ts2] ts1max = max([self._vertsceil[v][dir] for v in ts1verts]) # get minimum coordinate of large group ts2min = min([self._vertsfloor[v][dir] for v in ts2verts]) # set up test - test += [0x10+dir, ts1max, ts2min] + test += [0x10 + dir, ts1max, ts2min] # set up new bounding boxes for each subtree # make copy - bbox1 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]] - bbox2 = [[bbox[0][0],bbox[0][1]],[bbox[1][0],bbox[1][1]],[bbox[2][0],bbox[2][1]]] + bbox1 = [[bbox[0][0], bbox[0][1]],
[bbox[1][0], bbox[1][1]], [bbox[2][0], bbox[2][1]]] + bbox2 = [[bbox[0][0], bbox[0][1]], [bbox[1][0], bbox[1][1]], [bbox[2][0], bbox[2][1]]] # update bound in test direction bbox1[dir][1] = ts1max bbox2[dir][0] = ts2min # return result - nextdir = dir+1 + nextdir = dir + 1 if nextdir == 3: nextdir = 0 return [btest, test, self.split_triangles(ts1, bbox1, nextdir), self.split_triangles(ts2, bbox2, nextdir)] @@ -2667,7 +2713,7 @@ def mopp_from_tree(self, tree): submopp1 = self.mopp_from_tree(tree[2]) submopp2 = self.mopp_from_tree(tree[3]) if len(submopp1) < 256: - mopp += [ len(submopp1) ] + mopp += [len(submopp1)] mopp += submopp1 mopp += submopp2 else: @@ -2681,13 +2727,14 @@ def mopp_from_tree(self, tree): return mopp # ported and extended from NifVis/bhkMoppBvTreeShape.py - def parse_mopp(self, start = 0, depth = 0, toffset = 0, verbose = False): + def parse_mopp(self, start=0, depth=0, toffset=0, verbose=False): """The mopp data is printed to the debug channel while parsed. Returns list of indices into mopp data of the bytes processed and a list of triangle indices encountered. The verbose argument is ignored (and is deprecated). 
""" + class Message: def __init__(self): self.logger = logging.getLogger("pyffi.mopp") @@ -2706,96 +2753,96 @@ def error(self): self.logger.error(self.msg) self.msg = "" - mopp = self.mopp_data # shortcut notation - ids = [] # indices of bytes processed - tris = [] # triangle indices - i = start # current index - ret = False # set to True if an opcode signals a triangle index - while i < self.mopp_data_size and not ret: + mopp = self.mopp_code.data # shortcut notation + ids = [] # indices of bytes processed + tris = [] # triangle indices + i = start # current index + ret = False # set to True if an opcode signals a triangle index + while i < self.mopp_code.data_size and not ret: # get opcode and print it code = mopp[i] msg = Message() - msg.append("%4i:"%i + " "*depth + '0x%02X ' % code) + msg.append("%4i:" % i + " " * depth + '0x%02X ' % code) if code == 0x09: # increment triangle offset - toffset += mopp[i+1] - msg.append(mopp[i+1]) + toffset += mopp[i + 1] + msg.append(mopp[i + 1]) msg.append('%i [ triangle offset += %i, offset is now %i ]' - % (mopp[i+1], mopp[i+1], toffset)) - ids.extend([i,i+1]) + % (mopp[i + 1], mopp[i + 1], toffset)) + ids.extend([i, i + 1]) i += 2 - elif code in [ 0x0A ]: + elif code in [0x0A]: # increment triangle offset - toffset += mopp[i+1]*256 + mopp[i+2] - msg.append(mopp[i+1],mopp[i+2]) + toffset += mopp[i + 1] * 256 + mopp[i + 2] + msg.append(mopp[i + 1], mopp[i + 2]) msg.append('[ triangle offset += %i, offset is now %i ]' - % (mopp[i+1]*256 + mopp[i+2], toffset)) - ids.extend([i,i+1,i+2]) + % (mopp[i + 1] * 256 + mopp[i + 2], toffset)) + ids.extend([i, i + 1, i + 2]) i += 3 - elif code in [ 0x0B ]: + elif code in [0x0B]: # unsure about first two arguments, but the 3rd and 4th set triangle offset - toffset = 256*mopp[i+3] + mopp[i+4] - msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4]) + toffset = 256 * mopp[i + 3] + mopp[i + 4] + msg.append(mopp[i + 1], mopp[i + 2], mopp[i + 3], mopp[i + 4]) msg.append('[ triangle offset = %i ]' 
% toffset) - ids.extend([i,i+1,i+2,i+3,i+4]) + ids.extend([i, i + 1, i + 2, i + 3, i + 4]) i += 5 - elif code in range(0x30,0x50): + elif code in range(0x30, 0x50): # triangle compact - msg.append('[ triangle %i ]'%(code-0x30+toffset)) + msg.append('[ triangle %i ]' % (code - 0x30 + toffset)) ids.append(i) - tris.append(code-0x30+toffset) + tris.append(code - 0x30 + toffset) i += 1 ret = True elif code == 0x50: # triangle byte - msg.append(mopp[i+1]) - msg.append('[ triangle %i ]'%(mopp[i+1]+toffset)) - ids.extend([i,i+1]) - tris.append(mopp[i+1]+toffset) + msg.append(mopp[i + 1]) + msg.append('[ triangle %i ]' % (mopp[i + 1] + toffset)) + ids.extend([i, i + 1]) + tris.append(mopp[i + 1] + toffset) i += 2 ret = True - elif code in [ 0x51 ]: + elif code in [0x51]: # triangle short - t = mopp[i+1]*256 + mopp[i+2] + toffset - msg.append(mopp[i+1],mopp[i+2]) + t = mopp[i + 1] * 256 + mopp[i + 2] + toffset + msg.append(mopp[i + 1], mopp[i + 2]) msg.append('[ triangle %i ]' % t) - ids.extend([i,i+1,i+2]) + ids.extend([i, i + 1, i + 2]) tris.append(t) i += 3 ret = True - elif code in [ 0x53 ]: + elif code in [0x53]: # triangle short? 
- t = mopp[i+3]*256 + mopp[i+4] + toffset - msg.append(mopp[i+1],mopp[i+2],mopp[i+3],mopp[i+4]) + t = mopp[i + 3] * 256 + mopp[i + 4] + toffset + msg.append(mopp[i + 1], mopp[i + 2], mopp[i + 3], mopp[i + 4]) msg.append('[ triangle %i ]' % t) - ids.extend([i,i+1,i+2,i+3,i+4]) + ids.extend([i, i + 1, i + 2, i + 3, i + 4]) tris.append(t) i += 5 ret = True - elif code in [ 0x05 ]: + elif code in [0x05]: # byte jump - msg.append('[ jump -> %i: ]'%(i+2+mopp[i+1])) - ids.extend([i,i+1]) - i += 2+mopp[i+1] + msg.append('[ jump -> %i: ]' % (i + 2 + mopp[i + 1])) + ids.extend([i, i + 1]) + i += 2 + mopp[i + 1] - elif code in [ 0x06 ]: + elif code in [0x06]: # short jump - jump = mopp[i+1]*256 + mopp[i+2] - msg.append('[ jump -> %i: ]'%(i+3+jump)) - ids.extend([i,i+1,i+2]) - i += 3+jump + jump = mopp[i + 1] * 256 + mopp[i + 2] + msg.append('[ jump -> %i: ]' % (i + 3 + jump)) + ids.extend([i, i + 1, i + 2]) + i += 3 + jump - elif code in [0x10,0x11,0x12, 0x13,0x14,0x15, 0x16,0x17,0x18, 0x19, 0x1A, 0x1B, 0x1C]: + elif code in [0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C]: # compact if-then-else with two arguments - msg.append(mopp[i+1], mopp[i+2]) + msg.append(mopp[i + 1], mopp[i + 2]) if code == 0x10: msg.append('[ branch X') elif code == 0x11: @@ -2804,88 +2851,96 @@ def error(self): msg.append('[ branch Z') else: msg.append('[ branch ?') - msg.append('-> %i: %i: ]'%(i+4,i+4+mopp[i+3])) + msg.append('-> %i: %i: ]' % (i + 4, i + 4 + mopp[i + 3])) msg.debug() - msg.append(" " + " "*depth + 'if:') + msg.append(" " + " " * depth + 'if:') msg.debug() - idssub1, trissub1 = self.parse_mopp(start = i+4, depth = depth+1, toffset = toffset, verbose = verbose) - msg.append(" " + " "*depth + 'else:') + idssub1, trissub1 = self.parse_mopp(start=i + 4, depth=depth + 1, toffset=toffset, verbose=verbose) + msg.append(" " + " " * depth + 'else:') msg.debug() - idssub2, trissub2 = self.parse_mopp(start = i+4+mopp[i+3], depth = depth+1, toffset = toffset, 
verbose = verbose) - ids.extend([i,i+1,i+2,i+3]) + idssub2, trissub2 = self.parse_mopp(start=i + 4 + mopp[i + 3], depth=depth + 1, toffset=toffset, + verbose=verbose) + ids.extend([i, i + 1, i + 2, i + 3]) ids.extend(idssub1) ids.extend(idssub2) tris.extend(trissub1) tris.extend(trissub2) ret = True - elif code in [0x20,0x21,0x22]: + elif code in [0x20, 0x21, 0x22]: # compact if-then-else with one argument - msg.append(mopp[i+1], '[ branch ? -> %i: %i: ]'%(i+3,i+3+mopp[i+2])).debug() - msg.append(" " + " "*depth + 'if:').debug() - idssub1, trissub1 = self.parse_mopp(start = i+3, depth = depth+1, toffset = toffset, verbose = verbose) - msg.append(" " + " "*depth + 'else:').debug() - idssub2, trissub2 = self.parse_mopp(start = i+3+mopp[i+2], depth = depth+1, toffset = toffset, verbose = verbose) - ids.extend([i,i+1,i+2]) + msg.append(mopp[i + 1], '[ branch ? -> %i: %i: ]' % (i + 3, i + 3 + mopp[i + 2])).debug() + msg.append(" " + " " * depth + 'if:').debug() + idssub1, trissub1 = self.parse_mopp(start=i + 3, depth=depth + 1, toffset=toffset, verbose=verbose) + msg.append(" " + " " * depth + 'else:').debug() + idssub2, trissub2 = self.parse_mopp(start=i + 3 + mopp[i + 2], depth=depth + 1, toffset=toffset, + verbose=verbose) + ids.extend([i, i + 1, i + 2]) ids.extend(idssub1) ids.extend(idssub2) tris.extend(trissub1) tris.extend(trissub2) ret = True - elif code in [0x23,0x24,0x25]: # short if x <= a then 1; if x > b then 2; - jump1 = mopp[i+3] * 256 + mopp[i+4] - jump2 = mopp[i+5] * 256 + mopp[i+6] - msg.append(mopp[i+1], mopp[i+2], '[ branch ? 
-> %i: %i: ]'%(i+7+jump1,i+7+jump2)).debug() - msg.append(" " + " "*depth + 'if:').debug() - idssub1, trissub1 = self.parse_mopp(start = i+7+jump1, depth = depth+1, toffset = toffset, verbose = verbose) - msg.append(" " + " "*depth + 'else:').debug() - idssub2, trissub2 = self.parse_mopp(start = i+7+jump2, depth = depth+1, toffset = toffset, verbose = verbose) - ids.extend([i,i+1,i+2,i+3,i+4,i+5,i+6]) + elif code in [0x23, 0x24, 0x25]: # short if x <= a then 1; if x > b then 2; + jump1 = mopp[i + 3] * 256 + mopp[i + 4] + jump2 = mopp[i + 5] * 256 + mopp[i + 6] + msg.append(mopp[i + 1], mopp[i + 2], + '[ branch ? -> %i: %i: ]' % (i + 7 + jump1, i + 7 + jump2)).debug() + msg.append(" " + " " * depth + 'if:').debug() + idssub1, trissub1 = self.parse_mopp(start=i + 7 + jump1, depth=depth + 1, toffset=toffset, + verbose=verbose) + msg.append(" " + " " * depth + 'else:').debug() + idssub2, trissub2 = self.parse_mopp(start=i + 7 + jump2, depth=depth + 1, toffset=toffset, + verbose=verbose) + ids.extend([i, i + 1, i + 2, i + 3, i + 4, i + 5, i + 6]) ids.extend(idssub1) ids.extend(idssub2) tris.extend(trissub1) tris.extend(trissub2) ret = True - elif code in [0x26,0x27,0x28]: - msg.append(mopp[i+1], mopp[i+2]) + elif code in [0x26, 0x27, 0x28]: + msg.append(mopp[i + 1], mopp[i + 2]) if code == 0x26: msg.append('[ bound X ]') elif code == 0x27: msg.append('[ bound Y ]') elif code == 0x28: msg.append('[ bound Z ]') - ids.extend([i,i+1,i+2]) + ids.extend([i, i + 1, i + 2]) i += 3 elif code in [0x01, 0x02, 0x03, 0x04]: - msg.append(mopp[i+1], mopp[i+2], mopp[i+3], '[ bound XYZ? ]') - ids.extend([i,i+1,i+2,i+3]) + msg.append(mopp[i + 1], mopp[i + 2], mopp[i + 3], '[ bound XYZ? 
]') + ids.extend([i, i + 1, i + 2, i + 3]) i += 4 else: - msg.append("unknown mopp code 0x%02X"%code).error() + msg.append("unknown mopp code 0x%02X" % code).error() msg.append("following bytes are").debug() - extrabytes = [mopp[j] for j in range(i+1,min(self.mopp_data_size,i+10))] - extraindex = [j for j in range(i+1,min(self.mopp_data_size,i+10))] + extrabytes = [mopp[j] for j in range(i + 1, min(self.mopp_code.data_size, i + 10))] + extraindex = [j for j in range(i + 1, min(self.mopp_code.data_size, i + 10))] msg.append(extrabytes).debug() for b, j in zip(extrabytes, extraindex): - if j+b+1 < self.mopp_data_size: - msg.append("opcode after jump %i is 0x%02X"%(b,mopp[j+b+1]), [mopp[k] for k in range(j+b+2,min(self.mopp_data_size,j+b+11))]).debug() - raise ValueError("unknown mopp opcode 0x%02X"%code) + if j + b + 1 < self.mopp_code.data_size: + msg.append("opcode after jump %i is 0x%02X" % (b, mopp[j + b + 1]), [mopp[k] for k in + range(j + b + 2, + min(self.mopp_code.data_size, + j + b + 11))]).debug() + raise ValueError("unknown mopp opcode 0x%02X" % code) msg.debug() return ids, tris class bhkMultiSphereShape: - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return center of gravity and area.""" subshapes_mci = [ (mass, center, inertia) for (mass, inertia), center in - zip( ( pyffi.utils.inertia.getMassInertiaSphere(radius = sphere.radius, - density = density, solid = solid) - for sphere in self.spheres ), - ( sphere.center.as_tuple() for sphere in self.spheres ) ) ] + zip((pyffi.utils.inertia.getMassInertiaSphere(radius=sphere.radius, + density=density, solid=solid) + for sphere in self.spheres), + (sphere.center.as_tuple() for sphere in self.spheres))] total_mass = 0 total_center = (0, 0, 0) total_inertia = ((0, 0, 0), (0, 0, 0), (0, 0, 0)) @@ -2927,7 +2982,7 @@ def get_interchangeable_packed_shape(self): normals=normals, vertices=vertices, # default layer 1 (static collision) - 
layer=self.data_layers[0].layer if self.data_layers else 1, + layer=self.filters[0].layer if self.filters else 1, material=self.material.material) # set scale packed.scale_copy.x = 1.0 @@ -2939,16 +2994,16 @@ def get_interchangeable_packed_shape(self): # return result return packed - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" # first find mass, center, and inertia of all shapes subshapes_mci = [] for data in self.strips_data: subshapes_mci.append( pyffi.utils.inertia.get_mass_center_inertia_polyhedron( - [ vert.as_tuple() for vert in data.vertices ], - [ triangle for triangle in data.get_triangles() ], - density = density, solid = solid)) + [vert.as_tuple() for vert in data.vertices], + [triangle for triangle in data.get_triangles()], + density=density, solid=solid)) # now calculate mass, center, and inertia total_mass = 0 @@ -2962,15 +3017,15 @@ def get_mass_center_inertia(self, density = 1, solid = True): return total_mass, total_center, total_inertia class bhkPackedNiTriStripsShape: - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" return pyffi.utils.inertia.get_mass_center_inertia_polyhedron( - [ vert.as_tuple() for vert in self.data.vertices ], - [ ( hktriangle.triangle.v_1, - hktriangle.triangle.v_2, - hktriangle.triangle.v_3 ) - for hktriangle in self.data.triangles ], - density = density, solid = solid) + [vert.as_tuple() for vert in self.data.vertices], + [(hktriangle.triangle.v_1, + hktriangle.triangle.v_2, + hktriangle.triangle.v_3) + for hktriangle in self.data.triangles], + density=density, solid=solid) def get_sub_shapes(self): """Return sub shapes (works for both Oblivion and Fallout 3).""" @@ -2979,7 +3034,7 @@ def get_sub_shapes(self): else: return self.sub_shapes - def add_shape(self, triangles, normals, 
vertices, layer=0, material=0): + def add_shape(self, triangles: list[tuple[int, int, int]], normals, vertices, layer=0, material=0): """Pack the given geometry.""" # add the shape data if not self.data: @@ -2998,33 +3053,33 @@ def add_shape(self, triangles, normals, vertices, layer=0, material=0): data.sub_shapes[num_shapes].layer = layer data.sub_shapes[num_shapes].num_vertices = len(vertices) data.sub_shapes[num_shapes].material.material = material - firsttriangle = data.num_triangles - firstvertex = data.num_vertices + first_triangle = data.num_triangles + first_vertex = data.num_vertices data.num_triangles += len(triangles) data.triangles.update_size() - for tdata, t, n in zip(data.triangles[firsttriangle:], triangles, normals): - tdata.triangle.v_1 = t[0] + firstvertex - tdata.triangle.v_2 = t[1] + firstvertex - tdata.triangle.v_3 = t[2] + firstvertex - tdata.normal.x = n[0] - tdata.normal.y = n[1] - tdata.normal.z = n[2] + for tri_data, t, n in zip(data.triangles[first_triangle:], triangles, normals): + tri_data.triangle.v_1 = t[0] + first_vertex + tri_data.triangle.v_2 = t[1] + first_vertex + tri_data.triangle.v_3 = t[2] + first_vertex + tri_data.normal.x = n[0] + tri_data.normal.y = n[1] + tri_data.normal.z = n[2] data.num_vertices += len(vertices) data.vertices.update_size() - for vdata, v in zip(data.vertices[firstvertex:], vertices): - vdata.x = v[0] / 7.0 - vdata.y = v[1] / 7.0 - vdata.z = v[2] / 7.0 - + for vert_data, v in zip(data.vertices[first_vertex:], vertices): + vert_data.x = v[0] / 7.0 + vert_data.y = v[1] / 7.0 + vert_data.z = v[2] / 7.0 + 
def get_vertex_hash_generator( self, - vertexprecision=3, subshape_index=None): + vertex_precision=3, subshape_index=None): """Generator which produces a tuple of integers for each vertex to ease detection of duplicate/close enough to remove vertices. The precision parameter denote number of significant digits behind the comma. 
- For vertexprecision, 3 seems usually enough (maybe we'll + For vertex_precision, 3 seems usually enough (maybe we'll have to increase this at some point). >>> shape = NifFormat.bhkPackedNiTriStripsShape() @@ -3052,16 +3107,16 @@ def get_vertex_hash_generator( >>> list(shape.get_vertex_hash_generator(subshape_index=1)) [(2000, 2100, 2200)] - :param vertexprecision: Precision to be used for vertices. - :type vertexprecision: float + :param vertex_precision: Precision to be used for vertices. + :type vertex_precision: float :return: A generator yielding a hash value for each vertex. """ - vertexfactor = 10 ** vertexprecision + vertexfactor = 10 ** vertex_precision if subshape_index is None: for matid, vert in zip(chain(*[repeat(i, sub_shape.num_vertices) - for i, sub_shape - in enumerate(self.get_sub_shapes())]), - self.data.vertices): + for i, sub_shape + in enumerate(self.get_sub_shapes())]), + self.data.vertices): yield (matid, tuple(float_to_int(value * vertexfactor) for value in vert.as_list())) else: @@ -3070,9 +3125,9 @@ def get_vertex_hash_generator( self.get_sub_shapes()): first_vertex += subshape.num_vertices for vert_index in range( - first_vertex, - first_vertex - + self.get_sub_shapes()[subshape_index].num_vertices): + first_vertex, + first_vertex + + self.get_sub_shapes()[subshape_index].num_vertices): yield tuple(float_to_int(value * vertexfactor) for value in self.data.vertices[vert_index].as_list()) @@ -3160,28 +3215,28 @@ class bhkRigidBody: def apply_scale(self, scale): """Apply scale factor on data.""" # apply scale on transform - self.translation.x *= scale - self.translation.y *= scale - self.translation.z *= scale + self.rigid_body_info.translation.x *= scale + self.rigid_body_info.translation.y *= scale + self.rigid_body_info.translation.z *= scale # apply scale on center of gravity - self.center.x *= scale - self.center.y *= scale - self.center.z *= scale + self.rigid_body_info.center.x *= scale + self.rigid_body_info.center.y *= scale + 
self.rigid_body_info.center.z *= scale # apply scale on inertia tensor - self.inertia.m_11 *= (scale ** 2) - self.inertia.m_12 *= (scale ** 2) - self.inertia.m_13 *= (scale ** 2) - self.inertia.m_14 *= (scale ** 2) - self.inertia.m_21 *= (scale ** 2) - self.inertia.m_22 *= (scale ** 2) - self.inertia.m_23 *= (scale ** 2) - self.inertia.m_24 *= (scale ** 2) - self.inertia.m_31 *= (scale ** 2) - self.inertia.m_32 *= (scale ** 2) - self.inertia.m_33 *= (scale ** 2) - self.inertia.m_34 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_11 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_12 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_13 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_14 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_21 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_22 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_23 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_24 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_31 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_32 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_33 *= (scale ** 2) + self.rigid_body_info.inertia_tensor.m_34 *= (scale ** 2) def update_mass_center_inertia(self, density=1, solid=True, mass=None): """Look at all the objects under this rigid body and update the mass, @@ -3194,35 +3249,35 @@ def update_mass_center_inertia(self, density=1, solid=True, mass=None): density=density, solid=solid) self.mass = calc_mass - self.center.x, self.center.y, self.center.z = center - self.inertia.m_11 = inertia[0][0] - self.inertia.m_12 = inertia[0][1] - self.inertia.m_13 = inertia[0][2] - self.inertia.m_14 = 0 - self.inertia.m_21 = inertia[1][0] - self.inertia.m_22 = inertia[1][1] - self.inertia.m_23 = inertia[1][2] - self.inertia.m_24 = 0 - self.inertia.m_31 = inertia[2][0] - self.inertia.m_32 = inertia[2][1] - self.inertia.m_33 = inertia[2][2] - self.inertia.m_34 = 0 + self.rigid_body_info.center.x, 
self.rigid_body_info.center.y, self.rigid_body_info.center.z = center + self.rigid_body_info.inertia_tensor.m_11 = inertia[0][0] + self.rigid_body_info.inertia_tensor.m_12 = inertia[0][1] + self.rigid_body_info.inertia_tensor.m_13 = inertia[0][2] + self.rigid_body_info.inertia_tensor.m_14 = 0 + self.rigid_body_info.inertia_tensor.m_21 = inertia[1][0] + self.rigid_body_info.inertia_tensor.m_22 = inertia[1][1] + self.rigid_body_info.inertia_tensor.m_23 = inertia[1][2] + self.rigid_body_info.inertia_tensor.m_24 = 0 + self.rigid_body_info.inertia_tensor.m_31 = inertia[2][0] + self.rigid_body_info.inertia_tensor.m_32 = inertia[2][1] + self.rigid_body_info.inertia_tensor.m_33 = inertia[2][2] + self.rigid_body_info.inertia_tensor.m_34 = 0 if not mass is None: mass_correction = mass / calc_mass if calc_mass != 0 else 1 - self.mass = mass - self.inertia.m_11 *= mass_correction - self.inertia.m_12 *= mass_correction - self.inertia.m_13 *= mass_correction - self.inertia.m_14 *= mass_correction - self.inertia.m_21 *= mass_correction - self.inertia.m_22 *= mass_correction - self.inertia.m_23 *= mass_correction - self.inertia.m_24 *= mass_correction - self.inertia.m_31 *= mass_correction - self.inertia.m_32 *= mass_correction - self.inertia.m_33 *= mass_correction - self.inertia.m_34 *= mass_correction + self.rigid_body_info.mass = mass + self.rigid_body_info.inertia_tensor.m_11 *= mass_correction + self.rigid_body_info.inertia_tensor.m_12 *= mass_correction + self.rigid_body_info.inertia_tensor.m_13 *= mass_correction + self.rigid_body_info.inertia_tensor.m_14 *= mass_correction + self.rigid_body_info.inertia_tensor.m_21 *= mass_correction + self.rigid_body_info.inertia_tensor.m_22 *= mass_correction + self.rigid_body_info.inertia_tensor.m_23 *= mass_correction + self.rigid_body_info.inertia_tensor.m_24 *= mass_correction + self.rigid_body_info.inertia_tensor.m_31 *= mass_correction + self.rigid_body_info.inertia_tensor.m_32 *= mass_correction + 
self.rigid_body_info.inertia_tensor.m_33 *= mass_correction + self.rigid_body_info.inertia_tensor.m_34 *= mass_correction class bhkSphereShape: def apply_scale(self, scale): @@ -3230,13 +3285,13 @@ def apply_scale(self, scale): # apply scale on dimensions self.radius *= scale - def get_mass_center_inertia(self, density = 1, solid = True): + def get_mass_center_inertia(self, density=1, solid=True): """Return mass, center, and inertia tensor.""" # the dimensions describe half the size of the box in each dimension # so the length of a single edge is dimension.dir * 2 mass, inertia = pyffi.utils.inertia.getMassInertiaSphere( - self.radius, density = density, solid = solid) - return mass, (0,0,0), inertia + self.radius, density=density, solid=solid) + return mass, (0, 0, 0), inertia class bhkTransformShape: def apply_scale(self, scale): @@ -3254,7 +3309,7 @@ def get_mass_center_inertia(self, density=1, solid=True): # get transform matrix and translation vector transform = self.transform.get_matrix_33().as_tuple() transform_transposed = matTransposed(transform) - translation = ( self.transform.m_14, self.transform.m_24, self.transform.m_34 ) + translation = (self.transform.m_14, self.transform.m_24, self.transform.m_34) # transform center and inertia center = matvecMul(transform, center) center = vecAdd(center, translation) @@ -3278,16 +3333,16 @@ def get_dismember_partitions(self): triangles = [] trianglepartmap = [] for bodypart, skinpartblock in zip( - self.partitions, self.skin_partition.skin_partition_blocks): + self.partitions, self.skin_partition.skin_partition_blocks): part_triangles = list(skinpartblock.get_mapped_triangles()) triangles += part_triangles trianglepartmap += [bodypart.body_part] * len(part_triangles) return triangles, trianglepartmap - class ControllerLink: + class ControlledBlock: """ >>> from pyffi.formats.nif import NifFormat - >>> link = NifFormat.ControllerLink() + >>> link = NifFormat.ControlledBlock() >>> link.node_name_offset -1 >>> 
link.set_node_name("Bip01") @@ -3305,6 +3360,7 @@ class ControllerLink: >>> link.node_name b'Bip01 Tail' """ + def _get_string(self, offset): """A wrapper around string_palette.palette.get_string. Used by get_node_name etc. Returns the string at given offset.""" @@ -3330,7 +3386,7 @@ def get_node_name(self): >>> # a doctest >>> from pyffi.formats.nif import NifFormat - >>> link = NifFormat.ControllerLink() + >>> link = NifFormat.ControlledBlock() >>> link.string_palette = NifFormat.NiStringPalette() >>> palette = link.string_palette.palette >>> link.node_name_offset = palette.add_string("Bip01") @@ -3339,7 +3395,7 @@ def get_node_name(self): >>> # another doctest >>> from pyffi.formats.nif import NifFormat - >>> link = NifFormat.ControllerLink() + >>> link = NifFormat.ControlledBlock() >>> link.node_name = "Bip01" >>> link.get_node_name() b'Bip01' @@ -3379,25 +3435,25 @@ def set_controller_type(self, text): self.controller_type = text self.controller_type_offset = self._add_string(text) - def get_variable_1(self): - if self.variable_1: - return self.variable_1 + def get_controller_id(self): + if self.controller_id: + return self.controller_id else: - return self._get_string(self.variable_1_offset) + return self._get_string(self.controller_id_offset) - def set_variable_1(self, text): - self.variable_1 = text - self.variable_1_offset = self._add_string(text) + def set_controller_id(self, text): + self.controller_id = text + self.controller_id_offset = self._add_string(text) - def get_variable_2(self): - if self.variable_2: - return self.variable_2 + def get_interpolator_id(self): + if self.interpolator_id: + return self.interpolator_id else: - return self._get_string(self.variable_2_offset) + return self._get_string(self.interpolator_id_offset) - def set_variable_2(self, text): - self.variable_2 = text - self.variable_2_offset = self._add_string(text) + def set_interpolator_id(self, text): + self.interpolator_id = text + self.interpolator_id_offset = 
self._add_string(text) class hkPackedNiTriStripsData: def apply_scale(self, scale): @@ -3409,14 +3465,14 @@ def apply_scale(self, scale): vert.y *= scale vert.z *= scale - class InertiaMatrix: + class hkMatrix3: def as_list(self): """Return matrix as 3x3 list.""" return [ [self.m_11, self.m_12, self.m_13], [self.m_21, self.m_22, self.m_23], [self.m_31, self.m_32, self.m_33] - ] + ] def as_tuple(self): """Return matrix as 3x3 tuple.""" @@ -3424,16 +3480,16 @@ def as_tuple(self): (self.m_11, self.m_12, self.m_13), (self.m_21, self.m_22, self.m_23), (self.m_31, self.m_32, self.m_33) - ) + ) def __str__(self): - return( - "[ %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f ]\n" - % (self.m_11, self.m_12, self.m_13, - self.m_21, self.m_22, self.m_23, - self.m_31, self.m_32, self.m_33)) + return ( + "[ %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f ]\n" + % (self.m_11, self.m_12, self.m_13, + self.m_21, self.m_22, self.m_23, + self.m_31, self.m_32, self.m_33)) def set_identity(self): """Set to identity matrix.""" @@ -3452,22 +3508,22 @@ def set_identity(self): def is_identity(self): """Return ``True`` if the matrix is close to identity.""" - if (abs(self.m_11 - 1.0) > NifFormat.EPSILON - or abs(self.m_12) > NifFormat.EPSILON - or abs(self.m_13) > NifFormat.EPSILON - or abs(self.m_21) > NifFormat.EPSILON - or abs(self.m_22 - 1.0) > NifFormat.EPSILON - or abs(self.m_23) > NifFormat.EPSILON - or abs(self.m_31) > NifFormat.EPSILON - or abs(self.m_32) > NifFormat.EPSILON - or abs(self.m_33 - 1.0) > NifFormat.EPSILON): + if (abs(self.m_11 - 1.0) > NifFormat.EPSILON + or abs(self.m_12) > NifFormat.EPSILON + or abs(self.m_13) > NifFormat.EPSILON + or abs(self.m_21) > NifFormat.EPSILON + or abs(self.m_22 - 1.0) > NifFormat.EPSILON + or abs(self.m_23) > NifFormat.EPSILON + or abs(self.m_31) > NifFormat.EPSILON + or abs(self.m_32) > NifFormat.EPSILON + or abs(self.m_33 - 1.0) > NifFormat.EPSILON): return False else: return True 
def get_copy(self): """Return a copy of the matrix.""" - mat = NifFormat.InertiaMatrix() + mat = NifFormat.hkMatrix3() mat.m_11 = self.m_11 mat.m_12 = self.m_12 mat.m_13 = self.m_13 @@ -3483,18 +3539,18 @@ def get_copy(self): return mat def __eq__(self, mat): - if not isinstance(mat, NifFormat.InertiaMatrix): + if not isinstance(mat, NifFormat.hkMatrix3): raise TypeError( - "do not know how to compare InertiaMatrix and %s"%mat.__class__) + "do not know how to compare hkMatrix3 and %s" % mat.__class__) if (abs(self.m_11 - mat.m_11) > NifFormat.EPSILON - or abs(self.m_12 - mat.m_12) > NifFormat.EPSILON - or abs(self.m_13 - mat.m_13) > NifFormat.EPSILON - or abs(self.m_21 - mat.m_21) > NifFormat.EPSILON - or abs(self.m_22 - mat.m_22) > NifFormat.EPSILON - or abs(self.m_23 - mat.m_23) > NifFormat.EPSILON - or abs(self.m_31 - mat.m_31) > NifFormat.EPSILON - or abs(self.m_32 - mat.m_32) > NifFormat.EPSILON - or abs(self.m_33 - mat.m_33) > NifFormat.EPSILON): + or abs(self.m_12 - mat.m_12) > NifFormat.EPSILON + or abs(self.m_13 - mat.m_13) > NifFormat.EPSILON + or abs(self.m_21 - mat.m_21) > NifFormat.EPSILON + or abs(self.m_22 - mat.m_22) > NifFormat.EPSILON + or abs(self.m_23 - mat.m_23) > NifFormat.EPSILON + or abs(self.m_31 - mat.m_31) > NifFormat.EPSILON + or abs(self.m_32 - mat.m_32) > NifFormat.EPSILON + or abs(self.m_33 - mat.m_33) > NifFormat.EPSILON): return False return True @@ -3511,7 +3567,7 @@ def update_a_b(self, transform): self.pivot_b.z = pivot_b.z # axes (rotation only) transform = transform.get_matrix_33() - axle_b = self.axle_a.get_vector_3() * transform + axle_b = self.axle_a.get_vector_3() * transform perp_2_axle_in_b_2 = self.perp_2_axle_in_a_2.get_vector_3() * transform self.axle_b.x = axle_b.x self.axle_b.y = axle_b.y @@ -3528,7 +3584,7 @@ def as_list(self): [self.m_21, self.m_22, self.m_23, self.m_24], [self.m_31, self.m_32, self.m_33, self.m_34], [self.m_41, self.m_42, self.m_43, self.m_44] - ] + ] def as_tuple(self): """Return matrix as 4x4 
tuple.""" @@ -3537,7 +3593,7 @@ def as_tuple(self): (self.m_21, self.m_22, self.m_23, self.m_24), (self.m_31, self.m_32, self.m_33, self.m_34), (self.m_41, self.m_42, self.m_43, self.m_44) - ) + ) def set_rows(self, row0, row1, row2, row3): """Set matrix from rows.""" @@ -3547,15 +3603,15 @@ def set_rows(self, row0, row1, row2, row3): self.m_41, self.m_42, self.m_43, self.m_44 = row3 def __str__(self): - return( - "[ %6.3f %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f %6.3f ]\n" - "[ %6.3f %6.3f %6.3f %6.3f ]\n" - % (self.m_11, self.m_12, self.m_13, self.m_14, - self.m_21, self.m_22, self.m_23, self.m_24, - self.m_31, self.m_32, self.m_33, self.m_34, - self.m_41, self.m_42, self.m_43, self.m_44)) + return ( + "[ %6.3f %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f %6.3f ]\n" + "[ %6.3f %6.3f %6.3f %6.3f ]\n" + % (self.m_11, self.m_12, self.m_13, self.m_14, + self.m_21, self.m_22, self.m_23, self.m_24, + self.m_31, self.m_32, self.m_33, self.m_34, + self.m_41, self.m_42, self.m_43, self.m_44)) def set_identity(self): """Set to identity matrix.""" @@ -3579,21 +3635,21 @@ def set_identity(self): def is_identity(self): """Return ``True`` if the matrix is close to identity.""" if (abs(self.m_11 - 1.0) > NifFormat.EPSILON - or abs(self.m_12) > NifFormat.EPSILON - or abs(self.m_13) > NifFormat.EPSILON - or abs(self.m_14) > NifFormat.EPSILON - or abs(self.m_21) > NifFormat.EPSILON - or abs(self.m_22 - 1.0) > NifFormat.EPSILON - or abs(self.m_23) > NifFormat.EPSILON - or abs(self.m_24) > NifFormat.EPSILON - or abs(self.m_31) > NifFormat.EPSILON - or abs(self.m_32) > NifFormat.EPSILON - or abs(self.m_33 - 1.0) > NifFormat.EPSILON - or abs(self.m_34) > NifFormat.EPSILON - or abs(self.m_41) > NifFormat.EPSILON - or abs(self.m_42) > NifFormat.EPSILON - or abs(self.m_43) > NifFormat.EPSILON - or abs(self.m_44 - 1.0) > NifFormat.EPSILON): + or abs(self.m_12) > NifFormat.EPSILON + or abs(self.m_13) > 
NifFormat.EPSILON + or abs(self.m_14) > NifFormat.EPSILON + or abs(self.m_21) > NifFormat.EPSILON + or abs(self.m_22 - 1.0) > NifFormat.EPSILON + or abs(self.m_23) > NifFormat.EPSILON + or abs(self.m_24) > NifFormat.EPSILON + or abs(self.m_31) > NifFormat.EPSILON + or abs(self.m_32) > NifFormat.EPSILON + or abs(self.m_33 - 1.0) > NifFormat.EPSILON + or abs(self.m_34) > NifFormat.EPSILON + or abs(self.m_41) > NifFormat.EPSILON + or abs(self.m_42) > NifFormat.EPSILON + or abs(self.m_43) > NifFormat.EPSILON + or abs(self.m_44 - 1.0) > NifFormat.EPSILON): return False else: return True @@ -3717,6 +3773,7 @@ def set_scale_rotation_translation(self, scale, rotation, translation): def get_inverse(self, fast=True): """Calculates inverse (fast assumes is_scale_rotation_translation is True).""" + def adjoint(m, ii, jj): result = [] for i, row in enumerate(m): @@ -3726,9 +3783,10 @@ def adjoint(m, ii, jj): if j == jj: continue result[-1].append(x) return result + def determinant(m): if len(m) == 2: - return m[0][0]*m[1][1] - m[1][0]*m[0][1] + return m[0][0] * m[1][1] - m[1][0] * m[0][1] result = 0.0 for i in range(len(m)): det = determinant(adjoint(m, i, 0)) @@ -3755,10 +3813,10 @@ def determinant(m): nn = [[0.0 for i in range(4)] for j in range(4)] det = determinant(m) if abs(det) < NifFormat.EPSILON: - raise ZeroDivisionError('cannot invert matrix:\n%s'%self) + raise ZeroDivisionError('cannot invert matrix:\n%s' % self) for i in range(4): for j in range(4): - if (i+j) & 1: + if (i + j) & 1: nn[j][i] = -determinant(adjoint(m, i, j)) / det else: nn[j][i] = determinant(adjoint(m, i, j)) / det @@ -3792,25 +3850,25 @@ def __mul__(self, x): raise TypeError("matrix*vector not supported; please use left multiplication (vector*matrix)") elif isinstance(x, NifFormat.Matrix44): m = NifFormat.Matrix44() - m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41 - m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42 
- m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43 - m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44 - m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41 - m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42 - m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43 - m.m_24 = self.m_21 * x.m_14 + self.m_22 * x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44 - m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41 - m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42 - m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43 - m.m_34 = self.m_31 * x.m_14 + self.m_32 * x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44 - m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41 - m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42 - m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43 - m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44 + m.m_11 = self.m_11 * x.m_11 + self.m_12 * x.m_21 + self.m_13 * x.m_31 + self.m_14 * x.m_41 + m.m_12 = self.m_11 * x.m_12 + self.m_12 * x.m_22 + self.m_13 * x.m_32 + self.m_14 * x.m_42 + m.m_13 = self.m_11 * x.m_13 + self.m_12 * x.m_23 + self.m_13 * x.m_33 + self.m_14 * x.m_43 + m.m_14 = self.m_11 * x.m_14 + self.m_12 * x.m_24 + self.m_13 * x.m_34 + self.m_14 * x.m_44 + m.m_21 = self.m_21 * x.m_11 + self.m_22 * x.m_21 + self.m_23 * x.m_31 + self.m_24 * x.m_41 + m.m_22 = self.m_21 * x.m_12 + self.m_22 * x.m_22 + self.m_23 * x.m_32 + self.m_24 * x.m_42 + m.m_23 = self.m_21 * x.m_13 + self.m_22 * x.m_23 + self.m_23 * x.m_33 + self.m_24 * x.m_43 + m.m_24 = self.m_21 * x.m_14 + self.m_22 * 
x.m_24 + self.m_23 * x.m_34 + self.m_24 * x.m_44 + m.m_31 = self.m_31 * x.m_11 + self.m_32 * x.m_21 + self.m_33 * x.m_31 + self.m_34 * x.m_41 + m.m_32 = self.m_31 * x.m_12 + self.m_32 * x.m_22 + self.m_33 * x.m_32 + self.m_34 * x.m_42 + m.m_33 = self.m_31 * x.m_13 + self.m_32 * x.m_23 + self.m_33 * x.m_33 + self.m_34 * x.m_43 + m.m_34 = self.m_31 * x.m_14 + self.m_32 * x.m_24 + self.m_33 * x.m_34 + self.m_34 * x.m_44 + m.m_41 = self.m_41 * x.m_11 + self.m_42 * x.m_21 + self.m_43 * x.m_31 + self.m_44 * x.m_41 + m.m_42 = self.m_41 * x.m_12 + self.m_42 * x.m_22 + self.m_43 * x.m_32 + self.m_44 * x.m_42 + m.m_43 = self.m_41 * x.m_13 + self.m_42 * x.m_23 + self.m_43 * x.m_33 + self.m_44 * x.m_43 + m.m_44 = self.m_41 * x.m_14 + self.m_42 * x.m_24 + self.m_43 * x.m_34 + self.m_44 * x.m_44 return m else: - raise TypeError("do not know how to multiply Matrix44 with %s"%x.__class__) + raise TypeError("do not know how to multiply Matrix44 with %s" % x.__class__) def __div__(self, x): if isinstance(x, (float, int)): @@ -3833,7 +3891,7 @@ def __div__(self, x): m.m_44 = self.m_44 / x return m else: - raise TypeError("do not know how to divide Matrix44 by %s"%x.__class__) + raise TypeError("do not know how to divide Matrix44 by %s" % x.__class__) # py3k __truediv__ = __div__ @@ -3842,13 +3900,13 @@ def __rmul__(self, x): if isinstance(x, (float, int)): return self * x else: - raise TypeError("do not know how to multiply %s with Matrix44"%x.__class__) + raise TypeError("do not know how to multiply %s with Matrix44" % x.__class__) def __eq__(self, m): if isinstance(m, type(None)): return False if not isinstance(m, NifFormat.Matrix44): - raise TypeError("do not know how to compare Matrix44 and %s"%m.__class__) + raise TypeError("do not know how to compare Matrix44 and %s" % m.__class__) if abs(self.m_11 - m.m_11) > NifFormat.EPSILON: return False if abs(self.m_12 - m.m_12) > NifFormat.EPSILON: return False if abs(self.m_13 - m.m_13) > NifFormat.EPSILON: return False @@ -3910,7 
+3968,7 @@ def __add__(self, x): m.m_44 = self.m_44 + x return m else: - raise TypeError("do not know how to add Matrix44 and %s"%x.__class__) + raise TypeError("do not know how to add Matrix44 and %s" % x.__class__) def __sub__(self, x): if isinstance(x, (NifFormat.Matrix44)): @@ -3992,6 +4050,7 @@ class NiAVObject: >>> [prop.name for prop in node.properties] [b'hello', b'world'] """ + def add_property(self, prop): """Add the given property to the property list. @@ -4010,7 +4069,7 @@ def remove_property(self, prop): :type prop: L{NifFormat.NiProperty} """ self.set_properties([otherprop for otherprop in self.get_properties() - if not(otherprop is prop)]) + if not (otherprop is prop)]) def get_properties(self): """Return a list of the properties of the block. @@ -4045,7 +4104,7 @@ def get_transform(self, relative_to=None): m.set_scale_rotation_translation(self.scale, self.rotation, self.translation) if not relative_to: return m # find chain from relative_to to self - chain = relative_to.find_chain(self, block_type = NifFormat.NiAVObject) + chain = relative_to.find_chain(self, block_type=NifFormat.NiAVObject) if not chain: raise ValueError( 'cannot find a chain of NiAVObject blocks ' @@ -4147,6 +4206,7 @@ class NiBSplineData: >>> list(block.get_comp_data(60, 2, 2, 2.5, 1.5)) # doctest: +ELLIPSIS [(1.0, 2.00...), (4.0, 2.99...)] """ + def _getData(self, offset, num_elements, element_size, controlpoints): """Helper function for get_float_data and get_short_data. For internal use only.""" @@ -4283,10 +4343,10 @@ def get_times(self): # return all times for i in range(self.basis_data.num_control_points): yield ( - self.start_time - + (i * (self.stop_time - self.start_time) - / (self.basis_data.num_control_points - 1)) - ) + self.start_time + + (i * (self.stop_time - self.start_time) + / (self.basis_data.num_control_points - 1)) + ) def _getFloatKeys(self, offset, element_size): """Helper function to get iterator to various keys. 
Internal use only.""" @@ -4298,8 +4358,8 @@ def _getFloatKeys(self, offset, element_size): return # yield all keys for key in self.spline_data.get_float_data(offset, - self.basis_data.num_control_points, - element_size): + self.basis_data.num_control_points, + element_size): yield key def _getCompKeys(self, offset, element_size, bias, multiplier): @@ -4312,9 +4372,9 @@ def _getCompKeys(self, offset, element_size, bias, multiplier): return # yield all keys for key in self.spline_data.get_comp_data(offset, - self.basis_data.num_control_points, - element_size, - bias, multiplier): + self.basis_data.num_control_points, + element_size, + bias, multiplier): yield key class NiBSplineTransformInterpolator: @@ -4356,7 +4416,7 @@ def add_controlled_block(self): >>> ctrlblock = seq.add_controlled_block() >>> seq.num_controlled_blocks 1 - >>> isinstance(ctrlblock, NifFormat.ControllerLink) + >>> isinstance(ctrlblock, NifFormat.ControlledBlock) True """ # add to the list @@ -4414,13 +4474,14 @@ class NiGeometryData: (4000, 5000, 6000, 0, 1000, 0, 0, 0, 0, 0, 310, 320, 330, 340) (1200, 3400, 5600, 1000, 0, 0, 97000, 96000, 0, 94000, 0, 0, 0, 0) """ + def update_center_radius(self): """Recalculate center and radius of the data.""" # in case there are no vertices, set center and radius to zero if len(self.vertices) == 0: - self.center.x = 0.0 - self.center.y = 0.0 - self.center.z = 0.0 + self.bounding_sphere.center.x = 0.0 + self.bounding_sphere.center.y = 0.0 + self.bounding_sphere.center.z = 0.0 self.radius = 0.0 return @@ -4436,9 +4497,9 @@ def update_center_radius(self): cx = (lowx + highx) * 0.5 cy = (lowy + highy) * 0.5 cz = (lowz + highz) * 0.5 - self.center.x = cx - self.center.y = cy - self.center.z = cz + self.bounding_sphere.center.x = cx + self.bounding_sphere.center.y = cy + self.bounding_sphere.center.z = cz # radius is the largest distance from the center r2 = 0.0 @@ -4446,8 +4507,8 @@ def update_center_radius(self): dx = cx - v.x dy = cy - v.y dz = cz - v.z - r2 = 
max(r2, dx*dx+dy*dy+dz*dz) - self.radius = r2 ** 0.5 + r2 = max(r2, dx * dx + dy * dy + dz * dz) + self.bounding_sphere.radius = r2 ** 0.5 def apply_scale(self, scale): """Apply scale factor on data.""" @@ -4456,15 +4517,15 @@ def apply_scale(self, scale): v.x *= scale v.y *= scale v.z *= scale - self.center.x *= scale - self.center.y *= scale - self.center.z *= scale - self.radius *= scale + self.bounding_sphere.center.x *= scale + self.bounding_sphere.center.y *= scale + self.bounding_sphere.center.z *= scale + self.bounding_sphere.radius *= scale def get_vertex_hash_generator( - self, - vertexprecision=3, normalprecision=3, - uvprecision=5, vcolprecision=3): + self, + vertex_precision=3, normalprecision=3, + uvprecision=5, vcolprecision=3): """Generator which produces a tuple of integers for each (vertex, normal, uv, vcol), to ease detection of duplicate vertices. The precision parameters denote number of @@ -4474,11 +4535,11 @@ def get_vertex_hash_generator( very large models the uv coordinates can be very close together. - For vertexprecision, 3 seems usually enough (maybe we'll + For vertex_precision, 3 seems usually enough (maybe we'll have to increase this at some point). - :param vertexprecision: Precision to be used for vertices. - :type vertexprecision: float + :param vertex_precision: Precision to be used for vertices. + :type vertex_precision: float :param normalprecision: Precision to be used for normals. :type normalprecision: float :param uvprecision: Precision to be used for uvs. @@ -4487,12 +4548,12 @@ def get_vertex_hash_generator( :type vcolprecision: float :return: A generator yielding a hash value for each vertex. 
""" - + verts = self.vertices if self.has_vertices else None norms = self.normals if self.has_normals else None uvsets = self.uv_sets if len(self.uv_sets) else None vcols = self.vertex_colors if self.has_vertex_colors else None - vertexfactor = 10 ** vertexprecision + vertexfactor = 10 ** vertex_precision normalfactor = 10 ** normalprecision uvfactor = 10 ** uvprecision vcolfactor = 10 ** vcolprecision @@ -4500,7 +4561,7 @@ def get_vertex_hash_generator( h = [] if verts: h.extend([float_to_int(x * vertexfactor) - for x in [verts[i].x, verts[i].y, verts[i].z]]) + for x in [verts[i].x, verts[i].y, verts[i].z]]) if norms: h.extend([float_to_int(x * normalfactor) for x in [norms[i].x, norms[i].y, norms[i].z]]) @@ -4574,6 +4635,7 @@ class NiGeometry: >>> [child.name for child in skelroot.children] [b'geom', b'bone1', b'bone21', b'bone2', b'bone22', b'bone211'] """ + def is_skin(self): """Returns True if geometry is skinned.""" return self.skin_instance != None @@ -4602,10 +4664,10 @@ def add_bone(self, bone, vert_weights): skelroot = skininst.skeleton_root bone_index = skininst.num_bones - skininst.num_bones = bone_index+1 + skininst.num_bones = bone_index + 1 skininst.bones.update_size() skininst.bones[bone_index] = bone - skindata.num_bones = bone_index+1 + skindata.num_bones = bone_index + 1 skindata.bone_list.update_size() skinbonedata = skindata.bone_list[bone_index] # set vertex weights @@ -4615,8 +4677,6 @@ def add_bone(self, bone, vert_weights): skinbonedata.vertex_weights[i].index = vert_index skinbonedata.vertex_weights[i].weight = vert_weight - - def get_vertex_weights(self): """Get vertex weights in a convenient format: list bone and weight per vertex.""" @@ -4648,33 +4708,32 @@ def get_vertex_weights(self): boneweightlist.append([bonenum, skinweight.weight]) return weights - def flatten_skin(self): """Reposition all bone blocks and geometry block in the tree to be direct children of the skeleton root. 
Returns list of all used bones by the skin.""" - if not self.is_skin(): return [] # nothing to do + if not self.is_skin(): return [] # nothing to do - result = [] # list of repositioned bones - self._validate_skin() # validate the skin + result = [] # list of repositioned bones + self._validate_skin() # validate the skin skininst = self.skin_instance skindata = skininst.data skelroot = skininst.skeleton_root # reparent geometry self.set_transform(self.get_transform(skelroot)) - geometry_parent = skelroot.find_chain(self, block_type = NifFormat.NiAVObject)[-2] - geometry_parent.remove_child(self) # detatch geometry from tree - skelroot.add_child(self, front = True) # and attatch it to the skeleton root + geometry_parent = skelroot.find_chain(self, block_type=NifFormat.NiAVObject)[-2] + geometry_parent.remove_child(self) # detatch geometry from tree + skelroot.add_child(self, front=True) # and attatch it to the skeleton root # reparent all the bone blocks for bone_block in skininst.bones: # skeleton root, if it is used as bone, does not need to be processed if bone_block == skelroot: continue # get bone parent - bone_parent = skelroot.find_chain(bone_block, block_type = NifFormat.NiAVObject)[-2] + bone_parent = skelroot.find_chain(bone_block, block_type=NifFormat.NiAVObject)[-2] # set new child transforms for child in bone_block.children: child.set_transform(child.get_transform(bone_parent)) @@ -4682,7 +4741,7 @@ def flatten_skin(self): for child in bone_block.children: bone_parent.add_child(child) bone_block.num_children = 0 - bone_block.children.update_size() # = remove_child on each child + bone_block.children.update_size() # = remove_child on each child # set new bone transform bone_block.set_transform(bone_block.get_transform(skelroot)) # reparent bone block @@ -4692,8 +4751,6 @@ def flatten_skin(self): return result - - # The nif skinning algorithm works as follows (as of nifskope): # v' # vertex after skinning in geometry space # = sum over {b in 
skininst.bones} # sum over all bones b that influence the mesh @@ -4715,9 +4772,9 @@ def get_skin_deformation(self): skindata = skininst.data skelroot = skininst.skeleton_root - vertices = [ NifFormat.Vector3() for i in range(self.data.num_vertices) ] - normals = [ NifFormat.Vector3() for i in range(self.data.num_vertices) ] - sumweights = [ 0.0 for i in range(self.data.num_vertices) ] + vertices = [NifFormat.Vector3() for i in range(self.data.num_vertices)] + normals = [NifFormat.Vector3() for i in range(self.data.num_vertices)] + sumweights = [0.0 for i in range(self.data.num_vertices)] skin_offset = skindata.get_transform() # store one transform & rotation per bone bone_transforms = [] @@ -4727,8 +4784,8 @@ def get_skin_deformation(self): bone_matrix = bone_block.get_transform(skelroot) transform = bone_offset * bone_matrix * skin_offset scale, rotation, translation = transform.get_scale_rotation_translation() - bone_transforms.append( (transform, rotation) ) - + bone_transforms.append((transform, rotation)) + # the usual case if skindata.has_vertex_weights: for i, bone_block in enumerate(skininst.bones): @@ -4765,14 +4822,12 @@ def get_skin_deformation(self): sumweights[vert_index] += weight for i, s in enumerate(sumweights): - if abs(s - 1.0) > 0.01: + if abs(s - 1.0) > 0.01: logging.getLogger("pyffi.nif.nigeometry").warn( "vertex %i has weights not summing to one" % i) return vertices, normals - - # ported and extended from niflib::NiNode::GoToSkeletonBindPosition() (r2518) def send_bones_to_bind_position(self): """Send all bones to their bind position. @@ -4806,8 +4861,6 @@ def send_bones_to_bind_position(self): child_matrix = child_offset.get_inverse() * parent_offset child_bone.set_transform(child_matrix) - - # ported from niflib::NiSkinData::ResetOffsets (r2561) def update_bind_position(self): """Make current position of the bones the bind position for this geometry. 
@@ -4830,7 +4883,7 @@ def update_bind_position(self): # calculate bone offsets for i, bone in enumerate(skininst.bones): - skindata.bone_list[i].set_transform(geomtransform * bone.get_transform(skelroot).get_inverse()) + skindata.bone_list[i].set_transform(geomtransform * bone.get_transform(skelroot).get_inverse()) def get_skin_partition(self): """Return the skin partition block.""" @@ -4866,12 +4919,12 @@ def apply_scale(self, scale): key.value.x *= scale key.value.y *= scale key.value.z *= scale - #key.forward.x *= scale - #key.forward.y *= scale - #key.forward.z *= scale - #key.backward.x *= scale - #key.backward.y *= scale - #key.backward.z *= scale + # key.forward.x *= scale + # key.forward.y *= scale + # key.forward.z *= scale + # key.backward.x *= scale + # key.backward.y *= scale + # key.backward.z *= scale # what to do with TBC? class NiMaterialColorController: @@ -4982,6 +5035,7 @@ class NiNode: >>> [effect.name for effect in node.effects] [b'hello', b'world'] """ + def add_child(self, child, front=False): """Add block to child list. @@ -5003,7 +5057,7 @@ def add_child(self, child, front=False): self.children[num_children] = child else: for i in range(num_children, 0, -1): - self.children[i] = self.children[i-1] + self.children[i] = self.children[i - 1] self.children[0] = child def remove_child(self, child): @@ -5013,7 +5067,7 @@ def remove_child(self, child): :type child: L{NifFormat.NiAVObject} """ self.set_children([otherchild for otherchild in self.get_children() - if not(otherchild is child)]) + if not (otherchild is child)]) def get_children(self): """Return a list of the children of the block. @@ -5052,7 +5106,7 @@ def remove_effect(self, effect): :type effect: L{NifFormat.NiDynamicEffect} """ self.set_effects([othereffect for othereffect in self.get_effects() - if not(othereffect is effect)]) + if not (othereffect is effect)]) def get_effects(self): """Return a list of the effects of the block. 
@@ -5106,7 +5160,7 @@ def merge_external_skeleton_root(self, skelroot): # fix links to skeleton root and bones for externalblock in child.tree(): if isinstance(externalblock, NifFormat.NiSkinInstance): - if not(externalblock.skeleton_root is skelroot): + if not (externalblock.skeleton_root is skelroot): raise ValueError( "expected skeleton root %s but got %s" % (skelroot.name, externalblock.skeleton_root.name)) @@ -5133,8 +5187,8 @@ def merge_skeleton_roots(self): """ logger = logging.getLogger("pyffi.nif.ninode") - result = [] # list of reparented blocks - failed = [] # list of blocks that could not be reparented + result = [] # list of reparented blocks + failed = [] # list of blocks that could not be reparented id44 = NifFormat.Matrix44() id44.set_identity() @@ -5155,12 +5209,12 @@ def merge_skeleton_roots(self): continue # check transforms if (geom.skin_instance.data.get_transform() - * geom.get_transform(geom.skin_instance.skeleton_root) != id44): + * geom.get_transform(geom.skin_instance.skeleton_root) != id44): logger.warn( "can't rebase %s: global skin data transform does not match " "geometry transform relative to skeleton root" % geom.name) failed.append(geom) - continue # skip this one + continue # skip this one # everything ok! 
# find geometry parent geomroot = geom.skin_instance.skeleton_root.find_chain(geom)[-2] @@ -5185,8 +5239,8 @@ def get_skinned_geometries(self): """ for geom in self.get_global_iterator(): if (isinstance(geom, NifFormat.NiGeometry) - and geom.is_skin() - and geom.skin_instance.skeleton_root is self): + and geom.is_skin() + and geom.skin_instance.skeleton_root is self): yield geom def send_geometries_to_bind_position(self): @@ -5285,8 +5339,8 @@ def send_geometries_to_bind_position(self): if not bonenode: continue bone_bind_transform[bonenode.name] = ( - bonedata.get_transform().get_inverse(fast=False) - * geom.get_transform(self)) + bonedata.get_transform().get_inverse(fast=False) + * geom.get_transform(self)) # validation: check that bones share bind position bone_bind_transform = {} @@ -5311,8 +5365,8 @@ def send_geometries_to_bind_position(self): for row in diff.as_list())) else: bone_bind_transform[bonenode.name] = ( - bonedata.get_transform().get_inverse(fast=False) - * geom.get_transform(self)) + bonedata.get_transform().get_inverse(fast=False) + * geom.get_transform(self)) logger.debug("Geometry bind position error is %f" % error) if error > 1e-3: @@ -5365,8 +5419,8 @@ def send_detached_geometries_to_node_position(self): for boneset in bonesets: logger.debug(str([bone.name for bone in boneset])) parts = [[geom for geom in geoms - if set(geom.skin_instance.bones) & set(boneset)] - for boneset in bonesets] + if set(geom.skin_instance.bones) & set(boneset)] + for boneset in bonesets] logger.debug("geometries per partition are") for part in parts: logger.debug(str([geom.name for geom in part])) @@ -5391,7 +5445,7 @@ def send_detached_geometries_to_node_position(self): # find a geometry that has this bone for geom in part: for bonenode, bonedata in zip(geom.skin_instance.bones, - geom.skin_instance.data.bone_list): + geom.skin_instance.data.bone_list): if bonenode is lowest_bonenode: lowest_geom = geom lowest_bonedata = bonedata @@ -5438,7 +5492,7 @@ def 
send_detached_geometries_to_node_position(self): logger.debug("transforming bind position of bone %s" % bonenode.name) bonedata.set_transform(diff.get_inverse(fast=False) - * bonedata.get_transform()) + * bonedata.get_transform()) # transform geometry logger.debug("transforming vertices and normals") for vert in geom.data.vertices: @@ -5485,7 +5539,9 @@ def send_bones_to_bind_position(self): * geom.get_transform(self))) if diff.sup_norm() > 1e-3: - logger.warning("Geometries %s and %s do not share the same bind position: bone %s will be sent to a position matching only one of these" % (geom.name, othergeom.name, bonenode.name)) + logger.warning( + "Geometries %s and %s do not share the same bind position: bone %s will be sent to a position matching only one of these" % ( + geom.name, othergeom.name, bonenode.name)) # break the loop break else: @@ -5712,7 +5768,7 @@ def add_integer_extra_data(self, name, value): self.add_extra_data(extra) class NiObject: - def find(self, block_name = None, block_type = None): + def find(self, block_name=None, block_type=None): # does this block match the search criteria? if block_name and block_type: if isinstance(self, block_type): @@ -5735,7 +5791,7 @@ def find(self, block_name = None, block_type = None): return None - def find_chain(self, block, block_type = None): + def find_chain(self, block, block_type=None): """Finds a chain of blocks going from C{self} to C{block}. If found, self is the first element and block is the last element. If no branch found, returns an empty list. Does not check whether there is more @@ -5760,7 +5816,7 @@ def apply_scale(self, scale): """ pass - def tree(self, block_type = None, follow_all = True, unique = False): + def tree(self, block_type=None, follow_all=True, unique=False): """A generator for parsing all blocks in the tree (starting from and including C{self}). 
@@ -5771,7 +5827,7 @@ def tree(self, block_type = None, follow_all = True, unique = False): # unique blocks: reduce this to the case of non-unique blocks if unique: block_list = [] - for block in self.tree(block_type = block_type, follow_all = follow_all, unique = False): + for block in self.tree(block_type=block_type, follow_all=follow_all, unique=False): if not block in block_list: yield block block_list.append(block) @@ -5783,11 +5839,11 @@ def tree(self, block_type = None, follow_all = True, unique = False): elif isinstance(self, block_type): yield self elif not follow_all: - return # don't recurse further + return # don't recurse further # yield tree attached to each child for child in self.get_refs(): - for block in child.tree(block_type = block_type, follow_all = follow_all): + for block in child.tree(block_type=block_type, follow_all=follow_all): yield block def _validateTree(self): @@ -5824,7 +5880,7 @@ def is_interchangeable(self, other): if self.__class__ is not other.__class__: return False if (self.name.lower() in specialnames - or other.name.lower() in specialnames): + or other.name.lower() in specialnames): # do not ignore name return self.get_hash() == other.get_hash() else: @@ -6073,14 +6129,14 @@ def is_interchangeable(self, other): # check class if (not isinstance(self, other.__class__) - or not isinstance(other, self.__class__)): + or not isinstance(other, self.__class__)): return False # check some trivial things first for attribute in ( - "num_vertices", "keep_flags", "compress_flags", "has_vertices", - "num_uv_sets", "has_normals", "center", "radius", - "has_vertex_colors", "has_uv", "consistency_flags"): + "num_vertices", "keep_flags", "compress_flags", "has_vertices", + "num_uv_sets", "has_normals", "center", "radius", + "has_vertex_colors", "has_uv", "consistency_flags"): if getattr(self, attribute) != getattr(other, attribute): return False @@ -6135,6 +6191,7 @@ def get_triangle_indices(self, triangles): :param triangles: An iterable of 
triangles to check. :type triangles: iterator or list of tuples of three ints """ + def triangleHash(triangle): """Calculate hash of a non-degenerate triangle. Returns ``None`` if the triangle is degenerate. @@ -6169,16 +6226,15 @@ def bytes2vectors(data, pos, num): vec = NifFormat.Vector3() # XXX _byte_order! assuming little endian vec.x, vec.y, vec.z = struct.unpack(' maxbonespervertex: # delete bone influences with least weight - weight.sort(key=lambda x: x[1], reverse=True) # sort by weight + weight.sort(key=lambda x: x[1], reverse=True) # sort by weight # save lost weight to return to user lostweight = max( lostweight, max( [x[1] for x in weight[maxbonespervertex:]])) - del weight[maxbonespervertex:] # only keep first elements + del weight[maxbonespervertex:] # only keep first elements # normalize - totalweight = sum([x[1] for x in weight]) # sum of all weights + totalweight = sum([x[1] for x in weight]) # sum of all weights for x in weight: x[1] /= totalweight maxbones = maxbonespervertex # sort by again by bone (relied on later when matching vertices) @@ -6526,7 +6580,7 @@ def update_skin_partition(self, # this triangle tribonesweights = {} for bonenum in tribones: tribonesweights[bonenum] = 0.0 - nono = set() # bones with weight 1 cannot be removed + nono = set() # bones with weight 1 cannot be removed for skinweights in [weights[t] for t in tri]: # skinweights[0] is the first skinweight influencing vertex t # and skinweights[0][0] is the bone number of that bone @@ -6552,7 +6606,7 @@ def update_skin_partition(self, # remove minbone from all vertices of this triangle and from all # matching vertices for t in tri: - for tt in [t]: #match[t]: + for tt in [t]: # match[t]: # remove bone weight = weights[tt] for i, (bonenum, boneweight) in enumerate(weight): @@ -6574,7 +6628,7 @@ def update_skin_partition(self, # keep creating partitions as long as there are triangles left while triangles: # create a partition - part = [set(), [], None] # bones, triangles, 
partition index + part = [set(), [], None] # bones, triangles, partition index usedverts = set() addtriangles = True # keep adding triangles to it as long as the flag is set @@ -6594,7 +6648,7 @@ def update_skin_partition(self, # or if part has all bones of tribones and index coincides # then add this triangle to this part if ((not part[0]) - or ((part[0] >= tribones) and (part[2] == partindex))): + or ((part[0] >= tribones) and (part[2] == partindex))): part[0] |= tribones part[1].append(tri) usedverts |= set(tri) @@ -6647,7 +6701,7 @@ def update_skin_partition(self, # merge all partitions logger.info("Merging partitions.") - merged = True # signals success, in which case do another run + merged = True # signals success, in which case do another run while merged: merged = False # newparts is to contain the updated merged partitions as we go @@ -6669,11 +6723,11 @@ def update_skin_partition(self, # if partition indices are the same, and bone limit is not # exceeded, merge them if ((parta[2] == partb[2]) - and (len(parta[0] | partb[0]) <= maxbonesperpartition)): + and (len(parta[0] | partb[0]) <= maxbonesperpartition)): parta[0] |= partb[0] parta[1] += partb[1] addedparts.add(b) - merged = True # signal another try in merging partitions + merged = True # signal another try in merging partitions # update partitions to the merged partitions parts = newparts @@ -6688,14 +6742,14 @@ def update_skin_partition(self, skinpart = skininst.skin_partition skindata.skin_partition = skinpart else: - # otherwise, create a new block and link it + # otherwise, create a new block and link it skinpart = NifFormat.NiSkinPartition() skindata.skin_partition = skinpart skininst.skin_partition = skinpart # set number of partitions - skinpart.num_skin_partition_blocks = len(parts) - skinpart.skin_partition_blocks.update_size() + skinpart.num_partitions = len(parts) + skinpart.partitions.update_size() # maximize bone sharing, if requested if maximize_bone_sharing: @@ -6752,7 +6806,7 @@ def 
update_skin_partition(self, # store part for next iteration lastpart = part - for skinpartblock, part in zip(skinpart.skin_partition_blocks, parts): + for skinpartblock, part in zip(skinpart.partitions, parts): # get sorted list of bones bones = sorted(list(part[0])) triangles = part[1] @@ -6769,8 +6823,8 @@ def update_skin_partition(self, # decide whether to use strip or triangles as primitive if stripify is None: stripifyblock = ( - strips_size < triangles_size - and all(len(strip) < 65536 for strip in strips)) + strips_size < triangles_size + and all(len(strip) < 65536 for strip in strips)) else: stripifyblock = stripify if stripifyblock: @@ -6820,7 +6874,7 @@ def update_skin_partition(self, for i, bonenum in enumerate(bones): skinpartblock.bones[i] = bonenum for i in range(len(bones), skinpartblock.num_bones): - skinpartblock.bones[i] = 0 # dummy bone slots refer to first bone + skinpartblock.bones[i] = 0 # dummy bone slots refer to first bone skinpartblock.has_vertex_map = True skinpartblock.vertex_map.update_size() for i, v in enumerate(vertices): @@ -6849,7 +6903,7 @@ def update_skin_partition(self, # clear strips array skinpartblock.strips.update_size() skinpartblock.triangles.update_size() - for i, (v_1,v_2,v_3) in enumerate(triangles): + for i, (v_1, v_2, v_3) in enumerate(triangles): skinpartblock.triangles[i].v_1 = vertices.index(v_1) skinpartblock.triangles[i].v_2 = vertices.index(v_2) skinpartblock.triangles[i].v_3 = vertices.index(v_3) @@ -6862,7 +6916,7 @@ def update_skin_partition(self, for j in range(len(weights[v])): skinpartblock.bone_indices[i][j] = bones.index(weights[v][j][0]) boneindices.remove(skinpartblock.bone_indices[i][j]) - for j in range(len(weights[v]),skinpartblock.num_weights_per_vertex): + for j in range(len(weights[v]), skinpartblock.num_weights_per_vertex): if padbones: # if padbones is True then we have enforced # num_bones == num_weights_per_vertex so this will not trigger @@ -6895,7 +6949,7 @@ def 
update_skin_center_radius(self): """Update centers and radii of all skin data fields.""" # shortcuts relevant blocks if not self.skin_instance: - return # no skin, nothing to do + return # no skin, nothing to do self._validate_skin() geomdata = self.data skininst = self.skin_instance @@ -6926,7 +6980,7 @@ def update_skin_center_radius(self): r2 = 0.0 for v in boneverts: d = center - v - r2 = max(r2, d.x*d.x+d.y*d.y+d.z*d.z) + r2 = max(r2, d.x * d.x + d.y * d.y + d.z * d.z) radius = r2 ** 0.5 # transform center in proper coordinates (radius remains unaffected) @@ -6997,17 +7051,18 @@ class NiTriShapeData: >>> block.get_triangles() [(0, 2, 1), (1, 2, 3), (2, 4, 3)] """ + def get_triangles(self): return [(t.v_1, t.v_2, t.v_3) for t in self.triangles] - def set_triangles(self, triangles, stitchstrips = False): + def set_triangles(self, triangles, stitchstrips=False): # note: the stitchstrips argument is ignored - only present to ensure # uniform interface between NiTriShapeData and NiTriStripsData # initialize triangle array n = len(triangles) self.num_triangles = n - self.num_triangle_points = 3*n + self.num_triangle_points = 3 * n self.has_triangles = (n > 0) self.triangles.update_size() @@ -7038,10 +7093,11 @@ class NiTriStripsData: >>> block.get_triangles() [(0, 2, 1), (1, 2, 3), (2, 4, 3)] """ + def get_triangles(self): return pyffi.utils.tristrip.triangulate(self.points) - def set_triangles(self, triangles, stitchstrips = False): + def set_triangles(self, triangles, stitchstrips=False): self.set_strips(pyffi.utils.vertex_cache.stripify( triangles, stitchstrips=stitchstrips)) @@ -7075,8 +7131,8 @@ def update_a_b(self, transform): self.pivot_b.z = pivot_b.z # axes (rotation only) transform = transform.get_matrix_33() - plane_b = self.plane_a.get_vector_3() * transform - twist_b = self.twist_a.get_vector_3() * transform + plane_b = self.plane_a.get_vector_3() * transform + twist_b = self.twist_a.get_vector_3() * transform self.plane_b.x = plane_b.x self.plane_b.y = 
plane_b.y self.plane_b.z = plane_b.z @@ -7115,7 +7171,7 @@ def get_string(self, offset): ... ValueError: ... """ - _b00 = pyffi.object_models.common._b00 # shortcut + _b00 = pyffi.object_models.common._b00 # shortcut # check that offset isn't too large if offset >= len(self.palette): raise ValueError( @@ -7123,15 +7179,15 @@ def get_string(self, offset): "but palette is only %i long" % (offset, len(self.palette))) # check that a string starts at this offset - if offset > 0 and self.palette[offset-1:offset] != _b00: + if offset > 0 and self.palette[offset - 1:offset] != _b00: logger = logging.getLogger("pyffi.nif.stringpalette") logger.warning( "StringPalette: no string starts at offset %i " "(string is %s, preceeding character is %s)" % ( offset, self.palette[offset:self.palette.find(_b00, offset)], - self.palette[offset-1:offset], - )) + self.palette[offset - 1:offset], + )) # return the string return self.palette[offset:self.palette.find(_b00, offset)] @@ -7152,7 +7208,7 @@ def get_all_strings(self): >>> print(repr(pal.palette.decode("ascii")).lstrip("u")) 'abc\\x00def\\x00' """ - _b00 = pyffi.object_models.common._b00 # shortcut + _b00 = pyffi.object_models.common._b00 # shortcut return self.palette[:-1].split(_b00) def add_string(self, text): @@ -7175,7 +7231,7 @@ def add_string(self, text): # empty text if not text: return -1 - _b00 = pyffi.object_models.common._b00 # shortcut + _b00 = pyffi.object_models.common._b00 # shortcut # convert text to bytes if necessary text = pyffi.object_models.common._as_bytes(text) # check if string is already in the palette @@ -7211,7 +7267,7 @@ def clear(self): >>> print(repr(pal.palette.decode("ascii")).lstrip("u")) '' """ - self.palette = pyffi.object_models.common._b # empty bytes object + self.palette = pyffi.object_models.common._b # empty bytes object self.length = 0 class TexCoord: @@ -7219,14 +7275,14 @@ def as_list(self): return [self.u, self.v] def normalize(self): - r = (self.u*self.u + self.v*self.v) ** 0.5 + r = 
(self.u * self.u + self.v * self.v) ** 0.5 if r < NifFormat.EPSILON: - raise ZeroDivisionError('cannot normalize vector %s'%self) + raise ZeroDivisionError('cannot normalize vector %s' % self) self.u /= r self.v /= r def __str__(self): - return "[ %6.3f %6.3f ]"%(self.u, self.v) + return "[ %6.3f %6.3f ]" % (self.u, self.v) def __mul__(self, x): if isinstance(x, (float, int)): @@ -7237,7 +7293,7 @@ def __mul__(self, x): elif isinstance(x, NifFormat.TexCoord): return self.u * x.u + self.v * x.v else: - raise TypeError("do not know how to multiply TexCoord with %s"%x.__class__) + raise TypeError("do not know how to multiply TexCoord with %s" % x.__class__) def __rmul__(self, x): if isinstance(x, (float, int)): @@ -7246,7 +7302,7 @@ def __rmul__(self, x): v.v = x * self.v return v else: - raise TypeError("do not know how to multiply %s and TexCoord"%x.__class__) + raise TypeError("do not know how to multiply %s and TexCoord" % x.__class__) def __add__(self, x): if isinstance(x, (float, int)): @@ -7260,7 +7316,7 @@ def __add__(self, x): v.v = self.v + x.v return v else: - raise TypeError("do not know how to add TexCoord and %s"%x.__class__) + raise TypeError("do not know how to add TexCoord and %s" % x.__class__) def __radd__(self, x): if isinstance(x, (float, int)): @@ -7269,7 +7325,7 @@ def __radd__(self, x): v.v = x + self.v return v else: - raise TypeError("do not know how to add %s and TexCoord"%x.__class__) + raise TypeError("do not know how to add %s and TexCoord" % x.__class__) def __sub__(self, x): if isinstance(x, (float, int)): @@ -7283,7 +7339,7 @@ def __sub__(self, x): v.v = self.v - x.v return v else: - raise TypeError("do not know how to substract TexCoord and %s"%x.__class__) + raise TypeError("do not know how to substract TexCoord and %s" % x.__class__) def __rsub__(self, x): if isinstance(x, (float, int)): @@ -7292,7 +7348,7 @@ def __rsub__(self, x): v.v = x - self.v return v else: - raise TypeError("do not know how to substract %s and 
TexCoord"%x.__class__) + raise TypeError("do not know how to substract %s and TexCoord" % x.__class__) def __neg__(self): v = NifFormat.TexCoord() @@ -7300,6 +7356,8 @@ def __neg__(self): v.v = -self.v return v -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/formats/nif/nifxml b/pyffi/formats/nif/nifxml index f265c5648..89d03ffe6 160000 --- a/pyffi/formats/nif/nifxml +++ b/pyffi/formats/nif/nifxml @@ -1 +1 @@ -Subproject commit f265c56482c728c6877e45d5b5993d3bff83670a +Subproject commit 89d03ffe69254ef4b3b58b0a22ec2ff9820b2e63 diff --git a/pyffi/formats/psk/__init__.py b/pyffi/formats/psk/__init__.py index cf689c539..8067786cc 100644 --- a/pyffi/formats/psk/__init__.py +++ b/pyffi/formats/psk/__init__.py @@ -95,17 +95,15 @@ # # ***** END LICENSE BLOCK ***** -from itertools import chain -import struct import os import re -import pyffi.object_models.xml -import pyffi.object_models.common -from pyffi.object_models.xml.basic import BasicBase import pyffi.object_models +import pyffi.object_models.common +import pyffi.object_models.xml from pyffi.utils.graph import EdgeFilter + class PskFormat(pyffi.object_models.xml.FileFormat): """This class implements the PSK format.""" xml_file_name = 'psk.xml' @@ -143,7 +141,7 @@ def version_number(version_str): class Data(pyffi.object_models.FileFormat.Data): """A class to contain the actual psk data.""" - version = 0 # no versioning, so far + version = 0 # no versioning, so far user_version = 0 def inspect_quick(self, stream): @@ -235,6 +233,8 @@ class Chunk: def get_global_display(self): return self.chunk_id.decode("utf8", "ignore") -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/formats/rockstar/dir_/__init__.py b/pyffi/formats/rockstar/dir_/__init__.py index c36c37e6d..fe3f5978f 100644 --- a/pyffi/formats/rockstar/dir_/__init__.py +++ b/pyffi/formats/rockstar/dir_/__init__.py @@ -103,17 +103,16 @@ # # 
***** END LICENSE BLOCK ***** -from itertools import chain -import struct import os import re +import struct -import pyffi.object_models.xml -import pyffi.object_models.common -from pyffi.object_models.xml.basic import BasicBase import pyffi.object_models +import pyffi.object_models.common +import pyffi.object_models.xml from pyffi.utils.graph import EdgeFilter + class DirFormat(pyffi.object_models.xml.FileFormat): """This class implements the DIR format.""" xml_file_name = 'dir.xml' @@ -124,6 +123,7 @@ class DirFormat(pyffi.object_models.xml.FileFormat): # basic types UInt = pyffi.object_models.common.UInt + class String(pyffi.object_models.common.FixedString): _len = 24 @@ -165,10 +165,10 @@ def inspect_quick(self, stream): except struct.error: # this happens if .dir only contains one file record off2 = size1 - if not(off1 == 0 - #and size1 < 1000 # heuristic - and off2 == size1 - and file1[-1] == 0): + if not (off1 == 0 + # and size1 < 1000 # heuristic + and off2 == size1 + and file1[-1] == 0): raise ValueError('Not a Rockstar DIR file.') finally: stream.seek(pos) @@ -187,7 +187,6 @@ def inspect(self, stream): finally: stream.seek(pos) - def read(self, stream): """Read a dir file. 
@@ -243,6 +242,8 @@ def pack(self, image, folder): if len(allbytes) < size: image.write('\x00' * (size - len(allbytes))) -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/formats/tga/__init__.py b/pyffi/formats/tga/__init__.py index db20e9e55..8d80aa252 100644 --- a/pyffi/formats/tga/__init__.py +++ b/pyffi/formats/tga/__init__.py @@ -98,16 +98,16 @@ # # ***** END LICENSE BLOCK ***** -import struct, os, re +import os +import re +import struct -import pyffi.object_models.xml import pyffi.object_models.common -import pyffi.object_models.xml.basic -import pyffi.object_models.xml.struct_ -import pyffi.object_models +import pyffi.object_models.xml import pyffi.utils.graph from pyffi.utils.graph import EdgeFilter + class TgaFormat(pyffi.object_models.xml.FileFormat): """This class implements the TGA format.""" xml_file_name = 'tga.xml' @@ -128,8 +128,9 @@ class TgaFormat(pyffi.object_models.xml.FileFormat): float = pyffi.object_models.common.Float PixelData = pyffi.object_models.common.UndecodedData - class FooterString(pyffi.object_models.xml.basic.BasicBase): + class FooterString(pyffi.object_models.basic.BasicBase): """The Targa footer signature.""" + def __str__(self): return 'TRUEVISION-XFILE.\x00' @@ -143,7 +144,7 @@ def read(self, stream, data): if signat != self.__str__().encode("ascii"): raise ValueError( "invalid Targa signature: expected '%s' but got '%s'" - %(self.__str__(), signat)) + % (self.__str__(), signat)) def write(self, stream, data): """Write signature to stream. @@ -169,7 +170,7 @@ def set_value(self, value): if value != self.__str__(): raise ValueError( "invalid Targa signature: expected '%s' but got '%s'" - %(self.__str__(), value)) + % (self.__str__(), value)) def get_size(self, data=None): """Return number of bytes that the signature occupies in a file. 
@@ -198,7 +199,7 @@ def read(self, stream, data): self.children = [ TgaFormat.Pixel(argument=data.header.pixel_size) for i in range(data.header.width - * data.header.height)] + * data.header.height)] for pixel in self.children: pixel.read(stream, data) else: @@ -230,7 +231,7 @@ class Data(pyffi.object_models.FileFormat.Data): def __init__(self): self.header = TgaFormat.Header() self.image = TgaFormat.Image() - self.footer = None # TgaFormat.Footer() is optional + self.footer = None # TgaFormat.Footer() is optional def inspect(self, stream): """Quick heuristic check if stream contains Targa data, @@ -245,10 +246,10 @@ def inspect(self, stream): # read header try: id_length, colormap_type, image_type, \ - colormap_index, colormap_length, colormap_size, \ - x_origin, y_origin, width, height, \ - pixel_size, flags = struct.unpack("} is an L{AnyType} subclass. """ + def __init__(cls, name, bases, dct): """Initialize array type.""" # create the class @@ -148,6 +192,7 @@ def __init__(cls, name, bases, dct): if not issubclass(cls.ItemType, AnyType): raise TypeError("array ItemType must be an AnyType subclass") + class UniformArray(AnyArray, metaclass=MetaUniformArray): """Wrapper for array with elements of the same type; this type must be a subclass of L{pyffi.object_models.any_type.AnyType}. @@ -187,11 +232,13 @@ def validate(cls, item): % (item.__class__.__name__, cls.ItemType.__name__)) + class MetaUniformSimpleArray(type): """Metaclass for L{UniformSimpleArray}. Checks that L{ItemType} is an L{SimpleType} subclass. 
""" + def __init__(cls, name, bases, dct): """Initialize array type.""" # create the class @@ -201,6 +248,7 @@ def __init__(cls, name, bases, dct): pyffi.object_models.simple_type.SimpleType): raise TypeError("array ItemType must be a SimpleType subclass") + class UniformSimpleArray(AnyArray, metaclass=MetaUniformSimpleArray): """Base class for array's with direct access to values of simple items.""" ItemType = pyffi.object_models.simple_type.SimpleType diff --git a/pyffi/object_models/xml/basic.py b/pyffi/object_models/basic.py similarity index 89% rename from pyffi/object_models/xml/basic.py rename to pyffi/object_models/basic.py index 54ad2d229..cbd7414b8 100644 --- a/pyffi/object_models/xml/basic.py +++ b/pyffi/object_models/basic.py @@ -1,4 +1,19 @@ -"""Implements base class for basic types.""" +""" +:mod:`pyffi.object_models.basic` --- Basic Types +================================================ + +Implements base class for basic types. + +Implementation +-------------- + +.. autoclass:: BasicBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -41,6 +56,7 @@ from pyffi.utils.graph import DetailNode + class BasicBase(DetailNode): """Base class from which all basic types are derived. @@ -76,13 +92,13 @@ class BasicBase(DetailNode): NotImplementedError """ - _is_template = False # is it a template type? - _has_links = False # does the type contain a Ref or a Ptr? - _has_refs = False # does the type contain a Ref? - _has_strings = False # does the type contain a string? - arg = None # default argument + _is_template = False # is it a template type? + _has_links = False # does the type contain a Ref or a Ptr? + _has_refs = False # does the type contain a Ref? + _has_strings = False # does the type contain a string? 
+ arg = None # default argument - def __init__(self, template = None, argument = None, parent = None): + def __init__(self, template=None, argument=None, parent=None): """Initializes the instance. :param template: type used as template @@ -91,7 +107,7 @@ def __init__(self, template = None, argument = None, parent = None): :param parent: The parent of this instance, that is, the instance this instance is an attribute of.""" # parent disabled for performance - #self._parent = weakref.ref(parent) if parent else None + # self._parent = weakref.ref(parent) if parent else None pass # string representation @@ -168,4 +184,3 @@ def get_editor_value(self): def set_editor_value(self, editorvalue): """Set value from editor value.""" return self.set_value(editorvalue) - diff --git a/pyffi/object_models/binary_type.py b/pyffi/object_models/binary_type.py index 87567f011..7d4a3dc9c 100644 --- a/pyffi/object_models/binary_type.py +++ b/pyffi/object_models/binary_type.py @@ -1,4 +1,94 @@ -"""Implements common basic types in XML file format descriptions.""" +""" +:mod:`pyffi.object_models.binary_type` --- Abstract classes for data stored as a binary +======================================================================================= + +Implements common basic types in XML file format descriptions. + +Implementation +-------------- + +.. autoclass:: BinaryType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: BinarySimpleType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: IntType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: UIntType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: ByteType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: UByteType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: ShortType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: UShortType + :show-inheritance: + :members: + :undoc-members: + +.. 
autoclass:: BoolType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: CharType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: Float + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: HFloat + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: ZString + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: FixedString + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: SizedString + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: UndecodedData + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # ***** BEGIN LICENSE BLOCK ***** # @@ -40,34 +130,37 @@ import struct from pyffi.object_models.any_type import AnyType -from pyffi.object_models.simple_type import SimpleType -from pyffi.object_models.editable import EditableSpinBox +from pyffi.object_models.editable import EditableBoolComboBox from pyffi.object_models.editable import EditableFloatSpinBox from pyffi.object_models.editable import EditableLineEdit -from pyffi.object_models.editable import EditableBoolComboBox +from pyffi.object_models.editable import EditableSpinBox +from pyffi.object_models.simple_type import SimpleType + # Base classes class BinaryType(AnyType): """Abstract base class for binary data types.""" + def get_size(self): raise NotImplementedError + class BinarySimpleType(SimpleType, BinaryType): """Abstract base class for binary data types.""" pass -# Helper objects and helper functions (private) -_b = "".encode("ascii") # py3k's b"" -_b00 = "\x00".encode("ascii") # py3k's b"\x00" +# Helper objects and helper functions (private) +_b = "".encode("ascii") # py3k's b"" +_b00 = "\x00".encode("ascii") # py3k's b"\x00" # supports the bytes object for < py26 try: bytes except NameError: - bytes = str # for py25 backwards compatibility + bytes = str # for py25 backwards compatibility if bytes is str: # < py3k: str for byte strings, unicode 
for text strings @@ -78,6 +171,7 @@ class BinarySimpleType(SimpleType, BinaryType): _bytes = bytes _str = str + def _as_bytes(value): """Helper function which converts a string to _bytes (this is useful for set_value in all string classes, which use bytes for representation). @@ -101,6 +195,7 @@ def _as_bytes(value): else: raise TypeError("expected %s or %s" % (_bytes.__name__, _str.__name__)) + def _as_str(value): """Helper function to convert bytes back to str. This is used in the __str__ functions for simple string types. If you want a custom @@ -121,6 +216,7 @@ def _as_str(value): # (this avoids unicode errors) return value.encode("ascii", "replace") + # SimpleType implementations for common binary types class IntType(BinarySimpleType, EditableSpinBox): @@ -156,10 +252,10 @@ class IntType(BinarySimpleType, EditableSpinBox): '0x44332211' """ - _min = -0x80000000 #: Minimum value. + _min = -0x80000000 #: Minimum value. _max = 0x7fffffff #: Maximum value. - _struct = 'i' #: Character used to represent type in struct. - _size = 4 #: Number of bytes. + _struct = 'i' #: Character used to represent type in struct. + _size = 4 #: Number of bytes. # SimpleType @@ -177,13 +273,13 @@ def set_value(self, value): val = int(value) except ValueError: try: - val = int(value, 16) # for '0x...' strings + val = int(value, 16) # for '0x...' 
strings except ValueError: try: - val = getattr(self, value) # for enums + val = getattr(self, value) # for enums except AttributeError: raise ValueError( - "cannot convert value '%s' to integer"%value) + "cannot convert value '%s' to integer" % value) if val < self._min or val > self._max: raise ValueError('value out of range (%i)' % val) self._value = val @@ -232,6 +328,7 @@ def get_editor_maximum(self): """ return self._max + class UIntType(IntType): """Implementation of a 32-bit unsigned integer type.""" _min = 0 @@ -239,6 +336,7 @@ class UIntType(IntType): _struct = 'I' _size = 4 + class ByteType(IntType): """Implementation of a 8-bit signed integer type.""" _min = -0x80 @@ -246,6 +344,7 @@ class ByteType(IntType): _struct = 'b' _size = 1 + class UByteType(IntType): """Implementation of a 8-bit unsigned integer type.""" _min = 0 @@ -253,6 +352,7 @@ class UByteType(IntType): _struct = 'B' _size = 1 + class ShortType(IntType): """Implementation of a 16-bit signed integer type.""" _min = -0x8000 @@ -260,6 +360,7 @@ class ShortType(IntType): _struct = 'h' _size = 2 + class UShortType(UIntType): """Implementation of a 16-bit unsigned integer type.""" _min = 0 @@ -267,6 +368,7 @@ class UShortType(UIntType): _struct = 'H' _size = 2 + class BoolType(UByteType, EditableBoolComboBox): """Simple bool implementation.""" @@ -287,6 +389,7 @@ def set_value(self, value): raise TypeError("expected a bool") self._value = 1 if value else 0 + class CharType(BinarySimpleType, EditableLineEdit): """Implementation of an (unencoded) 8-bit character.""" @@ -300,8 +403,8 @@ def set_value(self, value): :param value: The value to assign (bytes of length 1). 
:type value: bytes """ - assert(isinstance(value, _bytes)) - assert(len(value) == 1) + assert (isinstance(value, _bytes)) + assert (len(value) == 1) self._value = value def read(self, stream): @@ -330,6 +433,7 @@ def get_size(self): """ return 1 + class Float(BinarySimpleType, EditableFloatSpinBox): """Implementation of a 32-bit float.""" @@ -368,6 +472,46 @@ def get_size(self): """ return 4 + +class HFloat(BinarySimpleType, EditableFloatSpinBox): + """Implementation of a 16-bit float.""" + + def __init__(self): + """Initialize the float.""" + self._value = 0.0 + + def set_value(self, value): + """Set value to C{value}. + + :param value: The value to assign. + :type value: float + """ + self._value = float(value) + + def read(self, stream): + """Read value from stream. + + :param stream: The stream to read from. + :type stream: file + """ + self._value = struct.unpack('>> str(m) 'Hi There!' """ - _maxlen = 1000 #: The maximum length. + _maxlen = 1000 #: The maximum length. def __init__(self): """Initialize the string.""" @@ -444,6 +588,7 @@ def get_size(self): """ return len(self._value) + 1 + class FixedString(BinarySimpleType, EditableLineEdit): """String of fixed length. Default length is 0, so you must override this class and set the _len class variable. @@ -521,6 +666,7 @@ def get_size(self): """ return self._len + class SizedString(BinarySimpleType, EditableLineEdit): """Basic type for strings. The type starts with an unsigned int which describes the length of the string. 
@@ -596,8 +742,10 @@ def write(self, stream): stream.write(struct.pack(' self._max: raise ValueError('value out of range (%i)' % val) self._value = val @@ -177,6 +280,12 @@ def write(self, stream, data): def __str__(self): return str(self.get_value()) + def __repr__(self): + return f"<{self.__class__.__name__} {{'min': {self._min}, 'max': {self._max}, 'struct': {self._struct}, 'size': {self._size}, 'val': {self._value}}}>" + + def __int__(self): + return int(self.get_value()) + @classmethod def get_size(cls, data=None): """Return number of bytes this type occupies in a file. @@ -206,6 +315,7 @@ def get_editor_maximum(self): """ return self._max + class UInt(Int): """Implementation of a 32-bit unsigned integer type.""" _min = 0 @@ -213,6 +323,7 @@ class UInt(Int): _struct = 'I' _size = 4 + class Int64(Int): """Implementation of a 64-bit signed integer type.""" _min = -0x8000000000000000 @@ -220,6 +331,7 @@ class Int64(Int): _struct = 'q' _size = 8 + class UInt64(Int): """Implementation of a 64-bit unsigned integer type.""" _min = 0 @@ -227,6 +339,7 @@ class UInt64(Int): _struct = 'Q' _size = 8 + class Byte(Int): """Implementation of a 8-bit signed integer type.""" _min = -0x80 @@ -234,6 +347,7 @@ class Byte(Int): _struct = 'b' _size = 1 + class UByte(Int): """Implementation of a 8-bit unsigned integer type.""" _min = 0 @@ -241,6 +355,7 @@ class UByte(Int): _struct = 'B' _size = 1 + class Short(Int): """Implementation of a 16-bit signed integer type.""" _min = -0x8000 @@ -248,6 +363,7 @@ class Short(Int): _struct = 'h' _size = 2 + class UShort(UInt): """Implementation of a 16-bit unsigned integer type.""" _min = 0 @@ -255,18 +371,19 @@ class UShort(UInt): _struct = 'H' _size = 2 + class ULittle32(UInt): """Little endian 32 bit unsigned integer (ignores specified data byte order). """ + def read(self, stream, data): """Read value from stream. :param stream: The stream to read from. 
:type stream: file """ - self._value = struct.unpack('<' + self._struct, - stream.read(self._size))[0] + self._value = struct.unpack('<' + self._struct, stream.read(self._size))[0] def write(self, stream, data): """Write value to stream. @@ -276,6 +393,7 @@ def write(self, stream, data): """ stream.write(struct.pack('<' + self._struct, self._value)) + class Bool(UByte, EditableBoolComboBox): """Simple bool implementation.""" @@ -294,6 +412,7 @@ def set_value(self, value): """ self._value = 1 if value else 0 + class Char(BasicBase, EditableLineEdit): """Implementation of an (unencoded) 8-bit character.""" @@ -315,8 +434,8 @@ def set_value(self, value): :param value: The value to assign (bytes of length 1). :type value: bytes """ - assert(isinstance(value, bytes)) - assert(len(value) == 1) + assert (isinstance(value, bytes)) + assert (len(value) == 1) self._value = value def read(self, stream, data): @@ -338,6 +457,9 @@ def write(self, stream, data): def __str__(self): return _as_str(self._value) + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {self._value}}}>" + def get_size(self, data=None): """Return number of bytes this type occupies in a file. @@ -352,6 +474,7 @@ def get_hash(self, data=None): """ self.get_value() + class Float(BasicBase, EditableFloatSpinBox): """Implementation of a 32-bit float.""" @@ -412,7 +535,122 @@ def get_hash(self, data=None): :return: An immutable object that can be used as a hash. """ - return int(self.get_value()*200) + return int(self.get_value() * 200) + + def __float__(self): + return float(self.get_value()) + + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {self._value}}}>" + + +class HFloat(Float, EditableFloatSpinBox): + """Implementation of a 16-bit float.""" + + def __init__(self, **kwargs): + """Initialize the float.""" + super(HFloat, self).__init__(**kwargs) + self._value = 0 + + def read(self, stream, data): + """Read value from stream. + + :param stream: The stream to read from. 
+ :type stream: file + """ + self._value = struct.unpack(data._byte_order + 'e', + stream.read(2))[0] + + def write(self, stream, data): + """Write value to stream. + + :param stream: The stream to write to. + :type stream: file + """ + try: + stream.write(struct.pack(data._byte_order + 'e', + self._value)) + except OverflowError: + logger = logging.getLogger("pyffi.object_models") + logger.warn("float value overflow, writing NaN") + stream.write(struct.pack(data._byte_order + 'H', + 0x7fc00000)) + + def get_size(self, data=None): + """Return number of bytes this type occupies in a file. + + :return: Number of bytes. + """ + return 2 + + +class NormByte(Float, EditableFloatSpinBox): # TODO: This shit + """Implementation of an 8-bit float in the range -1.0:1.0, stored as a byte.""" + _min = -0x1 + _max = 0x1 + _struct = 'B' + _size = 1 + + def __init__(self, **kwargs): + super(NormByte, self).__init__(**kwargs) + self._value = 0.0 + + def get_value(self): + return self._value + + def set_value(self, value): + val = float(value) + if val < self._min or val > self._max: + raise ValueError('value out of range (%i - %i): %i', self._min, self._max, val) + self._value = val + + def read(self, stream, data): + """Read value from stream. + + :param stream: The stream to read from. + :param data: + :type stream: file + """ + self._value = struct.unpack(data._byte_order + self._struct, stream.read(self._size))[0] + + def write(self, stream, data): + """Write value to stream. + + :param stream: The stream to write to. + :param data: + :type stream: file + """ + stream.write(struct.pack(data._byte_order + self._struct, self._value)) + + @classmethod + def get_size(cls, data=None): + """Return number of bytes this type occupies in a file. + + :return: Number of bytes. + """ + return cls._size + + def get_hash(self, data=None): + """Return a hash value for this value. + + :return: An immutable object that can be used as a hash. 
+ """ + return self.get_value() + + def get_editor_minimum(self): + """Minimum possible value. + + :return: Minimum possible value. + """ + return self._min + + def get_editor_maximum(self): + """Maximum possible value. + + :return: Maximum possible value. + """ + return self._max + class ZString(BasicBase, EditableLineEdit): """String of variable length (null terminated). @@ -434,7 +672,7 @@ class ZString(BasicBase, EditableLineEdit): >>> str(m) 'Hi There!' """ - _maxlen = 1000 #: The maximum length. + _maxlen = 1000 #: The maximum length. def __init__(self, **kwargs): """Initialize the string.""" @@ -444,6 +682,9 @@ def __init__(self, **kwargs): def __str__(self): return _as_str(self._value) + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {str(self._value)}}}>" + def get_value(self): """Return the string. @@ -506,6 +747,7 @@ def get_hash(self, data=None): """ return self._value + class FixedString(BasicBase, EditableLineEdit): """String of fixed length. Default length is 0, so you must override this class and set the _len class variable. @@ -539,6 +781,9 @@ def __init__(self, **kwargs): def __str__(self): return _as_str(self._value) + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {str(self._value)}}}>" + def get_value(self): """Return the string. @@ -591,6 +836,7 @@ def get_hash(self, data=None): """ return self._value + class SizedString(BasicBase, EditableLineEdit): """Basic type for strings. The type starts with an unsigned int which describes the length of the string. @@ -641,6 +887,9 @@ def set_value(self, value): def __str__(self): return _as_str(self._value) + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {str(self._value)}}}>" + def get_size(self, data=None): """Return number of bytes this type occupies in a file. 
@@ -678,8 +927,10 @@ def write(self, stream, data): len(self._value))) stream.write(self._value) + class UndecodedData(BasicBase): """Basic type for undecoded data trailing at the end of a file.""" + def __init__(self, **kwargs): BasicBase.__init__(self, **kwargs) self._value = b'' @@ -704,6 +955,9 @@ def set_value(self, value): def __str__(self): return '' + def __repr__(self): + return f"<{self.__class__.__name__} {{'val': {self._value}}}>" + def get_size(self, data=None): """Return number of bytes the data occupies in a file. @@ -734,4 +988,3 @@ def write(self, stream, data): :type stream: file """ stream.write(self._value) - diff --git a/pyffi/object_models/editable.py b/pyffi/object_models/editable.py index 50a01d4e3..f4827c050 100644 --- a/pyffi/object_models/editable.py +++ b/pyffi/object_models/editable.py @@ -1,4 +1,6 @@ -"""Implements abstract editor base classes. +""" +:mod:`pyffi.object_models.editable` --- Abstract editor base classes +==================================================================== These abstract base classes provide an abstract layer for editing data in a graphical user interface. @@ -6,6 +8,46 @@ @todo: Make these into true abstract base classes, and implement and use the get_editor_value and set_editor_value functions in non-abstract derived classes. + +Implementation +-------------- + +.. autoclass:: EditableBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableSpinBox + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableFloatSpinBox + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableLineEdit + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableTextEdit + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableComboBox + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: EditableBoolComboBox + :show-inheritance: + :members: + :undoc-members: + +.. 
todo:: Show examples for usage """ # ***** BEGIN LICENSE BLOCK ***** @@ -45,8 +87,10 @@ # # ***** END LICENSE BLOCK ***** + class EditableBase(object): """The base class for all delegates.""" + def get_editor_value(self): """Return data as a value to initialize an editor with. Override this method. @@ -66,6 +110,7 @@ def set_editor_value(self, editorvalue): """ raise NotImplementedError + class EditableSpinBox(EditableBase): """Abstract base class for data that can be edited with a spin box that contains an integer. Override get_editor_minimum and get_editor_maximum to @@ -74,6 +119,7 @@ class EditableSpinBox(EditableBase): Requirement: get_editor_value must return an ``int``, set_editor_value must take an ``int``. """ + def get_editor_value(self): return self.get_value() @@ -86,6 +132,7 @@ def get_editor_minimum(self): def get_editor_maximum(self): return 0x7fffffff + class EditableFloatSpinBox(EditableSpinBox): """Abstract base class for data that can be edited with a spin box that contains a float. Override get_editor_decimals to set the number of decimals @@ -98,6 +145,7 @@ class EditableFloatSpinBox(EditableSpinBox): def get_editor_decimals(self): return 5 + class EditableLineEdit(EditableBase): """Abstract base class for data that can be edited with a single line editor. @@ -107,6 +155,7 @@ class EditableLineEdit(EditableBase): """ pass + class EditableTextEdit(EditableLineEdit): """Abstract base class for data that can be edited with a multiline editor. @@ -115,6 +164,7 @@ class EditableTextEdit(EditableLineEdit): """ pass + class EditableComboBox(EditableBase): """Abstract base class for data that can be edited with combo boxes. This can be used for for instance enum types. @@ -127,11 +177,13 @@ def get_editor_keys(self): """Tuple of strings, each string describing an item.""" return () + class EditableBoolComboBox(EditableComboBox): """Class for data that can be edited with a bool combo box. 
Requirement: get_value must return a ``bool``, set_value must take a ``bool``. """ + def get_editor_keys(self): return ("False", "True") @@ -145,4 +197,3 @@ def set_editor_value(self, editorvalue): def get_editor_value(self): return 1 if self.get_value() else 0 - diff --git a/pyffi/object_models/xml/expression.py b/pyffi/object_models/expression.py similarity index 74% rename from pyffi/object_models/xml/expression.py rename to pyffi/object_models/expression.py index e9d79318d..9dfde90ee 100644 --- a/pyffi/object_models/xml/expression.py +++ b/pyffi/object_models/expression.py @@ -1,5 +1,20 @@ -"""Expression parser (for arr1, arr2, cond, and vercond xml attributes of - tag).""" +""" +:mod:`pyffi.object_models.expression` --- Expression parser +=========================================================== + +Expression parser (for length, width, cond, and vercond xml attributes of + tag). + +Implementation +-------------- + +.. autoclass:: Expression + :show-inheritance: + :members: + :undoc-members: + +.. 
todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -40,8 +55,12 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- +import logging import re -import sys # stderr (for debugging) + +from pyffi.utils import parse_scientific_notation + +scientific_notation = re.compile("[-+]?[\d]+\.?[\d]*[Ee](?:[-+]?[\d]+)?") class Expression(object): @@ -73,17 +92,18 @@ class Expression(object): True >>> bool(Expression('1 != 1').eval()) False + >>> bool(Expression('0xFFF00000000000 >> 44').eval()) + 4095 """ - operators = set(('==', '!=', '>=', '<=', '&&', '||', '&', '|', '-', '!', - '<', '>', '/', '*', '+', '%')) + operators = {'<<', '>>', '==', '!=', '>=', '<=', '&&', '||', '&', '|', '-', '!', '<', '>', '/', '*', '+', '%'} def __init__(self, expr_str, name_filter=None): try: left, self._op, right = self._partition(expr_str) self._left = self._parse(left, name_filter) self._right = self._parse(right, name_filter) - except: + except Exception: print("error while parsing expression '%s'" % expr_str) raise @@ -115,6 +135,8 @@ def eval(self, data=None): elif isinstance(self._right, str): if (not self._right) or self._right == '""': right = "" + elif self._right.lower() in ["infinity", "inf"]: + right = float("inf") else: right = getattr(data, self._right) elif isinstance(self._right, type): @@ -125,40 +147,51 @@ def eval(self, data=None): assert (isinstance(self._right, int)) # debug right = self._right - if self._op == '==': - return left == right - elif self._op == '!=': - return left != right - elif self._op == '>=': - return left >= right - elif self._op == '<=': - return left <= right - elif self._op == '&&': - return left and right - elif self._op == '||': - return left or right - elif self._op == '&': - return left & right - elif self._op == '|': - return left | right - elif self._op == '-': - return left - right - elif 
self._op == '!': - return not (right) - elif self._op == '>': - return left > right - elif self._op == '<': - return left < right - elif self._op == '/': - return left / right - elif self._op == '*': - return left * right - elif self._op == '+': - return left + right - elif self._op == '%': - return left % right - else: - raise NotImplementedError("expression syntax error: operator '" + self._op + "' not implemented") + try: + if self._op == '>>': + return left >> right + elif self._op == '<<': + return left << right + elif self._op == '==': + return left == right + elif self._op == '!=': + return left != right + elif self._op == '>=': + return left >= right + elif self._op == '<=': + return left <= right + elif self._op == '&&': + return left and right + elif self._op == '||': + return left or right + elif self._op == '&': + return left & right + elif self._op == '|': + return left | right + elif self._op == '-': + return left - right + elif self._op == '!': + return not (right) + elif self._op == '>': + return left > right + elif self._op == '<': + return left < right + elif self._op == '/': + return left / right + elif self._op == '*': + return left * right + elif self._op == '+': + return left + right + elif self._op == '%': + return left % right + else: + raise NotImplementedError("expression syntax error: operator '" + self._op + "' not implemented") + except UnboundLocalError: + logging.getLogger().error("Expression value was unbound (left: %s, op: %s, right: %s", self._left, self._op, self._right) + raise + except ArithmeticError: + logging.getLogger().error("Failed to evaluate expression. 
Left: %s; Op: %s; Right: %s", left, self._op, right) + raise def __str__(self): """Reconstruct the expression to a string.""" @@ -168,6 +201,9 @@ def __str__(self): right = str(self._right) if not self._right is None else "" return left + ' ' + self._op + ' ' + right + def __repr__(self): + return "Expression(%s)" % self.__str__() + @classmethod def _parse(cls, expr_str, name_filter=None): """Returns an Expression, string, or int, depending on the @@ -175,13 +211,24 @@ def _parse(cls, expr_str, name_filter=None): if not expr_str: # empty string return None + srch = scientific_notation.search(expr_str) + if srch is not None and len(srch[0]) == len(expr_str): + return int(parse_scientific_notation(expr_str)) # We don't have float support so convert to int # brackets or operators => expression if ("(" in expr_str) or (")" in expr_str): return Expression(expr_str, name_filter) for op in cls.operators: - if expr_str.find(op) != -1: + pos = expr_str.find(op) + if pos != -1: + if srch is not None and srch.pos < pos < len(srch[0]): + continue return Expression(expr_str, name_filter) # try to convert it to an integer + if expr_str.startswith("0x"): + try: + return int(expr_str, 16) + except ValueError: + pass try: return int(expr_str) # failed, so return the string, passed through the name filter @@ -190,19 +237,22 @@ def _parse(cls, expr_str, name_filter=None): m = re.match(r'^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)$', expr_str) if m: ver = ( - (int(m.group(1)) << 24) - + (int(m.group(2)) << 16) - + (int(m.group(3)) << 8) - + int(m.group(4)) + (int(m.group(1)) << 24) + + (int(m.group(2)) << 16) + + (int(m.group(3)) << 8) + + int(m.group(4)) ) return ver # apply name filter on each component separately # (where a dot separates components) + if expr_str == "#ARG#": + return "arg" + if expr_str == "infinity": + return float("inf") if name_filter is None: name_filter = lambda x: x - return '.'.join(name_filter(comp) - for comp in expr_str.split(".")) - return expr_str + return 
'.'.join(name_filter(comp) for comp in expr_str.split(".")) + # return expr_str @classmethod def _partition(cls, expr_str): @@ -210,10 +260,14 @@ def _partition(cls, expr_str): >>> Expression._partition('abc || efg') ('abc', '||', 'efg') + >>> Expression._partition('0xFFF00000000000 >> 44') + ('0xFFF00000000000', '>>', '44') >>> Expression._partition('(a== b) &&(( b!=c)||d )') ('a== b', '&&', '( b!=c)||d') >>> Expression._partition('!(1 <= 2)') ('', '!', '(1 <= 2)') + >>> Expression._partition('3.402823466e+38') + ('3.402823466e+38', '', '') >>> Expression._partition('') ('', '', '') """ @@ -254,9 +308,14 @@ def _partition(cls, expr_str): else: raise ValueError("expression syntax error: expected operator at '%s'" % expr_str[op_startpos:]) else: + sci_not_search = scientific_notation.search(expr_str) + start = len(sci_not_search[0]) if sci_not_search is not None and sci_not_search.pos == 0 else 0 + if start == len(expr_str): + start = start - 1 # it's not... so we need to scan for the first operator - for op_startpos, ch in enumerate(expr_str): - if ch == ' ': continue + for op_startpos, ch in enumerate(expr_str, start=start): + if ch == ' ': + continue if ch == '(' or ch == ')': raise ValueError("expression syntax error: expected operator before '%s'" % expr_str[op_startpos:]) # to avoid confusion between && and &, and || and |, diff --git a/pyffi/object_models/mex/__init__.py b/pyffi/object_models/mex/__init__.py index 67b8e941b..7e6fdcc26 100644 --- a/pyffi/object_models/mex/__init__.py +++ b/pyffi/object_models/mex/__init__.py @@ -1,6 +1,25 @@ -"""Format classes and metaclasses for binary file formats described by a +""" +:mod:`pyffi.object_models.mex` --- Mex script fileformat parser +=============================================================== + +Format classes and metaclasses for binary file formats described by a mexscript file, and mexscript parser for converting the mexscript description into Python classes. + +Implementation +-------------- + +.. 
autoclass:: _MetaMexFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: MexFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage """ # ***** BEGIN LICENSE BLOCK ***** @@ -45,6 +64,7 @@ import pyffi.object_models import pyffi.object_models.simple_type + class _MetaMexFileFormat(pyffi.object_models.MetaFileFormat): """Converts the mex script into an archive parser.""" @@ -57,12 +77,13 @@ def __init__(cls, name, bases, dct): mexfile = cls.openfile(mexfilename, cls.mexfilepath) # XXX todo: parse the script + class MexFileFormat(pyffi.object_models.FileFormat): """This class can be used as a base class for file formats described by a mexscript file. """ - mexfilename = None #: Override. - mexfilepath = None #: Override. + mexfilename = None #: Override. + mexfilepath = None #: Override. logger = logging.getLogger("pyffi.object_models.mex") class FileInfo: @@ -76,7 +97,7 @@ class FileInfo: fileformat = None """Potentially, the format of the file.""" - + offset = None """Offset in the archive.""" diff --git a/pyffi/object_models/niftoolsxml/__init__.py b/pyffi/object_models/niftoolsxml/__init__.py new file mode 100644 index 000000000..6f6c34361 --- /dev/null +++ b/pyffi/object_models/niftoolsxml/__init__.py @@ -0,0 +1,816 @@ +""" +:mod:`pyffi.object_models.xml` --- XML xml parser +================================================================= + +Format classes and metaclasses for binary file formats described by an xml +file, and xml handler for converting the xml description into Python classes. + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + :titlesonly: + + array + bit_struct + enum + expression + struct + +Implementation +-------------- + +.. autoclass:: MetaFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: NifToolsFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. 
autoclass:: StructAttribute + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: BitStructAttribute + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: Version + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: Module + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: XmlParser + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: XmlError + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" + +# ***** BEGIN LICENSE BLOCK ***** +# +# Copyright (c) 2007-2012, Python File Format Interface +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Python File Format Interface +# project nor the names of its contributors may be used to endorse +# or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# ***** END LICENSE BLOCK ***** + +import logging +import re +import time # for timing stuff +import typing +from typing import Callable, Optional, List, Tuple +from dataclasses import dataclass, field +# import xml.etree.ElementTree as ET +import lxml.etree as ET + +import pyffi.object_models +from pyffi.object_models.expression import Expression +from pyffi.object_models.basic import BasicBase +from pyffi.object_models.niftoolsxml.bit_struct import BitStructBase +from pyffi.object_models.niftoolsxml.enum import EnumBase +from pyffi.object_models.niftoolsxml.struct_ import StructBase + +main_games = re.compile("(?<={{)[^{}]+(?=}})") +logger = logging.getLogger("pyffi.object_models.xml") +default_list = field(default_factory=list) + + +def skip(_element: ET.Element): + return + + +class MetaFileFormat(pyffi.object_models.MetaFileFormat): + """The MetaFileFormat metaclass transforms the XML description + of a file format into a bunch of classes which can be directly + used to manipulate files in this format. + + The actual implementation of the parser is delegated to + pyffi.object_models.xml.FileFormat. + """ + + def __init__(cls, name, bases, dct): + """This function constitutes the core of the class generation + process. 
For instance, we declare NifFormat to have metaclass + MetaFileFormat, so upon creation of the NifFormat class, + the __init__ function is called, with + + :param cls: The class created using MetaFileFormat, for example + NifFormat. + :param str name: The name of the class, for example 'NifFormat'. + :param typing.Set[typing.Any] bases: The base classes, usually (object,). + :param dct: A dictionary of class attributes, such as 'xml_file_name'. + """ + + super(MetaFileFormat, cls).__init__(name, bases, dct) + + # preparation: make deep copy of lists of enums, structs, etc. + cls.xml_enum = cls.xml_enum[:] + cls.xml_alias = cls.xml_alias[:] + cls.xml_bit_struct = cls.xml_bit_struct[:] + cls.xml_struct = cls.xml_struct[:] + + # parse XML + + # we check dct to avoid parsing the same file more than once in + # the hierarchy + xml_file_name = dct.get('xml_file_name') + if xml_file_name: + cls.logger.debug("Parsing %s and generating classes." % xml_file_name) + # open XML file + start = time.time() + xml_file = cls.openfile(xml_file_name, cls.xml_file_path) + xmlp = XmlParser(cls) + try: + xmlp.load_xml(xml_file) + finally: + xml_file.close() + + cls.logger.debug("Parsing finished in %.3f seconds." % (time.time() - start)) + + +class NifToolsFileFormat(pyffi.object_models.FileFormat, metaclass=MetaFileFormat): + """This class can be used as a base class for file formats + described by a xml file.""" + xml_file_name = None #: Override. + xml_file_path = None #: Override. + logger = logger + + # We also keep an ordered list of all classes that have been created. + # The xml_struct list includes all xml generated struct classes, + # including those that are replaced by a native class in cls (for + # instance NifFormat.String). The idea is that these lists should + # contain sufficient info from the xml, so they can be used to write + # other python scripts that would otherwise have to implement their own + # xml parser. See makehsl.py for an example of usage. 
+ # + # (note: no classes are created for basic types, so no list for those) + xml_enum = [] + xml_alias = [] + xml_bit_struct = [] + xml_struct = [] + + +@dataclass +class StructAttribute: + """Helper class to collect attribute data of struct add tags. + + Attributes: + displayname (str): The name of this member variable + name (str): The name of this member variable converted for use as a python variable + type_: The type of this attribute + default: The default value + template: The template type of this member variable + arg (Optional[Expression]): The argument of this member variable + length (Optional[Expression]): First array size + width (Optional[Expression]): Second array size + cond (Optional[Expression]): The condition of this member variable + vercond: The version condition of this member variable + since (Optional[int]): The version this member exists, None if there is no lower limit + until (Optional[int]): The last version this member exists, None if there is no upper limit + userver (Optional[int]): The user version this member exists, None if it exists for all + doc (str): The docstring of this attribute + is_abstract (bool): Whether this attribute is abstract or not (read and written)""" + + displayname: str + name: str + type_: typing.Any + default: typing.Any + template: typing.Any + arg: Optional[Expression] + length: Optional[Expression] + width: Optional[Expression] + cond: Optional[Expression] + vercond: Optional[Expression] + since: Optional[int] + until: Optional[int] + userver: Optional[int] + doc: str + is_abstract: bool + + # TODO: Handle Suffix + + @classmethod + def create(clz, cls, attrs): + """Initialize attribute from the xml attrs dictionary of an + add tag. + + :param cls: The class where all types reside. + :param attrs: The xml add tag attribute dictionary.""" + # mandatory parameters + params = { + 'displayname': attrs["name"], + # The name of this member variable. 
+ 'name': cls.name_attribute(attrs["name"]) + } + # The type of this member variable (type is ``str`` for forward declarations, and resolved to :class:`BasicBase` or :class:`StructBase` later). + try: + attrs_type_str = attrs["type"] + except KeyError: + raise AttributeError("'%s' is missing a type attribute" % params['displayname']) + if attrs_type_str != "TEMPLATE": + try: + params['type_'] = getattr(cls, attrs_type_str) + except AttributeError: + # forward declaration, resolved in final_cleanup() + params['type_'] = attrs_type_str + else: + # type determined at runtime + params['type_'] = type(None) + # optional parameters + + # default value of this member variable. + params['default'] = attrs.get("default") + # template type of this member variable (initially ``str``, resolved in final_cleanup() to :class:`BasicBase` or :class:`StructBase` at the end + # of the xml parsing), and if there is no template type, then this variable will equal `None`. + params['template'] = attrs.get("template") + # argument of this member variable. + params['arg'] = attrs.get("arg") + # first array size of this member variable, as :class:`Expression` or `None`. + params['length'] = attrs.get("length") + # second array size of this member variable, as :class:`Expression` or `None`. + params['width'] = attrs.get("width") + # condition of this member variable, as :class:`Expression` or `None`. + params['cond'] = attrs.get("cond") + # version condition for this member variable + params['vercond'] = attrs.get("vercond") + # first version this member exists, as `int`, and `None` if there is no lower limit. + params['since'] = attrs.get("since") + # last version this member exists, as `int`, and `None` if there is no upper limit. + params['until'] = attrs.get("until") + # user version this member exists, as `int`, and `None` if it exists for all user versions. 
+ params['userver'] = attrs.get("userver") + # docstring is handled in xml parser's characters function + params['doc'] = "" + # Whether the attribute is abstract or not (read and written). + params['is_abstract'] = (attrs.get("abstract") in ("1", "true")) + + # post-processing + if params['default']: + try: + tmp = params['type_']() + tmp.set_value(params['default']) + params['default'] = tmp.get_value() + del tmp + except Exception: + # conversion failed; not a big problem + params['default'] = None + if params['length']: + params['length'] = Expression(params['length'], cls.name_attribute) + if params['width']: + params['width'] = Expression(params['width'], cls.name_attribute) + if params['cond']: + params['cond'] = Expression(params['cond'], cls.name_attribute) + if params['vercond']: + params['vercond'] = Expression(params['vercond'], cls.name_attribute) + if params['arg']: + params['arg'] = Expression(params['arg'], cls.name_attribute) + if params['userver']: + params['userver'] = int(params['userver']) + if params['since']: + params['since'] = cls.version_number(params['since']) + if params['until']: + params['until'] = cls.version_number(params['until']) + + return clz(**params) + + +@dataclass +class BitStructAttribute: + """Helper class to collect attribute data of bitstruct bits tags. + + Attributes: + name: The name of this variable + numbits: Total number of bits used + default: The default value + cond: An expression to check if value can be used + since: The version where this started existing + until: The version where this existed until + userver: The user version applicable for this attribute + doc: The doc string of this attribute""" + name: str + numbits: int + default: Optional[int] + cond: Optional[Expression] + since: Optional[int] + until: Optional[int] + userver: Optional[int] + doc: Optional[str] + + @classmethod + def create(cls, clazz, attrs): + """Initialize attribute from the xml attrs dictionary of an + add tag. 
+ + :param clazz: The class where all types reside. + :param attrs: The xml add tag attribute dictionary.""" + params = { + # mandatory parameters + "name": clazz.name_attribute(attrs["name"]), + "numbits": int(clazz.name_attribute(attrs["numbits"])), + # optional parameters + "default": attrs.get("default"), + "cond": attrs.get("cond"), + "since": attrs.get("since"), + "until": attrs.get("until"), + "userver": attrs.get("userver"), + "doc": "" + } + + # post-processing + if params["default"]: + params["default"] = int(params["default"]) + if params["cond"]: + params["cond"] = Expression(params["cond"], clazz.name_attribute) + if params["userver"]: + params["userver"] = int(params["userver"]) + if params["since"]: + params["since"] = clazz.version_number(params["since"]) + if params["until"]: + params["until"] = clazz.version_number(params["until"]) + + return cls(**params) + + +@dataclass +class Version: + """A dataclass for Versions, all versions must have an ID and version num""" + id: str + """The ID of the version""" + num: int + """The number of the version. Note: this is in integer format, this was converted using the `version_number` + function from the relative FileFormat subclass.""" + games: List[str] + """A list of games which use this version. 
The primary versions of games is stored by TODO: THIS SHIT""" + supported: bool = field(default_factory=lambda: True) + """This is false if this version is not fully supported by the xml""" + user: Optional[List[int]] = field(default_factory=list) + """The custom User Version(s) for a specific Version from a game/developer.""" + bsver: Optional[List[int]] = field(default_factory=list) + """The custom Bethesda Version(s) for a specific Version and User Version.""" + custom: bool = field(default_factory=lambda: False) + """True when version contains extensions to the format not originating from Gamebryo.""" + ext: Optional[List[str]] = field(default_factory=list) + """Any custom NIF extensions associated with this version.""" + + +@dataclass +class Module: + name: str + priority: int + depends: Optional[List[str]] = field(default_factory=list()) + + +class XmlError(Exception): + """The XML handler will throw this exception if something goes wrong while + parsing.""" + pass + + +class XmlParser: # TODO: look into lxml iterparse so that we can get error with specific line numbers + """ + Attributes: + tokens: A list of token tuples ({token: string}, [attrs]) + """ + struct_types = ("niobject", "struct") + bitstruct_types = ("bitfield", "bitflags", "bitstruct") + + def __init__(self, cls): + """Set up the xml parser.""" + + self.attrib = None + self.name = None + + self.load_dict: dict[str, Callable[[ET.Element], None]] = { + 'token': self.read_token, + 'verattr': skip, # Skip for now, not used + 'version': self.read_version, + 'module': skip, # Skip for now, not used + 'basic': self.read_basic, + 'enum': self.read_enum, + 'bitflags': self.read_bitstruct, + 'bitfield': self.read_bitstruct, + 'bitstruct': self.read_bitstruct, + 'struct': self.read_struct, + 'niobject': self.read_struct + } + + # initialize dictionaries + # map each supported version string to a version number + cls.versions = {} + # initialize dictionaries + # map each supported version string to a 
version number + cls.versions_num = {} + # map each supported game to a list of header version numbers + cls.games = {} + # note: block versions are stored in the _games attribute of the struct class + cls.main_versions = {} + # This will contain {Game Name}: {Primary Version} for use later + + # cls needs to be accessed in member functions, so make it an instance member variable + self.cls = cls + + # elements for creating new classes + self.class_name = None + self.class_dict = None + self.base_class = () + + # elements for versions + self.version_string = None + + # list of tuples ({tokens}, (target_attribs)) for each + self.tokens: list[Tuple[list[Tuple[str, str]], list[str]]] = [] + self.versions = [([], ("versions", "until", "since")), ] + + def load_xml(self, file): + """Loads an XML (can be filepath or open file) and does all parsing""" + parser = ET.XMLParser(remove_comments=True) # TODO: discuss whether we want the comments or not + tree = ET.parse(file, parser) + root = tree.getroot() + self.name = root.tag + self.attrib = root.attrib + logger.debug("Parsing XML root: %s<%s>", self.name, self.attrib) + self.load_root(root) + self.final_cleanup() + + def load_root(self, root: ET.Element): + """Goes over all children of the root node and calls the appropriate function depending on type of the child""" + for child in root: + tag = child.tag + if isinstance(tag, ET.Comment.__class__): # TODO: See above + continue + elif tag in self.load_dict: + self.load_dict[tag](child) + else: + logger.warning("Unknown type in XML: %s<%s>. 
Skipping...", tag, child.attrib) + + # the following constructs do not create classes + def read_token(self, token: ET.Element): + """Reads an XML block and stores it in the tokens list""" + + self.tokens.append(([], token.attrib["attrs"].split(" "))) + for sub_token in token: + string = sub_token.attrib["string"] + if sub_token.attrib["token"] == "#BSVER#": # #BSVER# = `BS Header\BS Version` which I don't think works, maybe + string = string.split('\\')[1] + self.tokens[-1][0].append((sub_token.attrib["token"], string)) + + def read_version(self, version: ET.Element): # TODO: Clean this up, currently really shit + """Reads an XML block and stores it in the versions list + + See :py:class:Version for more info!""" + ver_attr = version.attrib + + if 'id' not in ver_attr and 'num' not in ver_attr: + logger.error("Versions contains no `id` and/or `num` and is incorrect: (%s)", ver_attr) + return + # versions must be in reverse order so don't append but insert at beginning + # TODO: why must this be reverse order + # if "id" in version.attrib: + # self.versions[0][0].insert(0, (version.attrib["id"], version.attrib["num"])) + # add to supported versions + self.version_string = ver_attr["num"] + # Converts the version string into a number + + version_id = ver_attr['id'] + version_num = self.cls.version_number(ver_attr['num']) + + params = {"id": version_id, "num": version_num, 'games': version.text.split(', ')} + + if 'supported' in ver_attr: + params['supported'] = bool(ver_attr['supported']) + + # if 'user' in ver_attr: # TODO: + + # if 'bsver' in ver_attr: # TODO: + + if 'custom' in ver_attr: + params['custom'] = bool(ver_attr['custom']) + elif 'user' in params and 'bsver' in params and \ + params['user'] is not None and params['bsver'] is not None: + params['custom'] = True + + if 'ext' in ver_attr: + params['ext'] = list() # TODO: THIS + + ver = Version(**params) + self.version_string = version_id + self.cls.versions[version_id] = ver + 
self.cls.versions_num[ver_attr['num']] = ver.num + for game in main_games.findall(version.text): + if game in self.cls.main_versions: + logger.warning("Duplicate main game (%s) was found in version (%s)", game, version_id) + continue + self.cls.main_versions[game] = ver + self.update_gamesdict(self.cls.games, version.text) + self.version_string = None + + def read_module(self, module: ET.Element): + """Reads an XML block""" + # TODO + + def read_basic(self, basic: ET.Element): + """Maps to a type defined in self.cls""" + self.class_name = basic.attrib["name"] + # Each basic type corresponds to a type defined in C{self.cls}. + # The link between basic types and C{self.cls} types is done via the name of the class. + basic_class = getattr(self.cls, self.class_name) + # check the class variables + is_template = self.is_generic(basic.attrib) + if basic_class._is_template != is_template: + raise XmlError('class %s should have _is_template = %s' % (self.class_name, is_template)) + + # link class cls. to basic_class + setattr(self.cls, self.class_name, basic_class) + + # the following constructs create classes + def read_bitstruct(self, bitstruct: ET.Element): + """Create a bitstruct class""" + attrs = self.replace_tokens(bitstruct.attrib) + self.base_class = BitStructBase + self.update_class_dict(attrs, bitstruct.text) + try: + numbytes = int(attrs["numbytes"]) + except KeyError: + # niftools style: storage attribute + numbytes = getattr(self.cls, attrs["storage"]).get_size() + self.class_dict["_attrs"] = [] + self.class_dict["_numbytes"] = numbytes + for member in bitstruct: + attrs = self.replace_tokens(member.attrib) + if member.tag == "bits": + # eg. 
+ # mandatory parameters + bit_attrs = attrs + elif member.tag == "option": + # niftools compatibility, we have a bitflags field + # so convert value into numbits + # first, calculate current bit position + bitpos = sum(bitattr.numbits for bitattr in self.class_dict["_attrs"]) + # avoid crash + if "value" in attrs: + # check if extra bits must be inserted + numextrabits = int(attrs["value"]) - bitpos + if numextrabits < 0: + raise XmlError("values of bitflags must be increasing") + if numextrabits > 0: + reserved = dict(name="Reserved Bits %i" % len(self.class_dict["_attrs"]), numbits=numextrabits) + self.class_dict["_attrs"].append(BitStructAttribute.create(self.cls, reserved)) + # add the actual attribute + bit_attrs = dict(name=attrs["name"], numbits=1) + # new nif xml + elif member.tag == "member": + bit_attrs = dict(name=attrs["name"], numbits=attrs["width"]) + else: + raise XmlError("only bits tags allowed in struct type declaration") + + self.class_dict["_attrs"].append(BitStructAttribute.create(self.cls, bit_attrs)) + self.update_doc(self.class_dict["_attrs"][-1].doc, member.text) + + self.create_class(bitstruct.tag) + + def read_struct(self, struct: ET.Element): + """Create a struct class""" + attrs = self.replace_tokens(struct.attrib) + self.update_class_dict(attrs, struct.text) + # struct types can be organized in a hierarchy + # if inherit attribute is defined, look for corresponding base block + class_basename = attrs.get("inherit") + if class_basename: + # class_basename must have been assigned to a class + try: + self.base_class = getattr(self.cls, class_basename) + except KeyError: + raise XmlError( + "Struct (%s) inherits an unknown or forward declared struct (%s)" % (struct.tag, class_basename)) + else: + self.base_class = StructBase + # set attributes (see class StructBase) + # 'generic' attribute is optional- if not set, then the struct is not a template + self.class_dict["_is_template"] = self.is_generic(attrs) + self.class_dict["_attrs"] = [] 
+ # self.class_dict["_games"] = {} # TODO: Is this used? + for field in struct: + attrs = self.replace_tokens(field.attrib) + # the common case + if field.tag in ("add", "field"): + # add attribute to class dictionary + self.class_dict["_attrs"].append(StructAttribute.create(self.cls, attrs)) + self.update_doc(self.class_dict["_attrs"][-1].doc, field.text) + # Note: version tags are not found in current xml + else: + logger.warning("only add tags allowed in struct declaration") + # load defaults for this + for default in field: # TODO: Why do we do nothing with defaults + if default.tag != "default": + raise AttributeError("struct children's children must be 'default' tag") + self.create_class(struct.tag) + + def read_enum(self, enum: ET.Element): + """Create an enum class""" + attrs = self.replace_tokens(enum.attrib) + self.base_class = EnumBase + self.update_class_dict(attrs, enum.text) + try: + numbytes = int(attrs["numbytes"]) + except KeyError: + # niftools format uses a storage + # get number of bytes from that + typename = attrs["storage"] + try: + typ = getattr(self.cls, typename) + except AttributeError: + raise XmlError("typo, or forward declaration of type %s" % typename) + numbytes = typ.get_size() + # add stuff to classdict + self.class_dict["_numbytes"] = numbytes + self.class_dict["_enumkeys"] = [] + self.class_dict["_enumvalues"] = [] + for option in enum: + attrs = self.replace_tokens(option.attrib) + if option.tag not in ("option",): + raise XmlError("only option tags allowed in enum declaration, found %s instead" % option.tag) + value = attrs["value"] + try: + # note: use long rather than int to work around 0xffffffff + # error in qskope + value = int(value) + except ValueError: + value = int(value, 16) + self.class_dict["_enumkeys"].append(attrs["name"]) + self.class_dict["_enumvalues"].append(value) + self.create_class(enum.tag) + + def read_alias(self, alias: ET.Element): + """Create an alias class, ie. 
one that gives access to another class""" + self.update_class_dict(alias.attrib, alias.text) + typename = alias.attrib["type"] + try: + self.base_class = getattr(self.cls, typename) + except AttributeError: + raise XmlError("typo, or forward declaration of type %s" % typename) + self.create_class(alias.tag) + + # the following are helper functions + @staticmethod + def is_generic(attr): + # be backward compatible + return (attr.get("generic") == "true") or (attr.get("istemplate") == "1") + + def update_gamesdict(self, gamesdict, ver_text): + if ver_text: + # update the gamesdict dictionary + for gamestr in (g.strip() for g in ver_text.split(',')): + if gamestr in gamesdict: + gamesdict[gamestr].append(self.cls.versions[self.version_string]) + else: + gamesdict[gamestr] = [self.cls.versions[self.version_string]] + + def update_class_dict(self, attrs, doc_text): + """This initializes class_dict, sets the class name and doc text""" + doc_text = doc_text.strip() if doc_text else "" + self.class_name = attrs["name"] + self.class_dict = {"__doc__": doc_text, "__module__": self.cls.__module__} + + @staticmethod + def update_doc(doc, doc_text): + if doc_text: + doc += doc_text.strip() + + def create_class(self, tag): + """Creates a class for (tag name of the class that was just finished)""" + # assign it to cls. if it has not been implemented internally + + # type(name, bases, dict) returns a new type object, essentially a dynamic form of the class statement + cls_klass = getattr(self.cls, self.class_name, None) + # does the class exist? + if cls_klass: + # do nothing if this is a Basic type + if issubclass(cls_klass, BasicBase): + return + # it has been created in format's __init__.py + # create and add to base class of customizer + gen_klass = type("_" + self.class_name, (self.base_class,), self.class_dict) + setattr(self.cls, "_" + self.class_name, gen_klass) + # recreate the class, to ensure that the metaclass is called!! 
+ # (otherwise, cls_klass does not have correct _attribute_list, etc.) + cls_klass = type(cls_klass.__name__, (gen_klass,) + cls_klass.__bases__, dict(cls_klass.__dict__)) + setattr(self.cls, self.class_name, cls_klass) + # if the class derives from Data, then make an alias + if issubclass(cls_klass, pyffi.object_models.FileFormat.Data): + self.cls.Data = cls_klass + # for the stuff below + gen_klass = cls_klass + else: + # does not yet exist: create it and assign to class dict + gen_klass = type(self.class_name, (self.base_class,), self.class_dict) + setattr(self.cls, self.class_name, gen_klass) + # append class to the appropriate list + if tag in self.struct_types: + self.cls.xml_struct.append(gen_klass) + elif tag in self.bitstruct_types: + self.cls.xml_bit_struct.append(gen_klass) + elif tag == "enum": + self.cls.xml_enum.append(gen_klass) + elif tag == "alias": + self.cls.xml_alias.append(gen_klass) + + def replace_tokens(self, attr_dict): + """Update attr_dict with content of tokens+versions list.""" + # TODO: Versions aren't replaced anymore as they may contain user and bethesda versions as well + for tokens, target_attribs in self.tokens: + # logger.warning("Tokens: %s", self.tokens) + for target_attrib in target_attribs: + # logger.warning("Target Attrib: %s", target_attrib) + if target_attrib in attr_dict: + expr_str = attr_dict[target_attrib] + for op_token, op_str in tokens: + expr_str = expr_str.replace(op_token, op_str) + attr_dict[target_attrib] = expr_str + # additional tokens that are not specified by nif.xml + fixed_tokens = ( + ("\\", "."), (">", ">"), ("<", "<"), ("&", "&"), ("#ARG#", "ARG"), ("#T#", "TEMPLATE")) + for attrib, expr_str in attr_dict.items(): + for op_token, op_str in fixed_tokens: + expr_str = expr_str.replace(op_token, op_str) + attr_dict[attrib] = expr_str + # onlyT & excludeT act as aliases for deprecated cond + prefs = (("onlyT", ""), ("excludeT", "!")) + for t, pref in prefs: + if t in attr_dict: + attr_dict["cond"] = pref + 
attr_dict[t] + break + return attr_dict + + def final_cleanup(self): + """Called when the xml is completely parsed. + Searches and adds class customized functions. + Fixes forward declaration of templates and adds all primary game versions to main_versions. + """ + # get 'name_attribute' for all classes + # we need this to fix them in cond="..." later + klass_filter = {} + for klass in self.cls.xml_struct: + klass_filter[self.cls.name_attribute(klass.__name__)] = klass + for obj in list(self.cls.__dict__.values()): + # skip objects that are not generated by the C{type} function + # or that do not derive from StructBase + if not (isinstance(obj, type) and issubclass(obj, StructBase)): + continue + # fix templates + for attr in obj._attrs: + templ = attr.template + if isinstance(templ, str): + attr.template = getattr(self.cls, templ) if templ != "TEMPLATE" else type(None) + attrtype = attr.type_ + if isinstance(attrtype, str): + attr.type_ = getattr(self.cls, attrtype) + # fix refs to types in conditions + if attr.cond: + attr.cond.map_(lambda x: klass_filter[x] if x in klass_filter else x) diff --git a/pyffi/object_models/niftoolsxml/array.py b/pyffi/object_models/niftoolsxml/array.py new file mode 100644 index 000000000..19135bcc4 --- /dev/null +++ b/pyffi/object_models/niftoolsxml/array.py @@ -0,0 +1,447 @@ +""" +:mod:`pyffi.object_models.xml.array` --- Array classes +============================================================== + +Implements class for arrays. + +Implementation +-------------- + +.. autoclass:: Array + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _ListWrap + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" + +# -------------------------------------------------------------------------- +# ***** BEGIN LICENSE BLOCK ***** +# +# Copyright (c) 2007-2012, Python File Format Interface +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Python File Format Interface +# project nor the names of its contributors may be used to endorse +# or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# +# ***** END LICENSE BLOCK ***** +# -------------------------------------------------------------------------- + +# note: some imports are defined at the end to avoid problems with circularity + +import logging +import weakref +from typing import Optional + +from object_models.expression import Expression +from pyffi.object_models.basic import BasicBase +from pyffi.utils.graph import DetailNode, EdgeFilter + + +class _ListWrap(list, DetailNode): + """A wrapper for list, which uses get_value and set_value for + getting and setting items of the basic type.""" + + def __init__(self, element_type, parent=None): + self._parent = weakref.ref(parent) if parent else None + self._elementType = element_type + # we link to the unbound methods (that is, self.__class__.xxx + # instead of self.xxx) to avoid circular references!! + if issubclass(element_type, BasicBase): + self._get_item_hook = self.__class__.get_basic_item + self._set_item_hook = self.__class__.set_basic_item + self._iter_item_hook = self.__class__.iter_basic_item + else: + self._get_item_hook = self.__class__.get_item + self._set_item_hook = self.__class__.set_item # TODO: Why should this work? + self._iter_item_hook = self.__class__.iter_item + + def __getitem__(self, index): + return self._get_item_hook(self, index) + + def __setitem__(self, index, value): + return self._set_item_hook(self, index, value) + + def __iter__(self): + return self._iter_item_hook(self) + + def __contains__(self, value): + # ensure that the "in" operator uses self.__iter__() rather than + # list.__iter__() + for elem in self.__iter__(): + if elem == value: + return True + return False + + def _not_implemented_hook(self, *args): + """A hook for members that are not implemented.""" + raise NotImplementedError + + def iter_basic_item(self): + """Iterator which calls C{get_value()} on all items. 
Applies when + the list has BasicBase elements.""" + for elem in list.__iter__(self): + yield elem.get_value() + + def iter_item(self): + """Iterator over all items. Applies when the list does not have + BasicBase elements.""" + for elem in list.__iter__(self): + yield elem + + def get_basic_item(self, index): + """Item getter which calls C{get_value()} on the C{index}'d item.""" + return list.__getitem__(self, index).get_value() + + def set_basic_item(self, index, value): + """Item setter which calls C{set_value()} on the C{index}'d item.""" + return list.__getitem__(self, index).set_value(value) + + def get_item(self, index): + """Regular item getter, used when the list does not have BasicBase + elements.""" + return list.__getitem__(self, index) + + def set_item(self, index, value): + """Regular item setter, used when the list does not have BasicBase + elements.""" + return list.__setitem__(self, index, value) + + # DetailNode + + def get_detail_child_nodes(self, edge_filter=EdgeFilter()): + """Yield children.""" + return (item for item in list.__iter__(self)) + + def get_detail_child_names(self, edge_filter=EdgeFilter()): + """Yield child names.""" + return ("[%i]" % row for row in range(list.__len__(self))) + + +class Array(_ListWrap): + """A general purpose class for 1 or 2 dimensional arrays consisting of + either BasicBase or StructBase elements.""" + + logger = logging.getLogger("pyffi.nif.data.array") + arg = None # default argument + + def __init__( + self, + name=None, + element_type=None, + element_type_template=None, + element_type_argument: Optional[Expression] = None, + length=None, width=None, + parent=None): + """Initialize the array type. + + :param name: The name of this array, used for error logging and debugging + :param element_type: The class describing the type of each element. + :param element_type_template: If the class takes a template type + argument, then this argument describes the template type. 
+ :param element_type_argument: If the class takes a type argument, then + it is described here. + :param length: An C{Expression} describing the count (first dimension). + :param width: Either ``None``, or an C{Expression} describing the + second dimension count. + :param parent: The parent of this instance, that is, the instance this + array is an attribute of.""" + if width is None: + _ListWrap.__init__(self, + element_type=element_type, parent=parent) + else: + _ListWrap.__init__(self, + element_type=_ListWrap, parent=parent) + self._name = name + self._parent = weakref.ref(parent) if parent else None + self._length = length + self._width = width + self._elementType = element_type + self._elementTypeTemplate = element_type_template + self._elementTypeArgument = element_type_argument + + try: + if self._width is None: + for i in range(self._lengthT()): + elem_instance = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument, + parent=self) + self.append(elem_instance) + else: + for i in range(self._lengthT()): + elem = _ListWrap(element_type=element_type, parent=self) + for j in range(self._widthT(i)): + elem_instance = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument, + parent=elem) + elem.append(elem_instance) + self.append(elem) + except ArithmeticError: + self.logger.exception("Failed to initialize default array") + + def _lengthT(self): + """The _length the array should have, obtained by evaluating the _length expression.""" + if self._parent is None: + return int(self._length.eval()) + else: + return int(self._length.eval(self._parent())) + + def _widthT(self, index1): + """The _length the array should have, obtained by evaluating the width expression.""" + if self._width is None: + raise ValueError('single array treated as double array (bug?)') + if self._parent is None: + expr = self._width.eval() + else: + expr = self._width.eval(self._parent()) + if isinstance(expr, 
int): + return expr + elif isinstance(expr, float): + return int(expr) + else: + return int(expr[index1]) + + def deepcopy(self, block): + """Copy attributes from a given array which needs to have at least as many elements (possibly more) as self.""" + if self._width is None: + for i in range(self._lengthT()): + attrvalue = self[i] + if isinstance(attrvalue, StructBase): + attrvalue.deepcopy(block[i]) + elif isinstance(attrvalue, Array): + attrvalue.update_size() + attrvalue.deepcopy(block[i]) + else: + self[i] = block[i] + else: + for i in range(self._lengthT()): + for j in range(self._widthT(i)): + attrvalue = self[i][j] + if isinstance(attrvalue, StructBase): + attrvalue.deepcopy(block[i][j]) + elif isinstance(attrvalue, Array): + attrvalue.update_size() + attrvalue.deepcopy(block[i][j]) + else: + self[i][j] = block[i][j] + + # string of the array + def __str__(self): + text = '%s instance at 0x%08X\n' % (self.__class__, id(self)) + if self._width is None: + for i, element in enumerate(list.__iter__(self)): + if i > 16: + text += "etc...\n" + break + text += "%i: %s" % (i, element) + if text[-1:] != "\n": + text += "\n" + else: + k = 0 + for i, elemlist in enumerate(list.__iter__(self)): + for j, elem in enumerate(list.__iter__(elemlist)): + if k > 16: + text += "etc...\n" + break + text += "%i, %i: %s" % (i, j, elem) + if text[-1:] != "\n": + text += "\n" + k += 1 + if k > 16: + break + return text + + def update_size(self): + """Update the array size. 
Call this function whenever the size + parameters change in C{parent}.""" + ## TODO also update row numbers + old_size = len(self) + new_size = self._lengthT() + if self._width is None: + if new_size < old_size: + del self[new_size:old_size] + else: + for i in range(new_size - old_size): + elem = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument) + self.append(elem) + else: + if new_size < old_size: + del self[new_size:old_size] + else: + for i in range(new_size - old_size): + self.append(_ListWrap(self._elementType)) + for i, elemlist in enumerate(list.__iter__(self)): + old_size_i = len(elemlist) + new_size_i = self._widthT(i) + if new_size_i < old_size_i: + del elemlist[new_size_i:old_size_i] + else: + for j in range(new_size_i - old_size_i): + elem = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument) + elemlist.append(elem) + + def read(self, stream, data): + """Read array from stream.""" + # parse arguments + if self.arg is not None: + self._elementTypeArgument = self.arg + + # check array size + length = self._lengthT() + self.logger.debug("Reading array of size " + str(length)) + if length > 0x10000000: + raise ValueError('Array %s too long (%i)' % (self._name, length)) + del self[0:self.__len__()] + + # read array + if self._width is None: + for i in range(length): + element = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument, + parent=self) + element.read(stream, data) + self.append(element) + else: + for i in range(length): + width = self._widthT(i) + if width > 0x10000000: + raise ValueError('array too long (%i)' % width) + element_list = _ListWrap(self._elementType, parent=self) + for j in range(width): + element = self._elementType( + template=self._elementTypeTemplate, + argument=self._elementTypeArgument, + parent=element_list) + element.read(stream, data) + element_list.append(element) + self.append(element_list) + + 
def write(self, stream, data): + """Write array to stream.""" + self._elementTypeArgument = self.arg + len1 = self._lengthT() + if len1 != self.__len__(): + raise ValueError('array size (%i) different from to field describing number of elements (%i)' % + (self.__len__(), len1)) + if len1 > 0x10000000: + raise ValueError('array too long (%i)' % len1) + if self._width is None: + for elem in list.__iter__(self): + elem.write(stream, data) + else: + for i, elemlist in enumerate(list.__iter__(self)): + len2i = self._widthT(i) + if len2i != elemlist.__len__(): + raise ValueError("array size (%i) different from to field describing number of elements (%i)" % + (elemlist.__len__(), len2i)) + if len2i > 0x10000000: + raise ValueError('array too long (%i)' % len2i) + for elem in list.__iter__(elemlist): + elem.write(stream, data) + + def fix_links(self, data): + """Fix the links in the array by calling C{fix_links} on all elements + of the array.""" + if not self._elementType._has_links: + return + for elem in self._elementList(): + elem.fix_links(data) + + def get_links(self, data=None): + """Return all links in the array by calling C{get_links} on all elements + of the array.""" + links = [] + if not self._elementType._has_links: + return links + for elem in self._elementList(): + links.extend(elem.get_links(data)) + return links + + def get_strings(self, data): + """Return all strings in the array by calling C{get_strings} on all + elements of the array.""" + strings = [] + if not self._elementType._has_strings: + return strings + for elem in self._elementList(): + strings.extend(elem.get_strings(data)) + return strings + + def get_refs(self, data=None): + """Return all references in the array by calling C{get_refs} on all + elements of the array.""" + links = [] + if not self._elementType._has_links: + return links + for elem in self._elementList(): + links.extend(elem.get_refs(data)) + return links + + def get_size(self, data=None): + """Calculate the sum of the size of 
all elements in the array.""" + return sum( + (elem.get_size(data) for elem in self._elementList()), 0) + + def get_hash(self, data=None): + """Calculate a hash value for the array, as a tuple.""" + hsh = [] + for elem in self._elementList(): + hsh.append(elem.get_hash(data)) + return tuple(hsh) + + def replace_global_node(self, old_branch, new_branch, **kwargs): + """Calculate a hash value for the array, as a tuple.""" + for elem in self._elementList(): + elem.replace_global_node(old_branch, new_branch, **kwargs) + + def _elementList(self, **kwargs): + """Generator for listing all elements.""" + if self._width is None: + for elem in list.__iter__(self): + yield elem + else: + for elemlist in list.__iter__(self): + for elem in list.__iter__(elemlist): + yield elem + + +from pyffi.object_models.xml.struct_ import StructBase diff --git a/pyffi/object_models/niftoolsxml/bit_struct.py b/pyffi/object_models/niftoolsxml/bit_struct.py new file mode 100644 index 000000000..985e22648 --- /dev/null +++ b/pyffi/object_models/niftoolsxml/bit_struct.py @@ -0,0 +1,503 @@ +""" +:mod:`pyffi.object_models.xml.bit_struct` --- Bit Structured Types +========================================================================== + +Implements base class for bitstruct types. + +Implementation +-------------- + +.. autoclass:: Bits + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: BitStructBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaBitStructBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" + +# -------------------------------------------------------------------------- +# ***** BEGIN LICENSE BLOCK ***** +# +# Copyright (c) 2007-2012, Python File Format Interface +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Python File Format Interface +# project nor the names of its contributors may be used to endorse +# or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+# +# ***** END LICENSE BLOCK ***** +# -------------------------------------------------------------------------- + +# note: some imports are defined at the end to avoid problems with circularity + +import struct +from functools import partial + +from pyffi.object_models.editable import EditableSpinBox # for Bits +from pyffi.utils.graph import DetailNode, EdgeFilter + + +class _MetaBitStructBase(type): + """This metaclass checks for the presence of a _attrs attribute. + For each attribute in _attrs, an property is generated which gets and sets bit fields. + Used as metaclass of BitStructBase.""" + + def __init__(cls, name, bases, dct): + super(_MetaBitStructBase, cls).__init__(name, bases, dct) + # consistency checks + if not '_attrs' in dct: + raise TypeError('%s: missing _attrs attribute' % cls) + if not '_numbytes' in dct: + raise TypeError('%s: missing _numbytes attribute' % cls) + + # check storage type + if cls._numbytes == 1: + cls._struct = 'B' + elif cls._numbytes == 2: + cls._struct = 'H' + elif cls._numbytes == 4: + cls._struct = 'I' + elif cls._numbytes == 8: + cls._struct = 'Q' + else: + raise RuntimeError("unsupported bitstruct numbytes") + + # template type? + cls._is_template = False + # does the type contain a Ref or a Ptr? + cls._has_links = False + # does the type contain a Ref? + cls._has_refs = False + # does the type contain a string? 
+ cls._has_strings = False + for attr in dct['_attrs']: + # get and set basic attributes + setattr(cls, attr.name, property( + partial(BitStructBase.get_attribute, name=attr.name), + partial(BitStructBase.set_attribute, name=attr.name), + doc=attr.doc)) + + # precalculate the attribute list + cls._attribute_list = cls._get_attribute_list() + + # precalculate the attribute name list + cls._names = cls._get_names() + + def __repr__(cls): + return "" % (cls.__name__) + + +class Bits(DetailNode, EditableSpinBox): + """Basic implementation of a n-bit unsigned integer type (without read and write).""" + + def __init__(self, numbits=1, default=0, parent=None): + # parent disabled for performance + # self._parent = weakref.ref(parent) if parent else None + self._value = default + self._numbits = numbits + + def get_value(self): + """Return stored value.""" + return self._value + + def set_value(self, value): + """Set value to C{value}.""" + if not isinstance(value, int): + raise TypeError("bitstruct attribute must be integer") + if value >> self._numbits: + raise ValueError('value out of range (%i)' % value) + self._value = value + + def __str__(self): + return str(self.get_value()) + + # DetailNode + + def get_detail_display(self): + """Return an object that can be used to display the instance.""" + return str(self._value) + + # EditableSpinBox functions + + def get_editor_value(self): + return self.get_value() + + def set_editor_value(self, editorvalue): + self.set_value(editorvalue) + + def get_editor_minimum(self): + return 0 + + def get_editor_maximum(self): + return (1 << self._numbits) - 1 + + +class BitStructBase(DetailNode, metaclass=_MetaBitStructBase): + """Base class from which all file bitstruct types are derived. + + The BitStructBase class implements the basic bitstruct interface: + it will initialize all attributes using the class interface + using the _attrs class variable, represent them as strings, and so on. 
+ The class variable _attrs must be declared every derived class interface. + + Each item in the class _attrs list stores the information about + the attribute as stored for instance in the xml file, and the + __value_ instance variable stores the actual attribute + instance. + + Direct access to the attributes is implemented using a + property which invokes the get_attribute and set_attribute + functions, as demonstrated below. + + See the pyffi.XmlHandler class for a more advanced example. + + >>> from pyffi.object_models.basic import BasicBase + >>> from pyffi.object_models.xml import BitStructAttribute as Attr + >>> class SimpleFormat(object): + ... @staticmethod + ... def name_attribute(name): + ... return name + >>> class Flags(BitStructBase): + ... _numbytes = 1 + ... _attrs = [ + ... Attr(SimpleFormat, dict(name='a', numbits='3')), + ... Attr(SimpleFormat, dict(name='b', numbits='1'))] + >>> SimpleFormat.Flags = Flags + >>> y = Flags() + >>> y.a = 5 + >>> y.b = 1 + >>> print(y) # doctest:+ELLIPSIS + instance at 0x... + * a : 5 + * b : 1 + + >>> y.get_attributes_values(None) + 13 + >>> y.populate_attribute_values(9, None) + >>> print(y) # doctest:+ELLIPSIS + instance at 0x... + * a : 1 + * b : 1 + + """ + + _attrs = [] + _numbytes = 1 # default width of a bitstruct + _games = {} + arg = None # default argument + + # initialize all attributes + def __init__(self, template=None, argument=None, parent=None): + """The constructor takes a tempate: any attribute whose type, + or template type, is type(None) - which corresponds to + TEMPLATE in the xml description - will be replaced by this + type. The argument is what the ARG xml tags will be replaced with. + + :param template: If the class takes a template type + argument, then this argument describes the template type. + :param argument: If the class takes a type argument, then + it is described here. 
+ :param parent: The parent of this instance, that is, the instance this + array is an attribute of.""" + # used to track names of attributes that have already been added + # is faster than self.__dict__.has_key(...) + names = [] + # initialize argument + self.arg = argument + # save parent (note: disabled for performance) + # self._parent = weakref.ref(parent) if parent else None + + # initialize item list + # list is used for instance by qskope to display the structure in a tree view + self._items = [] + # initialize attributes + for attr in self._attribute_list: + # skip attributes with duplicate names + if attr.name in names: + continue + names.append(attr.name) + + # instantiate the integer + if attr.default is not None: + attr_instance = Bits(numbits=attr.numbits, default=attr.default, parent=self) + else: + attr_instance = Bits(numbits=attr.numbits, parent=self) + + # assign attribute value + setattr(self, "_%s_value_" % attr.name, attr_instance) + + # add instance to item list + self._items.append(attr_instance) + + def deepcopy(self, block): + """Copy attributes from a given block (one block class must be a + subclass of the other). Returns self.""" + # check class lineage + if isinstance(self, block.__class__): + attrlist = block._get_filtered_attribute_list() + elif isinstance(block, self.__class__): + attrlist = self._get_filtered_attribute_list() + else: + raise ValueError("deepcopy: classes %s and %s unrelated" + % (self.__class__.__name__, block.__class__.__name__)) + # copy the attributes + for attr in attrlist: + setattr(self, attr.name, getattr(block, attr.name)) + + return self + + def get_value(self): + return int(self) + + def set_value(self, value): # TODO: Make sure this works! 
+ bitpos = 0 + for item in self._items: + item._value |= (value & ((1 << item._numbits) - 1)) << bitpos + bitpos += item._numbits + pass + + # string of all attributes + def __str__(self): + text = '%s instance at 0x%08X\n' % (self.__class__, id(self)) + # used to track names of attributes that have already been added + # is faster than self.__dict__.has_key(...) + for attr in self._get_filtered_attribute_list(): + # append string + attr_str_lines = str( + getattr(self, "_%s_value_" % attr.name)).splitlines() + if len(attr_str_lines) > 1: + text += '* %s :\n' % attr.name + for attr_str in attr_str_lines: + text += ' %s\n' % attr_str + else: + text += '* %s : %s\n' % (attr.name, attr_str_lines[0]) + return text + + def __int__(self): + value = 0 + bitpos = 0 + for item in self._items: + value |= (item._value & ((1 << item._numbits) - 1)) << bitpos + bitpos += item._numbits + return value + + def __and__(self, other): + return 0 if other is None else self.__int__() & int(other) + + def __or__(self, other): + return 0 if other is None else self.__int__() | int(other) + + def __lshift__(self, other): + return int(self) << other + + def __rshift__(self, other): + return int(self) >> other + + def read(self, stream, data): + """Read structure from stream.""" + # read all attributes + value = struct.unpack(data._byte_order + self._struct, stream.read(self._numbytes))[0] + + # set the structure variables + self.populate_attribute_values(value, data) + + def populate_attribute_values(self, value, data): + """Set structure values from integer.""" + bitpos = 0 + for attr in self._get_filtered_attribute_list(data): + # print(attr.name) # debug + attrvalue = (value >> bitpos) & ((1 << attr.numbits) - 1) + setattr(self, attr.name, attrvalue) + bitpos += attr.numbits + + def get_attributes_values(self, data): + # implementation note: not defined via __int__ because conversion + # takes arguments + """Get as integer.""" + value = 0 + bitpos = 0 + for attr in self._get_filtered_attribute_list(data): 
+ attrvalue = getattr(self, attr.name) + value |= (attrvalue & ((1 << attr.numbits) - 1)) << bitpos + bitpos += attr.numbits + return value + + def write(self, stream, data): + """Write structure to stream.""" + stream.write(struct.pack(data._byte_order + self._struct, self.get_attributes_values(data))) + + def fix_links(self, data): + """Fix links in the structure.""" + return + + def get_links(self, data=None): + """Get list of all links in the structure.""" + return [] + + def get_strings(self, data): + """Get list of all strings in the structure.""" + return [] + + def get_refs(self, data=None): + """Get list of all references in the structure. Refs are + links that point down the tree. For instance, if you need to parse + the whole tree starting from the root you would use get_refs and not + get_links, as get_links could result in infinite recursion.""" + return [] + + def get_size(self, data=None): + """Calculate the structure size in bytes.""" + return self._numbytes + + def get_hash(self, data=None): + """Calculate a hash for the structure, as a tuple.""" + # calculate hash + hsh = [] + for attr in self._get_filtered_attribute_list(data): + hsh.append(getattr(self, attr.name)) + return tuple(hsh) + + @classmethod + def get_games(cls): + """Get games for which this block is supported.""" + return list(cls._games.keys()) + + @classmethod + def get_versions(cls, game): + """Get versions supported for C{game}.""" + return cls._games[game] + + @classmethod + def _get_attribute_list(cls): + """Calculate the list of all attributes of this structure.""" + # string of attributes of base classes of cls + attrs = [] + for base in cls.__bases__: + try: + attrs.extend(base._get_attribute_list()) + except AttributeError: # when base class is "object" + pass + attrs.extend(cls._attrs) + return attrs + + @classmethod + def _get_names(cls): + """Calculate the list of all attributes names in this structure. 
+ Skips duplicate names.""" + # string of attributes of base classes of cls + names = [] + for attr in cls._attrs: + if attr.name in names: + continue + else: + names.append(attr.name) + return names + + def _get_filtered_attribute_list(self, data=None): + """Generator for listing all 'active' attributes, that is, attributes whose condition evaluates ``True``, + whose version interval contains C{version}, and whose user version is C{user_version}. + ``None`` for C{version} or C{user_version} means that these checks are ignored. + Duplicate names are skipped as well. + + Note: Use data instead of version and user_version (old way will be deprecated).""" + names = [] + if data: + version = data.version + user_version = data.user_version + else: + version = None + user_version = None + for attr in self._attribute_list: + # print(attr.name, version, attr.since, attr.until) # debug + + # check version + if not (version is None): + if (not (attr.since is None)) and version < attr.since: + continue + if (not (attr.until is None)) and version > attr.until: + continue + # print("version check passed") # debug + + # check user version + if not (attr.userver is None or user_version is None) \ + and user_version != attr.userver: + continue + # print("user version check passed") # debug + + # check condition + if not (attr.cond is None) and not attr.cond.eval(self): + continue + # print("condition passed") # debug + + # skip dupiclate names + if attr.name in names: + continue + # print("duplicate check passed") # debug + + names.append(attr.name) + # passed all tests + # so yield the attribute + yield attr + + def get_attribute(self, name): + """Get a basic attribute.""" + return getattr(self, "_" + name + "_value_").get_value() + + # important note: to apply partial(set_attribute, name = 'xyz') the + # name argument must be last + def set_attribute(self, value, name): + """Set the value of a basic attribute.""" + getattr(self, "_" + name + "_value_").set_value(value) + + def 
tree(self): + """A generator for parsing all blocks in the tree (starting from and + including C{self}). By default, there is no tree structure, so returns + self.""" + # return self + yield self + + # DetailNode + + def get_detail_child_nodes(self, edge_filter=EdgeFilter()): + """Yield children of this structure.""" + return (item for item in self._items) + + def get_detail_child_names(self, edge_filter=EdgeFilter()): + """Yield name of each child.""" + return (name for name in self._names) diff --git a/pyffi/object_models/niftoolsxml/enum.py b/pyffi/object_models/niftoolsxml/enum.py new file mode 100644 index 000000000..01cfdc298 --- /dev/null +++ b/pyffi/object_models/niftoolsxml/enum.py @@ -0,0 +1,231 @@ +""" +:mod:`pyffi.object_models.xml.enum` --- Enumerated Types +================================================================ + +Abstract base class for implementing xml enum types. + +Implementation +-------------- + +.. autoclass:: EnumBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaEnumBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" + +# ***** BEGIN LICENSE BLOCK ***** +# +# Copyright (c) 2007-2012, Python File Format Interface +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Python File Format Interface +# project nor the names of its contributors may be used to endorse +# or promote products derived from this software without specific +# prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# ***** END LICENSE BLOCK ***** + +import logging +import struct + +from pyffi.object_models.basic import BasicBase +from pyffi.object_models.editable import EditableComboBox + + +class _MetaEnumBase(type): + """This metaclass checks for the presence of _enumkeys, _enumvalues, + and _numbytes attributes. It also adds enum class attributes. + + Used as metaclass of EnumBase.""" + + def __init__(cls, name, bases, dct): + super(_MetaEnumBase, cls).__init__(name, bases, dct) + # consistency checks + if not '_enumkeys' in dct: + raise TypeError('%s: missing _enumkeys attribute' % cls) + if not '_enumvalues' in dct: + raise TypeError('%s: missing _enumvalues attribute' % cls) + if not '_numbytes' in dct: + raise TypeError('%s: missing _numbytes attribute' % cls) + + # check storage type + if cls._numbytes == 1: + cls._struct = 'B' + elif cls._numbytes == 2: + cls._struct = 'H' + elif cls._numbytes == 4: + cls._struct = 'I' + else: + raise RuntimeError("unsupported enum numbytes") + + # template type? + cls._is_template = False + # does the type contain a Ref or a Ptr? + cls._has_links = False + # does the type contain a Ref? + cls._has_refs = False + # does the type contain a string? 
+ cls._has_strings = False + + # for other read/write checking + cls._min = 0 + cls._max = (1 << (cls._numbytes * 8)) - 1 + + # set enum values as class attributes + for item, value in zip(cls._enumkeys, cls._enumvalues): + setattr(cls, item, value) + + def __iter__(cls): + cls.__i = 0 + return cls + + def __next__(cls): + if cls.__i < len(cls._enumvalues): + cls.__i += 1 + return (cls._enumkeys[cls.__i - 1], cls._enumvalues[cls.__i - 1]) + else: + raise StopIteration + + def __getitem__(cls, key): + if key in cls._enumkeys: + index = cls._enumkeys.index(key) + return cls._enumvalues[index] + elif key in cls._enumvalues: + index = cls._enumvalues.index(key) + return cls._enumkeys[index] + else: + raise KeyError(key) + + def __len__(cls): + return len(cls._enumkeys) + + def __repr__(cls): + return "<enum '%s'>" % cls.__name__ + + def __str__(cls): + returns = "{" + for idx, key in enumerate(cls._enumkeys): + if not idx == 0 and idx <= len(cls._enumkeys) - 1: + returns += ", " + returns += "\"%s\": \"%s\"" % (key, cls._enumvalues[idx]) + returns += "}" + return returns + + +class EnumBase(BasicBase, EditableComboBox, metaclass=_MetaEnumBase): + _enumkeys = [] + _enumvalues = [] + _numbytes = 1 # default width of an enum + + # + # BasicBase methods + # + + def __init__(self, **kwargs): + super(EnumBase, self).__init__(**kwargs) + self._value = self._enumvalues[0] + + def get_value(self): + """Return stored value.""" + return self._value + + def set_value(self, value): + """Set value to C{value}.""" + try: + val = int(value) + except ValueError: + try: + val = int(value, 16) # for '0x...' 
strings + except ValueError: + if value in self._enumkeys: + val = getattr(self, value) + else: + raise ValueError( + "cannot convert value '%s' to integer" % value) + if not val in self._enumvalues: + logger = logging.getLogger("pyffi.object_models.xml.enum") + logger.error('invalid enum value (%i) for %s' + % (val, self.__class__.__name__)) + else: + self._value = val + + def read(self, stream, data): + """Read value from stream.""" + self._value = struct.unpack(data._byte_order + self._struct, + stream.read(self._numbytes))[0] + + def write(self, stream, data): + """Write value to stream.""" + stream.write(struct.pack(data._byte_order + self._struct, + self._value)) + + def __str__(self): + try: + return self._enumkeys[self._enumvalues.index(self.get_value())] + except ValueError: + # not in _enumvalues list + return "<INVALID (%i)>" % self.get_value() + + def get_size(self, data=None): + """Return size of this type.""" + return self._numbytes + + def get_hash(self, data=None): + """Return a hash value for this value.""" + return self.get_value() + + # + # EditableComboBox methods + # + + def get_editor_keys(self): + """List or tuple of strings, each string describing an item.""" + return self._enumkeys + + def set_editor_value(self, index): + """Set value from item index.""" + self.set_value(self._enumvalues[index]) + + def get_editor_value(self): + """Get the item index from the enum value.""" + return self._enumvalues.index(self._value) + + def get_detail_display(self): + """Return object that can be used to display the instance.""" + try: + return self._enumkeys[self._enumvalues.index(self._value)] + except ValueError: + # value self._value is not in the self._enumvalues list + return "<INVALID (%i)>" % self._value diff --git a/pyffi/object_models/niftoolsxml/struct_.py b/pyffi/object_models/niftoolsxml/struct_.py new file mode 100644 index 000000000..921843ccb --- /dev/null +++ b/pyffi/object_models/niftoolsxml/struct_.py @@ -0,0 +1,795 @@ +""" +:mod:`pyffi.object_models.xml.struct_` 
--- Structured Types +=================================================================== + +Implements base class for struct types. + +Implementation +-------------- + +.. autoclass:: StructBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaStructBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" + +# -------------------------------------------------------------------------- +# ***** BEGIN LICENSE BLOCK ***** +# +# Copyright (c) 2007-2012, Python File Format Interface +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# * Neither the name of the Python File Format Interface +# project nor the names of its contributors may be used to endorse +# or promote products derived from this software without specific +# prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# ***** END LICENSE BLOCK ***** +# -------------------------------------------------------------------------- + +# note: some imports are defined at the end to avoid problems with circularity + +import logging +from functools import partial +from typing import Optional + +import pyffi.object_models.common +from pyffi.object_models.basic import BasicBase +from pyffi.object_models.expression import Expression +from pyffi.utils.graph import GlobalNode, EdgeFilter + + +class _MetaStructBase(type): + """This metaclass checks for the presence of _attrs and _is_template + attributes. For each attribute in _attrs, an + property is generated which gets and sets basic types, + and gets other types (struct and array). Used as metaclass of + StructBase.""" + + def __init__(cls, name, bases, dct): + super(_MetaStructBase, cls).__init__(name, bases, dct) + # does the type contain a Ref or a Ptr? + cls._has_links = getattr(cls, '_has_links', False) + # does the type contain a Ref? + cls._has_refs = getattr(cls, '_has_refs', False) + # does the type contain a string? 
+ cls._has_strings = getattr(cls, '_has_strings', False) + + for attr in dct.get('_attrs', []): + # basestring is a forward compound type declaration + # and issubclass must take a type as first argument + # hence this hack + if not isinstance(attr.type_, str) and \ + issubclass(attr.type_, BasicBase) and attr.length is None: + # get and set basic attributes + setattr(cls, attr.name, property( + partial(StructBase.get_basic_attribute, name=attr.name), + partial(StructBase.set_basic_attribute, name=attr.name), + doc=attr.doc)) + elif not isinstance(attr.type_, str) and \ + issubclass(attr.type_, StructBase) and attr.length is None: + # get and set struct attributes + setattr(cls, attr.name, property( + partial(StructBase.get_attribute, name=attr.name), + partial(StructBase.set_attribute, name=attr.name), + doc=attr.doc)) + elif attr.type_ == type(None) and attr.length is None: + # get and set template attributes + setattr(cls, attr.name, property( + partial(StructBase.get_template_attribute, name=attr.name), + partial(StructBase.set_template_attribute, name=attr.name), + doc=attr.doc)) + else: + # other types of attributes: get only + setattr(cls, attr.name, property( + partial(StructBase.get_attribute, name=attr.name), + doc=attr.doc)) + + # check for links and refs and strings + if not cls._has_links: + if attr.type_ != type(None): # templates! + # attr.type_ basestring means forward declaration + # we cannot know if it has links, so assume yes + if (isinstance(attr.type_, str) + or attr.type_._has_links): + cls._has_links = True + # else: + # cls._has_links = True + # or false... we can't know at this point... 
might be necessary + # to uncomment this if template types contain refs + + if not cls._has_refs: + if attr.type_ != type(None): + # attr.type_ basestring means forward declaration + # we cannot know if it has refs, so assume yes + if (isinstance(attr.type_, str) + or attr.type_._has_refs): + cls._has_refs = True + # else: + # cls._has_refs = True # dito, see comment above + + if not cls._has_strings: + if attr.type_ != type(None): + # attr.type_ basestring means forward declaration + # we cannot know if it has strings, so assume yes + if (isinstance(attr.type_, str) + or attr.type_._has_strings): + cls._has_strings = True + else: + # enabled because there is a template key type that has + # strings + cls._has_strings = True + + # precalculate the attribute list + # profiling shows that this speeds up most of the StructBase methods + # that rely on parsing the attribute list + cls._attribute_list = cls._get_attribute_list() + + # precalculate the attribute name list + cls._names = cls._get_names() + + def __repr__(cls): + return "" % cls.__name__ + + +class StructBase(GlobalNode, metaclass=_MetaStructBase): + """Base class from which all file struct types are derived. + + The StructBase class implements the basic struct interface: + it will initialize all attributes using the class interface + using the _attrs class variable, represent them as strings, and so on. + The class variable _attrs must be declared every derived class + interface. + + Each item in the class _attrs list stores the information about + the attribute as stored for instance in the xml file, and the + __value_ instance variable stores the actual attribute + instance. + + Direct access to the attributes is implemented using a + property which invokes the get_attribute and set_attribute + functions, as demonstrated below. + + See the pyffi.XmlHandler class for a more advanced example. 
+ + >>> from pyffi.object_models.basic import BasicBase + >>> from pyffi.object_models.expression import Expression + >>> from pyffi.object_models.xml import StructAttribute as Attr + >>> class SimpleFormat(object): + ... class UInt(BasicBase): + ... _is_template = False + ... def __init__(self, **kwargs): + ... BasicBase.__init__(self, **kwargs) + ... self.__value = 0 + ... def get_value(self): + ... return self.__value + ... def set_value(self, value): + ... self.__value = int(value) + ... @staticmethod + ... def name_attribute(name): + ... return name + >>> class X(StructBase): + ... _is_template = False + ... _attrs = [ + ... Attr(SimpleFormat, dict(name = 'a', type = 'UInt')), + ... Attr(SimpleFormat, dict(name = 'b', type = 'UInt'))] + >>> SimpleFormat.X = X + >>> class Y(X): + ... _is_template = False + ... _attrs = [ + ... Attr(SimpleFormat, dict(name = 'c', type = 'UInt')), + ... Attr(SimpleFormat, dict(name = 'd', type = 'X', cond = 'c == 3'))] + >>> SimpleFormat.Y = Y + >>> y = Y() + >>> y.a = 1 + >>> y.b = 2 + >>> y.c = 3 + >>> y.d.a = 4 + >>> y.d.b = 5 + >>> print(y) # doctest:+ELLIPSIS + instance at 0x... + * a : 1 + * b : 2 + * c : 3 + * d : + instance at 0x... + * a : 4 + * b : 5 + + >>> y.d = 1 + Traceback (most recent call last): + ... + TypeError: expected X but got int + >>> x = X() + >>> x.a = 8 + >>> x.b = 9 + >>> y.d = x + >>> print(y) # doctest:+ELLIPSIS + instance at 0x... + * a : 1 + * b : 2 + * c : 3 + * d : + instance at 0x... 
+ * a : 8 + * b : 9 + + + Attributes: + _attrs (list[pyffi.object_models.niftoolsxml.StructAttribute]): TODO + """ + + _is_template = False + _attrs = [] + _games = {} + arg = None + logger = logging.getLogger("pyffi.nif.data.struct") + + # initialize all attributes + def __init__(self, template=None, argument: Optional[Expression] = None, parent=None): + """The constructor takes a tempate: any attribute whose type, + or template type, is type(None) - which corresponds to + TEMPLATE in the xml description - will be replaced by this + type. The argument is what the ARG xml tags will be replaced with. + + :param template: If the class takes a template type + argument, then this argument describes the template type. + :param argument: If the class takes a type argument, then + it is described here. + :param parent: The parent of this instance, that is, the instance this + array is an attribute of.""" + # used to track names of attributes that have already been added + # is faster than self.__dict__.has_key(...) 
+ names = set() + # initialize argument + self.arg = argument + # save parent (note: disabled for performance) + # self._parent = weakref.ref(parent) if parent else None + # initialize item list + # this list is used for instance by qskope to display the structure + # in a tree view + self._items = [] + self._template = template + + # initialize attributes + for attr in self._attribute_list: # Attributes get replaced on read + # skip attributes with duplicate names + # (for this to work properly, duplicates must have the same + # type, template, argument, length, and width) + if attr.name in names: + continue + names.add(attr.name) + + # things that can only be determined at runtime (rt_xxx) + rt_type = attr.type_ if attr.type_ != type(None) \ + else template + rt_template = attr.template if attr.template != type(None) \ + else template + rt_arg = attr.arg.eval(self) if attr.arg is not None else None + + # instantiate the class, handling arrays at the same time + if attr.length is None: + attr_instance = rt_type( + template=rt_template, argument=rt_arg, + parent=self) + if attr.default is not None: + attr_instance.set_value(attr.default) + elif attr.width is None: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, + parent=self) + else: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, width=attr.width, + parent=self) + + # assign attribute value + setattr(self, "_%s_value_" % attr.name, attr_instance) + + # add instance to item list + self._items.append(attr_instance) + + def deepcopy(self, block): + """Copy attributes from a given block (one block class must be a + subclass of the other). 
Returns self.""" + # check class lineage + if isinstance(self, block.__class__): + attrlist = block._get_filtered_attribute_list() + elif isinstance(block, self.__class__): + attrlist = self._get_filtered_attribute_list() + else: + raise ValueError("deepcopy: classes %s and %s unrelated" + % (self.__class__.__name__, block.__class__.__name__)) + # copy the attributes + for attr in attrlist: + try: + attrvalue = getattr(self, attr.name) + if isinstance(attrvalue, (StructBase, BitStructBase)): + attrvalue.deepcopy(getattr(block, attr.name)) + elif isinstance(attrvalue, Array): + attrvalue.update_size() + attrvalue.deepcopy(getattr(block, attr.name)) + else: + setattr(self, attr.name, getattr(block, attr.name)) + except Exception: + self.logger.error("Failed to set attribute %s in block %s", attr.name, block) + raise + + return self + + # string of all attributes + def __str__(self): + text = '%s instance at 0x%08X\n' % (self.__class__, id(self)) + # used to track names of attributes that have already been added + # is faster than self.__dict__.has_key(...) 
+ for attr in self._get_filtered_attribute_list(): + # append string + attr_str_lines = str( + getattr(self, "_%s_value_" % attr.name, None)).splitlines() + if len(attr_str_lines) > 1: + text += '* %s :\n' % attr.name + for attr_str in attr_str_lines: + text += ' %s\n' % attr_str + elif attr_str_lines: + text += '* %s : %s\n' % (attr.name, attr_str_lines[0]) + else: + # print(getattr(self, "_%s_value_" % attr.name)) + text += '* %s : \n' % attr.name + return text + + def __repr__(self): # TODO: Improve + attrs = [] + + for attr in self._get_filtered_attribute_list(): + attrs.append(f"{attr.name}: {repr(getattr(self, '_%s_value_' % attr.name, None))}") + + return f"<{self.__class__.__name__} {{{', '.join(attrs)}}}>" + + def _log_struct(self, stream, attr): + val = getattr(self, "_%s_value_" % attr.name) # debug + if not isinstance(val, BasicBase): # debug + # TODO: DEBUG + self.logger.warning(val.__class__.__name__ + ":" + attr.name) + self.logger.debug(val.__class__.__name__ + ":" + attr.name) + else: + try: + out = val.get_value() # debug + except Exception: + pass + else: + offset = stream.tell() + hex_ver = "0x%08X" % offset + # TODO: DEBUG + self.logger.warning( + "* {0}.{1} = {2} : type {3} at {4} offset {5} - ".format(self.__class__.__name__, attr.name, + str(out), attr.type_, hex_ver, + offset)) # debug + self.logger.debug( + "* {0}.{1} = {2} : type {3} at {4} offset {5} - ".format(self.__class__.__name__, attr.name, + str(out), attr.type_, hex_ver, + offset)) # debug + + def read(self, stream, data): + """Read structure from stream.""" + + self._items.clear() + for attr in self._get_filtered_attribute_list(data, report_duplicates=True): # We must update the attributes as we read + # things that can only be determined at runtime (rt_xxx) + rt_type = attr.type_ if attr.type_ != type(None) else self._template + rt_template = attr.template if attr.template != type(None) else self._template + rt_arg = attr.arg.eval(self) if attr.arg is not None else None + + # 
instantiate the class, handling arrays at the same time + if attr.length is None: + attr_instance = rt_type( + template=rt_template, argument=rt_arg, + parent=self) + if attr.default is not None: + attr_instance.set_value(attr.default) + elif attr.width is None: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, + parent=self) + else: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, width=attr.width, + parent=self) + + # skip abstract attributes + if attr.is_abstract: + continue + + # assign attribute value + setattr(self, "_%s_value_" % attr.name, attr_instance) + attr_instance.read(stream, data) + self._log_struct(stream, attr) # Log after read, that way we have data after read + + # add instance to item list + self._items.append(attr_instance) + + def write(self, stream, data): + """Write structure to stream.""" + # write all attributes + for attr in self._get_filtered_attribute_list(data): + # skip abstract attributes + if attr.is_abstract: + continue + # get attribute argument (can only be done at runtime) + rt_arg = attr.arg.eval(self) if attr.arg is not None else None + # write the attribute + attr_value = getattr(self, "_%s_value_" % attr.name) + attr_value.arg = rt_arg + getattr(self, "_%s_value_" % attr.name).write(stream, data) # TODO: Why the duplicate getattr? 
+ self._log_struct(stream, attr) + + def fix_links(self, data): + """Fix links in the structure.""" + # parse arguments + # fix links in all attributes + for attr in self._get_filtered_attribute_list(data): + # check if there are any links at all, commonly this speeds things up considerably + if not attr.type_._has_links: + continue + self.logger.debug("fixlinks %s" % attr.name) + # fix the links in the attribute + getattr(self, "_%s_value_" % attr.name).fix_links(data) + + def get_links(self, data=None): + """Get list of all links in the structure.""" + # get all links + links = [] + for attr in self._get_filtered_attribute_list(data): + # check if there are any links at all, this speeds things up considerably + if not attr.type_._has_links: + continue + # extend list of links + links.extend( + getattr(self, "_" + attr.name + "_value_").get_links(data)) + # return the list of all links in all attributes + return links + + def get_strings(self, data): + """Get list of all strings in the structure.""" + # get all strings + strings = [] + for attr in self._get_filtered_attribute_list(data): + # check if there are any strings at all, this speeds things up considerably + if (not attr.type_ is type(None)) and (not attr.type_._has_strings): + continue + # extend list of strings + strings.extend( + getattr(self, "_%s_value_" % attr.name).get_strings(data)) + # return the list of all strings in all attributes + return strings + + def get_refs(self, data=None): + """Get list of all references in the structure. Refs are + links that point down the tree. 
For instance, if you need to parse + the whole tree starting from the root you would use get_refs and not + get_links, as get_links could result in infinite recursion.""" + # get all refs + refs = [] + for attr in self._get_filtered_attribute_list(data): + # check if there are any links at all + # (this speeds things up considerably) + if attr.type_ is not type(None) and not attr.type_._has_links: + continue + # extend list of refs + refs.extend( + getattr(self, "_%s_value_" % attr.name).get_refs(data)) + # return the list of all refs in all attributes + return refs + + def get_size(self, data=None): + """Calculate the structure size in bytes.""" + # calculate size + size = 0 + for attr in self._get_filtered_attribute_list(data): + # skip abstract attributes + if attr.is_abstract: + continue + size += getattr(self, "_%s_value_" % attr.name).get_size(data) + return size + + def get_hash(self, data=None): + """Calculate a hash for the structure, as a tuple.""" + # calculate hash + hsh = [] + for attr in self._get_filtered_attribute_list(data): + hsh.append( + getattr(self, "_%s_value_" % attr.name).get_hash(data)) + return tuple(hsh) + + def replace_global_node(self, oldbranch, newbranch, **kwargs): + for attr in self._get_filtered_attribute_list(): + # check if there are any links at all + # (this speeds things up considerably) + if not attr.type_._has_links: + continue + getattr(self, "_%s_value_" % attr.name).replace_global_node( + oldbranch, newbranch, **kwargs) + + @classmethod + def get_games(cls): + """Get games for which this block is supported.""" + return list(cls._games.keys()) + + @classmethod + def get_versions(cls, game): + """Get versions supported for C{game}.""" + return cls._games[game] + + @classmethod + def _get_attribute_list(cls): + """Calculate the list of all attributes of this structure.""" + # string of attributes of base classes of cls + attrs = [] + for base in cls.__bases__: + try: + attrs.extend(base._get_attribute_list()) + except 
AttributeError: # when base class is "object" + pass + attrs.extend(cls._attrs) + return attrs + + @classmethod + def _get_names(cls): + """Calculate the list of all attributes names in this structure. + Skips duplicate names.""" + # string of attributes of base classes of cls + names = [] + for base in cls.__bases__: + try: + names.extend(base._get_names()) + except AttributeError: # when base class is "object" + pass + for attr in cls._attrs: + if attr.name in names: + continue + else: + names.append(attr.name) + return names + + def _get_filtered_attribute_list(self, data=None, report_duplicates=False): + """Generator for listing all 'active' attributes, that is, + attributes whose condition evaluates ``True``, whose version + interval contains C{version}, and whose user version is + C{user_version}. ``None`` for C{version} or C{user_version} means + that these checks are ignored. Duplicate names are skipped as + well. + + Note: version and user_version arguments are deprecated, use + the data argument instead. 
+ """ + if data is not None: + version = data.version + user_version = data.user_version + else: + version = None + user_version = None + + names = set() + for attr in self._attribute_list: + # print(attr.name, version, attr.since, attr.until) # debug + + # check version + if version is not None: + if attr.since is not None and version < attr.since: + continue + if attr.until is not None and version > attr.until: + continue + # print("version check passed") # debug + + # check user version + if attr.userver is not None and user_version is not None and user_version != attr.userver: + continue + # print("user version check passed") # debug + + # check conditions + if attr.cond is not None and not attr.cond.eval(self): + continue + # print("condition passed") # debug + + if version is not None and user_version is not None and attr.vercond is not None: + if not attr.vercond.eval(data): + continue + # print("version condition passed") # debug + + # skip duplicate names + if attr.name in names: + if report_duplicates: + logging.getLogger().warning("Duplicate attribute %s was found: %s", attr.name, attr) # TODO: More Verbose + continue + names.add(attr.name) + # print("duplicate check passed") # debug + + # passed all tests + # so yield the attribute + yield attr + + def get_attribute(self, name): + """Get a (non-basic) attribute.""" + return getattr(self, "_" + name + "_value_") + + # important note: to apply partial(set_attribute, name = 'xyz') the + # name argument must be last + def set_attribute(self, value, name): + """Set a (non-basic) attribute.""" + # check class + attr = getattr(self, "_" + name + "_value_") + if attr.__class__ is not value.__class__: + raise TypeError("expected %s but got %s" + % (attr.__class__.__name__, + value.__class__.__name__)) + # set it + setattr(self, "_" + name + "_value_", value) + + def get_basic_attribute(self, name): + """Get a basic attribute.""" + return getattr(self, "_" + name + "_value_").get_value() + + # important note: to 
apply partial(set_attribute, name = 'xyz') the + # name argument must be last + def set_basic_attribute(self, value, name): + """Set the value of a basic attribute.""" + getattr(self, "_" + name + "_value_").set_value(value) + + def get_template_attribute(self, name): + """Get a template attribute.""" + try: + return self.get_basic_attribute(name) + except AttributeError: + return self.get_attribute(name) + + # important note: to apply partial(set_attribute, name = 'xyz') the + # name argument must be last + def set_template_attribute(self, value, name): + """Set the value of a template attribute.""" + try: + self.set_basic_attribute(value, name) + except AttributeError: + self.set_attribute(value, name) + + def tree(self): + """A generator for parsing all blocks in the tree (starting from and + including C{self}). By default, there is no tree structure, so returns + self.""" + # return self + yield self + + def update_version(self, data): + """ + + :param data: + :return: + """ + self._update_attributes(data) + + @staticmethod + def _is_same_attr_type(attr_inst, attr): + if attr_inst is None: + return False + if isinstance(attr, Array) and isinstance(attr_inst, Array): + return type(attr._elementType) is type(attr_inst._elementType) + return type(attr) is type(attr_inst) + + def _update_attributes(self, data): # TODO: Only replace different attributes and convert in between + self._items.clear() + for attr in self._get_filtered_attribute_list(data, report_duplicates=True, skip_condition=True): # skip_condition is failing fallout 4 + logging.getLogger().warning("Updating ATTR %s (%s)", attr.name, attr.type_) + # things that can only be determined at runtime (rt_xxx) + rt_type = attr.type_ if attr.type_ != type(None) else self._template + rt_template = attr.template if attr.template != type(None) else self._template + rt_arg = attr.arg.eval(self) if attr.arg is not None else None + + # instantiate the class, handling arrays at the same time + if attr.length is None: 
+ attr_instance = rt_type( + template=rt_template, argument=rt_arg, + parent=self) + if attr.default is not None: + attr_instance.set_value(attr.default) + elif attr.width is None: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, + parent=self) + else: + attr_instance = Array( + name=attr.name, + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + length=attr.length, width=attr.width, + parent=self) + + orig_attr_inst = getattr(self, "_%s_value_" % attr.name, None) + if orig_attr_inst is not None: + if self._is_same_attr_type(orig_attr_inst, attr_instance): + logging.getLogger().warning("IS SAME TYPE: %s ; %s - %s", attr.name, type(attr_instance), type(orig_attr_inst)) + continue + else: + pass # TODO: Look into converting previous values + + # assign attribute value + setattr(self, "_%s_value_" % attr.name, attr_instance) + logging.getLogger().warning("Set attr %s to %s", attr.name, attr.type_) + + # add instance to item list + self._items.append(attr_instance) + + # DetailNode + + def get_detail_child_nodes(self, edge_filter=EdgeFilter()): + """Yield children of this structure.""" + return (item for item in self._items) + + def get_detail_child_names(self, edge_filter=EdgeFilter()): + """Yield names of the children of this structure.""" + return (name for name in self._names) + + # GlobalNode + + def get_global_display(self): + """Construct a convenient name for the block itself.""" + return pyffi.object_models.common._as_str(self.name) if hasattr(self, "name") else "" + + def get_global_child_nodes(self, edge_filter=EdgeFilter()): + # TODO replace get_refs with a generator as well + for branch in self.get_refs(): + yield branch + + +from pyffi.object_models.niftoolsxml.array import Array +from pyffi.object_models.niftoolsxml import BitStructBase diff --git a/pyffi/object_models/simple_type.py 
b/pyffi/object_models/simple_type.py index 222ae188d..9e7004660 100644 --- a/pyffi/object_models/simple_type.py +++ b/pyffi/object_models/simple_type.py @@ -1,4 +1,24 @@ -"""Defines the base class for simple types.""" +""" +:mod:`pyffi.object_models.simple_type` --- Abstract classes for defining simple types +===================================================================================== + +Defines the base class for simple types. + +Implementation +-------------- + +.. autoclass:: _MetaSimpleType + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: SimpleType + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -41,6 +61,7 @@ from pyffi.object_models.any_type import AnyType + class _MetaSimpleType(type): """This metaclass binds the get_value and set_value methods to the value property. We need a metaclass for this because properties are @@ -48,6 +69,7 @@ class _MetaSimpleType(type): http://stackoverflow.com/questions/237432/python-properties-and-inheritance http://requires-thinking.blogspot.com/2006/03/note-to-self-python-properties-are-non.html """ + def __init__(cls, name, bases, dct): # call base class constructor super(_MetaSimpleType, cls).__init__(name, bases, dct) @@ -55,6 +77,7 @@ def __init__(cls, name, bases, dct): cls.value = property(cls.get_value, cls.set_value, None, cls.value.__doc__) + class SimpleType(AnyType, metaclass=_MetaSimpleType): """Base class from which all simple types are derived. 
Simple types contain data which is not divided further into smaller pieces, diff --git a/pyffi/object_models/xml/__init__.py b/pyffi/object_models/xml/__init__.py index 69a2d6583..cda6ffc87 100644 --- a/pyffi/object_models/xml/__init__.py +++ b/pyffi/object_models/xml/__init__.py @@ -1,5 +1,57 @@ -"""Format classes and metaclasses for binary file formats described by an xml -file, and xml handler for converting the xml description into Python classes. +""" +:mod:`pyffi.object_models.xml` --- XML fileformat parser +======================================================== + +Format classes and metaclasses for binary file formats described by an niftoolsxml +file, and niftoolsxml handler for converting the niftoolsxml description into Python classes. + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + :titlesonly: + + array + bit_struct + enum + expression + struct + +Implementation +-------------- + +.. autoclass:: FileFormat + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: StructAttribute + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: BitStructAttribute + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: XmlParser + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: XmlError + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: MetaFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. 
todo:: Show examples for usage """ # ***** BEGIN LICENSE BLOCK ***** @@ -40,18 +92,15 @@ # ***** END LICENSE BLOCK ***** import logging -import time # for timing stuff -import types -import os.path -import sys +import time # for timing stuff import xml.etree.ElementTree as ET import pyffi.object_models -from pyffi.object_models.xml.struct_ import StructBase -from pyffi.object_models.xml.basic import BasicBase +from pyffi.object_models.basic import BasicBase +from pyffi.object_models.expression import Expression from pyffi.object_models.xml.bit_struct import BitStructBase -from pyffi.object_models.xml.enum import EnumBase -from pyffi.object_models.xml.expression import Expression +from pyffi.object_models.xml.enum import EnumBase +from pyffi.object_models.xml.struct_ import StructBase class MetaFileFormat(pyffi.object_models.MetaFileFormat): @@ -102,11 +151,12 @@ def __init__(cls, name, bases, dct): cls.logger.debug("Parsing finished in %.3f seconds." % (time.time() - start)) + class FileFormat(pyffi.object_models.FileFormat, metaclass=MetaFileFormat): """This class can be used as a base class for file formats described by an xml file.""" - xml_file_name = None #: Override. - xml_file_path = None #: Override. + xml_file_name = None #: Override. + xml_file_path = None #: Override. logger = logging.getLogger("pyffi.object_models.xml") # We also keep an ordered list of all classes that have been created. 
@@ -123,6 +173,7 @@ class FileFormat(pyffi.object_models.FileFormat, metaclass=MetaFileFormat): xml_bit_struct = [] xml_struct = [] + class StructAttribute(object): """Helper class to collect attribute data of struct add tags.""" @@ -210,6 +261,7 @@ def __init__(self, cls, attrs): if self.ver2: self.ver2 = cls.version_number(self.ver2) + class BitStructAttribute(object): """Helper class to collect attribute data of bitstruct bits tags.""" @@ -228,7 +280,7 @@ def __init__(self, cls, attrs): self.ver1 = attrs.get("ver1") self.ver2 = attrs.get("ver2") self.userver = attrs.get("userver") - self.doc = "" # handled in xml parser's characters function + self.doc = "" # handled in xml parser's characters function # post-processing if self.default: @@ -242,14 +294,17 @@ def __init__(self, cls, attrs): if self.ver2: self.ver2 = cls.version_number(self.ver2) + class XmlError(Exception): """The XML handler will throw this exception if something goes wrong while parsing.""" pass + class XmlParser: struct_types = ("compound", "niobject", "struct") bitstruct_types = ("bitfield", "bitflags", "bitstruct") + def __init__(self, cls): """Set up the xml parser.""" @@ -272,8 +327,8 @@ def __init__(self, cls): self.version_string = None # list of tuples ({tokens}, (target_attribs)) for each - self.tokens = [ ] - self.versions = [ ([], ("versions", "until", "since")), ] + self.tokens = [] + self.versions = [([], ("versions", "until", "since")), ] def load_xml(self, file): """Loads an XML (can be filepath or open file) and does all parsing""" @@ -281,7 +336,7 @@ def load_xml(self, file): root = tree.getroot() self.load_root(root) self.final_cleanup() - + def load_root(self, root): """Goes over all children of the root node and calls the appropriate function depending on type of the child""" for child in root: @@ -305,22 +360,22 @@ def load_root(self, root): # the following constructs do not create classes def read_token(self, token): """Reads an xml block and stores it in the tokens 
list""" - self.tokens.append( ([], token.attrib["attrs"].split(" ") ) ) + self.tokens.append(([], token.attrib["attrs"].split(" "))) for sub_token in token: - self.tokens[-1][0].append( (sub_token.attrib["token"], sub_token.attrib["string"]) ) - + self.tokens[-1][0].append((sub_token.attrib["token"], sub_token.attrib["string"])) + def read_version(self, version): """Reads an xml block and stores it in the versions list""" # todo [versions] this ignores the user vers! # versions must be in reverse order so don't append but insert at beginning if "id" in version.attrib: - self.versions[0][0].insert( 0, (version.attrib["id"], version.attrib["num"]) ) + self.versions[0][0].insert(0, (version.attrib["id"], version.attrib["num"])) # add to supported versions self.version_string = version.attrib["num"] self.cls.versions[self.version_string] = self.cls.version_number(self.version_string) self.update_gamesdict(self.cls.games, version.text) self.version_string = None - + def read_module(self, module): """Reads a xml block""" # no children, not interesting for now @@ -335,11 +390,11 @@ def read_basic(self, basic): # check the class variables is_template = self.is_generic(basic.attrib) if basic_class._is_template != is_template: - raise XmlError( 'class %s should have _is_template = %s' % (self.class_name, is_template)) + raise XmlError('class %s should have _is_template = %s' % (self.class_name, is_template)) # link class cls. 
to basic_class setattr(self.cls, self.class_name, basic_class) - + # the following constructs create classes def read_bitstruct(self, bitstruct): """Create a bitstruct class""" @@ -371,8 +426,8 @@ def read_bitstruct(self, bitstruct): if numextrabits < 0: raise XmlError("values of bitflags must be increasing") if numextrabits > 0: - reserved = dict(name="Reserved Bits %i"% len(self.class_dict["_attrs"]), numbits=numextrabits) - self.class_dict["_attrs"].append( BitStructAttribute( self.cls, reserved)) + reserved = dict(name="Reserved Bits %i" % len(self.class_dict["_attrs"]), numbits=numextrabits) + self.class_dict["_attrs"].append(BitStructAttribute(self.cls, reserved)) # add the actual attribute bit_attrs = dict(name=attrs["name"], numbits=1) # new nif xml @@ -380,8 +435,8 @@ def read_bitstruct(self, bitstruct): bit_attrs = dict(name=attrs["name"], numbits=attrs["width"]) else: raise XmlError("only bits tags allowed in struct type declaration") - - self.class_dict["_attrs"].append( BitStructAttribute(self.cls, bit_attrs) ) + + self.class_dict["_attrs"].append(BitStructAttribute(self.cls, bit_attrs)) self.update_doc(self.class_dict["_attrs"][-1].doc, member.text) self.create_class(bitstruct.tag) @@ -398,20 +453,20 @@ def read_struct(self, struct): try: self.base_class = getattr(self.cls, class_basename) except KeyError: - raise XmlError( "typo, or forward declaration of struct %s" % class_basename) + raise XmlError("typo, or forward declaration of struct %s" % class_basename) else: self.base_class = StructBase # set attributes (see class StructBase) # 'generic' attribute is optional- if not set, then the struct is not a template - self.class_dict["_is_template" ] = self.is_generic(attrs) - self.class_dict["_attrs" ] = [] - self.class_dict["_games" ] = {} + self.class_dict["_is_template"] = self.is_generic(attrs) + self.class_dict["_attrs"] = [] + self.class_dict["_games"] = {} for field in struct: attrs = self.replace_tokens(field.attrib) # the common case if 
field.tag in ("add", "field"): # add attribute to class dictionary - self.class_dict["_attrs"].append( StructAttribute(self.cls, attrs) ) + self.class_dict["_attrs"].append(StructAttribute(self.cls, attrs)) self.update_doc(self.class_dict["_attrs"][-1].doc, field.text) # not found in current nifxml elif field.tag == "version": @@ -472,7 +527,6 @@ def read_alias(self, alias): raise XmlError("typo, or forward declaration of type %s" % typename) self.create_class(alias.tag) - # the following are helper functions def is_generic(self, attr): # be backward compatible @@ -486,7 +540,7 @@ def update_gamesdict(self, gamesdict, ver_text): gamesdict[gamestr].append(self.cls.versions[self.version_string]) else: gamesdict[gamestr] = [self.cls.versions[self.version_string]] - + def update_class_dict(self, attrs, doc_text): """This initializes class_dict, sets the class name and doc text""" doc_text = doc_text.strip() if doc_text else "" @@ -496,7 +550,7 @@ def update_class_dict(self, attrs, doc_text): def update_doc(self, doc, doc_text): if doc_text: doc += doc_text.strip() - + def create_class(self, tag): """Creates a class for (tag name of the class that was just finished)""" # assign it to cls. if it has not been implemented internally @@ -510,8 +564,8 @@ def create_class(self, tag): return # it has been created in format's __init__.py # create and add to base class of customizer - gen_klass = type("_"+self.class_name, (self.base_class,), self.class_dict) - setattr(self.cls, "_"+self.class_name, gen_klass) + gen_klass = type("_" + self.class_name, (self.base_class,), self.class_dict) + setattr(self.cls, "_" + self.class_name, gen_klass) # recreate the class, to ensure that the metaclass is called!! # (otherwise, cls_klass does not have correct _attribute_list, etc.) 
cls_klass = type(cls_klass.__name__, (gen_klass,) + cls_klass.__bases__, dict(cls_klass.__dict__)) @@ -534,7 +588,7 @@ def create_class(self, tag): self.cls.xml_enum.append(gen_klass) elif tag == "alias": self.cls.xml_alias.append(gen_klass) - + def replace_tokens(self, attr_dict): """Update attr_dict with content of tokens+versions list.""" # replace versions after tokens because tokens include versions @@ -546,19 +600,20 @@ def replace_tokens(self, attr_dict): expr_str = expr_str.replace(op_token, op_str) attr_dict[target_attrib] = expr_str # additional tokens that are not specified by nif.xml - fixed_tokens = ( ("\\", "."), (">", ">"), ("<", "<"), ("&", "&"), ("#ARG#", "ARG"), ("#T#", "TEMPLATE") ) + fixed_tokens = ( + ("\\", "."), (">", ">"), ("<", "<"), ("&", "&"), ("#ARG#", "ARG"), ("#T#", "TEMPLATE")) for attrib, expr_str in attr_dict.items(): for op_token, op_str in fixed_tokens: expr_str = expr_str.replace(op_token, op_str) attr_dict[attrib] = expr_str # onlyT & excludeT act as aliases for deprecated cond - prefs = ( ("onlyT", ""), ("excludeT", "!") ) + prefs = (("onlyT", ""), ("excludeT", "!")) for t, pref in prefs: if t in attr_dict: - attr_dict["cond"] = pref+attr_dict[t] + attr_dict["cond"] = pref + attr_dict[t] break return attr_dict - + def final_cleanup(self): """Called when the xml is completely parsed. Searches and adds class customized functions. 
@@ -578,7 +633,7 @@ def final_cleanup(self): for attr in obj._attrs: templ = attr.template if isinstance(templ, str): - attr.template = getattr(self.cls, templ) if templ != "TEMPLATE" else type(None) + attr.template = getattr(self.cls, templ) if templ != "TEMPLATE" else type(None) attrtype = attr.type_ if isinstance(attrtype, str): attr.type_ = getattr(self.cls, attrtype) diff --git a/pyffi/object_models/xml/array.py b/pyffi/object_models/xml/array.py index 74a0cbb7d..4f8e609e0 100644 --- a/pyffi/object_models/xml/array.py +++ b/pyffi/object_models/xml/array.py @@ -1,4 +1,24 @@ -"""Implements class for arrays.""" +""" +:mod:`pyffi.object_models.xml.array` --- Array classes +====================================================== + +Implements class for arrays. + +Implementation +-------------- + +.. autoclass:: Array + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _ListWrap + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -43,6 +63,7 @@ import logging import weakref +from pyffi.object_models.basic import BasicBase from pyffi.utils.graph import DetailNode, EdgeFilter @@ -406,5 +427,4 @@ def _elementList(self, **kwargs): yield elem -from pyffi.object_models.xml.basic import BasicBase from pyffi.object_models.xml.struct_ import StructBase diff --git a/pyffi/object_models/xml/bit_struct.py b/pyffi/object_models/xml/bit_struct.py index 4720dbc41..00eeef2b5 100644 --- a/pyffi/object_models/xml/bit_struct.py +++ b/pyffi/object_models/xml/bit_struct.py @@ -1,4 +1,29 @@ -"""Implements base class for bitstruct types.""" +""" +:mod:`pyffi.object_models.xml.bit_struct` --- Bit Structured Types +================================================================== + +Implements base class for bitstruct types. + +Implementation +-------------- + +.. 
autoclass:: Bits + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: BitStructBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaBitStructBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -41,9 +66,8 @@ # note: some imports are defined at the end to avoid problems with circularity -from functools import partial - import struct +from functools import partial from pyffi.object_models.editable import EditableSpinBox # for Bits from pyffi.utils.graph import DetailNode, EdgeFilter @@ -53,6 +77,7 @@ class _MetaBitStructBase(type): """This metaclass checks for the presence of a _attrs attribute. For each attribute in _attrs, an property is generated which gets and sets bit fields. Used as metaclass of BitStructBase.""" + def __init__(cls, name, bases, dct): super(_MetaBitStructBase, cls).__init__(name, bases, dct) # consistency checks @@ -95,13 +120,15 @@ def __init__(cls, name, bases, dct): cls._names = cls._get_names() def __repr__(cls): - return ""%(cls.__name__) + return "" % (cls.__name__) + class Bits(DetailNode, EditableSpinBox): """Basic implementation of a n-bit unsigned integer type (without read and write).""" - def __init__(self, numbits=1, default=0, parent = None): + + def __init__(self, numbits=1, default=0, parent=None): # parent disabled for performance - #self._parent = weakref.ref(parent) if parent else None + # self._parent = weakref.ref(parent) if parent else None self._value = default self._numbits = numbits @@ -160,7 +187,7 @@ class BitStructBase(DetailNode, metaclass=_MetaBitStructBase): See the pyffi.XmlHandler class for a more advanced example. 
- >>> from pyffi.object_models.xml.basic import BasicBase + >>> from pyffi.object_models.basic import BasicBase >>> from pyffi.object_models.xml import BitStructAttribute as Attr >>> class SimpleFormat(object): ... @staticmethod @@ -412,8 +439,8 @@ def _get_filtered_attribute_list(self, data=None): # print("version check passed") # debug # check user version - if not(attr.userver is None or user_version is None) \ - and user_version != attr.userver: + if not (attr.userver is None or user_version is None) \ + and user_version != attr.userver: continue # print("user version check passed") # debug diff --git a/pyffi/object_models/xml/enum.py b/pyffi/object_models/xml/enum.py index 0869f8c7f..27ce0fe53 100644 --- a/pyffi/object_models/xml/enum.py +++ b/pyffi/object_models/xml/enum.py @@ -1,4 +1,24 @@ -"""Abstract base class for implementing xml enum types.""" +""" +:mod:`pyffi.object_models.xml.enum` --- Enumerated Types +======================================================== + +Abstract base class for implementing niftoolsxml enum types. + +Implementation +-------------- + +.. autoclass:: EnumBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaEnumBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # ***** BEGIN LICENSE BLOCK ***** # @@ -40,23 +60,25 @@ import logging import struct -from pyffi.object_models.xml.basic import BasicBase +from pyffi.object_models.basic import BasicBase from pyffi.object_models.editable import EditableComboBox + class _MetaEnumBase(type): """This metaclass checks for the presence of _enumkeys, _enumvalues, and _numbytes attributes. It also adds enum class attributes. 
Used as metaclass of EnumBase.""" + def __init__(cls, name, bases, dct): super(_MetaEnumBase, cls).__init__(name, bases, dct) # consistency checks if not '_enumkeys' in dct: - raise TypeError('%s: missing _enumkeys attribute'%cls) + raise TypeError('%s: missing _enumkeys attribute' % cls) if not '_enumvalues' in dct: - raise TypeError('%s: missing _enumvalues attribute'%cls) + raise TypeError('%s: missing _enumvalues attribute' % cls) if not '_numbytes' in dct: - raise TypeError('%s: missing _numbytes attribute'%cls) + raise TypeError('%s: missing _numbytes attribute' % cls) # check storage type if cls._numbytes == 1: @@ -92,7 +114,7 @@ def __iter__(cls): def __next__(cls): if cls.__i < len(cls._enumvalues): cls.__i += 1 - return (cls._enumkeys[cls.__i-1], cls._enumvalues[cls.__i-1]) + return (cls._enumkeys[cls.__i - 1], cls._enumvalues[cls.__i - 1]) else: return @@ -110,21 +132,22 @@ def __len__(cls): return len(cls._enumkeys) def __repr__(cls): - return ""%(cls.__name__) + return "" % (cls.__name__) def __str__(cls): returns = "{" for idx, key in enumerate(cls._enumkeys): - if not idx == 0 and idx <= len(cls._enumkeys) -1: + if not idx == 0 and idx <= len(cls._enumkeys) - 1: returns += ", " returns += "\"%s\": \"%s\"" % (key, cls._enumvalues[idx]) returns += "}" return returns + class EnumBase(BasicBase, EditableComboBox, metaclass=_MetaEnumBase): _enumkeys = [] _enumvalues = [] - _numbytes = 1 # default width of an enum + _numbytes = 1 # default width of an enum # # BasicBase methods @@ -144,13 +167,13 @@ def set_value(self, value): val = int(value) except ValueError: try: - val = int(value, 16) # for '0x...' strings + val = int(value, 16) # for '0x...' 
strings except ValueError: if value in self._enumkeys: val = getattr(self, value) else: raise ValueError( - "cannot convert value '%s' to integer"%value) + "cannot convert value '%s' to integer" % value) if not val in self._enumvalues: logger = logging.getLogger("pyffi.object_models.xml.enum") logger.error('invalid enum value (%i) for %s' diff --git a/pyffi/object_models/xml/struct_.py b/pyffi/object_models/xml/struct_.py index 300b1a868..c7d3b7592 100644 --- a/pyffi/object_models/xml/struct_.py +++ b/pyffi/object_models/xml/struct_.py @@ -1,4 +1,24 @@ -"""Implements base class for struct types.""" +""" +:mod:`pyffi.object_models.xml.struct_` --- Structured Types +=========================================================== + +Implements base class for struct types. + +Implementation +-------------- + +.. autoclass:: StructBase + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: _MetaStructBase + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage +""" # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** @@ -43,9 +63,10 @@ import logging from functools import partial - -from pyffi.utils.graph import DetailNode, GlobalNode, EdgeFilter import pyffi.object_models.common +from pyffi.object_models.basic import BasicBase +from pyffi.utils.graph import GlobalNode, EdgeFilter + class _MetaStructBase(type): """This metaclass checks for the presence of _attrs and _is_template @@ -53,6 +74,7 @@ class _MetaStructBase(type): property is generated which gets and sets basic types, and gets other types (struct and array). Used as metaclass of StructBase.""" + def __init__(cls, name, bases, dct): super(_MetaStructBase, cls).__init__(name, bases, dct) # does the type contain a Ref or a Ptr? 
@@ -66,14 +88,14 @@ def __init__(cls, name, bases, dct): # and issubclass must take a type as first argument # hence this hack if not isinstance(attr.type_, str) and \ - issubclass(attr.type_, BasicBase) and attr.arr1 is None: + issubclass(attr.type_, BasicBase) and attr.arr1 is None: # get and set basic attributes setattr(cls, attr.name, property( partial(StructBase.get_basic_attribute, name=attr.name), partial(StructBase.set_basic_attribute, name=attr.name), doc=attr.doc)) elif not isinstance(attr.type_, str) and \ - issubclass(attr.type_, StructBase) and attr.arr1 is None: + issubclass(attr.type_, StructBase) and attr.arr1 is None: # get and set struct attributes setattr(cls, attr.name, property( partial(StructBase.get_attribute, name=attr.name), @@ -93,13 +115,13 @@ def __init__(cls, name, bases, dct): # check for links and refs and strings if not cls._has_links: - if attr.type_ != type(None): # templates! + if attr.type_ != type(None): # templates! # attr.type_ basestring means forward declaration # we cannot know if it has links, so assume yes if (isinstance(attr.type_, str) - or attr.type_._has_links): + or attr.type_._has_links): cls._has_links = True - #else: + # else: # cls._has_links = True # or false... we can't know at this point... 
might be necessary # to uncomment this if template types contain refs @@ -109,9 +131,9 @@ def __init__(cls, name, bases, dct): # attr.type_ basestring means forward declaration # we cannot know if it has refs, so assume yes if (isinstance(attr.type_, str) - or attr.type_._has_refs): + or attr.type_._has_refs): cls._has_refs = True - #else: + # else: # cls._has_refs = True # dito, see comment above if not cls._has_strings: @@ -119,7 +141,7 @@ def __init__(cls, name, bases, dct): # attr.type_ basestring means forward declaration # we cannot know if it has strings, so assume yes if (isinstance(attr.type_, str) - or attr.type_._has_strings): + or attr.type_._has_strings): cls._has_strings = True else: # enabled because there is a template key type that has @@ -135,7 +157,8 @@ def __init__(cls, name, bases, dct): cls._names = cls._get_names() def __repr__(cls): - return ""%(cls.__name__) + return "" % (cls.__name__) + class StructBase(GlobalNode, metaclass=_MetaStructBase): """Base class from which all file struct types are derived. @@ -157,7 +180,7 @@ class StructBase(GlobalNode, metaclass=_MetaStructBase): See the pyffi.XmlHandler class for a more advanced example. 
- >>> from pyffi.object_models.xml.basic import BasicBase + >>> from pyffi.object_models.basic import BasicBase >>> from pyffi.object_models.xml.expression import Expression >>> from pyffi.object_models.xml import StructAttribute as Attr >>> class SimpleFormat(object): @@ -228,7 +251,7 @@ class StructBase(GlobalNode, metaclass=_MetaStructBase): logger = logging.getLogger("pyffi.nif.data.struct") # initialize all attributes - def __init__(self, template = None, argument = None, parent = None): + def __init__(self, template=None, argument=None, parent=None): """The constructor takes a tempate: any attribute whose type, or template type, is type(None) - which corresponds to TEMPLATE in the xml description - will be replaced by this @@ -246,7 +269,7 @@ def __init__(self, template = None, argument = None, parent = None): # initialize argument self.arg = argument # save parent (note: disabled for performance) - #self._parent = weakref.ref(parent) if parent else None + # self._parent = weakref.ref(parent) if parent else None # initialize item list # this list is used for instance by qskope to display the structure # in a tree view @@ -262,33 +285,33 @@ def __init__(self, template = None, argument = None, parent = None): # things that can only be determined at runtime (rt_xxx) rt_type = attr.type_ if attr.type_ != type(None) \ - else template + else template rt_template = attr.template if attr.template != type(None) \ - else template + else template rt_arg = attr.arg if isinstance(attr.arg, (int, type(None))) \ - else getattr(self, attr.arg) + else getattr(self, attr.arg) # instantiate the class, handling arrays at the same time if attr.arr1 == None: attr_instance = rt_type( - template = rt_template, argument = rt_arg, - parent = self) + template=rt_template, argument=rt_arg, + parent=self) if attr.default != None: attr_instance.set_value(attr.default) elif attr.arr2 == None: attr_instance = Array( - element_type = rt_type, - element_type_template = rt_template, - 
element_type_argument = rt_arg, - count1 = attr.arr1, - parent = self) + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + count1=attr.arr1, + parent=self) else: attr_instance = Array( - element_type = rt_type, - element_type_template = rt_template, - element_type_argument = rt_arg, - count1 = attr.arr1, count2 = attr.arr2, - parent = self) + element_type=rt_type, + element_type_template=rt_template, + element_type_argument=rt_arg, + count1=attr.arr1, count2=attr.arr2, + parent=self) # assign attribute value setattr(self, "_%s_value_" % attr.name, attr_instance) @@ -336,7 +359,7 @@ def __str__(self): elif attr_str_lines: text += '* %s : %s\n' % (attr.name, attr_str_lines[0]) else: - #print(getattr(self, "_%s_value_" % attr.name)) + # print(getattr(self, "_%s_value_" % attr.name)) text += '* %s : \n' % attr.name return text @@ -352,7 +375,10 @@ def _log_struct(self, stream, attr): else: offset = stream.tell() hex_ver = "0x%08X" % offset - self.logger.debug("* {0}.{1} = {2} : type {3} at {4} offset {5} - ".format(self.__class__.__name__, attr.name, str(out), attr.type_, hex_ver, offset )) # debug + self.logger.debug( + "* {0}.{1} = {2} : type {3} at {4} offset {5} - ".format(self.__class__.__name__, attr.name, + str(out), attr.type_, hex_ver, + offset)) # debug def read(self, stream, data): """Read structure from stream.""" @@ -372,7 +398,6 @@ def read(self, stream, data): self._log_struct(stream, attr) attr_value.read(stream, data) - def write(self, stream, data): """Write structure to stream.""" # write all attributes @@ -382,7 +407,7 @@ def write(self, stream, data): continue # get attribute argument (can only be done at runtime) rt_arg = attr.arg if isinstance(attr.arg, (int, type(None))) \ - else getattr(self, attr.arg) + else getattr(self, attr.arg) # write the attribute attr_value = getattr(self, "_%s_value_" % attr.name) attr_value.arg = rt_arg @@ -494,7 +519,7 @@ def _get_attribute_list(cls): for base in 
cls.__bases__: try: attrs.extend(base._get_attribute_list()) - except AttributeError: # when base class is "object" + except AttributeError: # when base class is "object" pass attrs.extend(cls._attrs) return attrs @@ -508,7 +533,7 @@ def _get_names(cls): for base in cls.__bases__: try: names.extend(base._get_names()) - except AttributeError: # when base class is "object" + except AttributeError: # when base class is "object" pass for attr in cls._attrs: if attr.name in names: @@ -536,7 +561,7 @@ def _get_filtered_attribute_list(self, data=None): user_version = None names = set() for attr in self._attribute_list: - #print(attr.name, version, attr.ver1, attr.ver2) # debug + # print(attr.name, version, attr.ver1, attr.ver2) # debug # check version if version is not None: @@ -544,30 +569,30 @@ def _get_filtered_attribute_list(self, data=None): continue if attr.ver2 is not None and version > attr.ver2: continue - #print("version check passed") # debug + # print("version check passed") # debug # check user version if (attr.userver is not None and user_version is not None - and user_version != attr.userver): + and user_version != attr.userver): continue - #print("user version check passed") # debug + # print("user version check passed") # debug # check conditions if attr.cond is not None and not attr.cond.eval(self): continue if (version is not None and user_version is not None - and attr.vercond is not None): + and attr.vercond is not None): if not attr.vercond.eval(data): continue - #print("condition passed") # debug + # print("condition passed") # debug # skip dupiclate names if attr.name in names: continue - #print("duplicate check passed") # debug + # print("duplicate check passed") # debug names.add(attr.name) # passed all tests @@ -634,7 +659,6 @@ def get_detail_child_names(self, edge_filter=EdgeFilter()): """Yield names of the children of this structure.""" return (name for name in self._names) - # GlobalNode def get_global_display(self): @@ -647,5 +671,5 @@ def 
get_global_child_nodes(self, edge_filter=EdgeFilter()): for branch in self.get_refs(): yield branch -from pyffi.object_models.xml.basic import BasicBase + from pyffi.object_models.xml.array import Array diff --git a/pyffi/object_models/xsd/__init__.py b/pyffi/object_models/xsd/__init__.py index c9910d378..bad532aa8 100644 --- a/pyffi/object_models/xsd/__init__.py +++ b/pyffi/object_models/xsd/__init__.py @@ -1,6 +1,35 @@ -"""This module provides a base class and a metaclass for parsing an XSD +""" +:mod:`pyffi.object_models.xsd` --- XSD fileformat parser +============================================================= + +This module provides a base class and a metaclass for parsing an XSD schema and providing an interface for writing XML files that follow this schema. + +Implementation +-------------- + +.. autoclass:: Tree + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: MetaFileFormat + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: Type + :show-inheritance: + :members: + :undoc-members: + +.. autoclass:: FileFormat + :show-inheritance: + :members: + :undoc-members: + +.. todo:: Show examples for usage """ # ***** BEGIN LICENSE BLOCK ***** @@ -47,6 +76,7 @@ import pyffi.object_models + class Tree(object): """Converts an xsd element tree into a tree of nodes that contain all information and methods for creating classes. 
Each node has a @@ -183,18 +213,18 @@ def attribute_walker(self, fileformat): # now set attributes for this node if self.name: # could have self._type or not, but should not have a self.ref - assert(not self.ref) # debug + assert (not self.ref) # debug self.pyname = fileformat.name_attribute(self.name) node = self elif self.ref: # no name and no type should be defined - assert(not self.name) # debug - assert(not self.type_) # debug + assert (not self.name) # debug + assert (not self.type_) # debug self.pyname = fileformat.name_attribute(self.ref) # resolve reference for child in self.schema.children: if (isinstance(child, self.__class__) - and child.name == self.ref): + and child.name == self.ref): node = child break else: @@ -419,7 +449,7 @@ def __init__(self, element, parent): class Selector(Node): pass - + class Sequence(Node): pass @@ -489,6 +519,7 @@ def node_factory(cls, element, parent): setattr(cls, class_name, class_) return class_(element, parent) + class MetaFileFormat(pyffi.object_models.MetaFileFormat): """The MetaFileFormat metaclass transforms the XSD description of a xml format into a bunch of classes which can be directly used to @@ -525,7 +556,7 @@ def __init__(cls, name, bases, dct): # create nodes for every element in the XSD tree schema = Tree.node_factory( # XXX cElementTree python bug when running nosetests - #xml.etree.cElementTree.parse(xsdfile).getroot(), None) + # xml.etree.cElementTree.parse(xsdfile).getroot(), None) xml.etree.ElementTree.parse(xsdfile).getroot(), None) finally: xsdfile.close() @@ -537,6 +568,7 @@ def __init__(cls, name, bases, dct): cls.logger.debug("Parsing finished in %.3f seconds." % (time.time() - start)) + class Type(object): _node = None @@ -546,20 +578,21 @@ def __init__(self): # TODO initialize all attributes self._node.instantiate(self) + class FileFormat(pyffi.object_models.FileFormat, metaclass=MetaFileFormat): """This class can be used as a base class for file formats. 
It implements a number of useful functions such as walking over directory trees and a default attribute naming function. """ - xsdFileName = None #: Override. - xsdFilePath = None #: Override. + xsdFileName = None #: Override. + xsdFilePath = None #: Override. logger = logging.getLogger("pyffi.object_models.xsd") @classmethod def name_parts(cls, name): # introduces extra splits for some names - name = name.replace("NMTOKEN", "NM_TOKEN") # name token - name = name.replace("IDREF", "ID_REF") # identifier reference + name = name.replace("NMTOKEN", "NM_TOKEN") # name token + name = name.replace("IDREF", "ID_REF") # identifier reference # do the split return pyffi.object_models.FileFormat.name_parts(name) diff --git a/pyffi/qskope/__init__.py b/pyffi/qskope/__init__.py index e65090c3d..bbbe3df63 100644 --- a/pyffi/qskope/__init__.py +++ b/pyffi/qskope/__init__.py @@ -39,31 +39,30 @@ from PyQt4 import QtGui, QtCore -import pyffi.qskope.global_model -import pyffi.qskope.detail_model -import pyffi.qskope.detail_delegate - import pyffi -from pyffi.formats.nif import NifFormat +import pyffi.qskope.detail_delegate +import pyffi.qskope.detail_model +import pyffi.qskope.global_model +from pyffi.formats.bsa import BsaFormat from pyffi.formats.cgf import CgfFormat -from pyffi.formats.kfm import KfmFormat from pyffi.formats.dds import DdsFormat -from pyffi.formats.tga import TgaFormat from pyffi.formats.egm import EgmFormat from pyffi.formats.egt import EgtFormat from pyffi.formats.esp import EspFormat -from pyffi.formats.tri import TriFormat -from pyffi.formats.bsa import BsaFormat +from pyffi.formats.kfm import KfmFormat +from pyffi.formats.nif import NifFormat from pyffi.formats.psk import PskFormat from pyffi.formats.rockstar.dir_ import DirFormat +from pyffi.formats.tga import TgaFormat +from pyffi.formats.tri import TriFormat -from pyffi.object_models import FileFormat # implementation details: # http://doc.trolltech.com/4.3/qmainwindow.html#details class 
QSkope(QtGui.QMainWindow): """Main QSkope window.""" - def __init__(self, parent = None): + + def __init__(self, parent=None): """Initialize the main window.""" QtGui.QMainWindow.__init__(self, parent) @@ -170,16 +169,15 @@ def createMenus(self): def closeEvent(self, event): """Called when the application is closed. Saves the settings.""" - settings = self.getSettings(versioned = True) + settings = self.getSettings(versioned=True) settings.setValue("MainWindow/geometry", self.saveGeometry()) QtGui.QMainWindow.closeEvent(self, event) - # # various helper functions # - def openFile(self, filename = None): + def openFile(self, filename=None): """Open a file, and set up the view.""" # inform user about file being read self.statusBar().showMessage("Reading %s ..." % filename) @@ -195,7 +193,7 @@ def openFile(self, filename = None): self.data = Format.Data() try: self.data.read(stream) - except ValueError as err: #ValueError: + except ValueError as err: # ValueError: # failed, try next format print(str(err)) continue @@ -237,7 +235,7 @@ def openFile(self, filename = None): except UnboundLocalError: pass - def saveFile(self, filename = None): + def saveFile(self, filename=None): """Save changes to disk.""" # TODO support dds saving as well # TODO support tga saving as well @@ -265,7 +263,7 @@ def saveFile(self, filename = None): stream.close() @staticmethod - def getSettings(versioned = False): + def getSettings(versioned=False): """Return the QSkope settings.""" if not versioned: return QtCore.QSettings("PyFFI", "QSkope") @@ -296,7 +294,7 @@ def openAction(self): # (displays an extra file dialog) filename = QtGui.QFileDialog.getOpenFileName(self, "Open File") if filename: - self.openFile(filename = filename) + self.openFile(filename=filename) def saveAsAction(self): """Save a file.""" @@ -312,7 +310,7 @@ def saveAction(self): # wrapper around saveFile # (gets file name automatically from stored file name) if self.fileName: - self.saveFile(filename = self.fileName) + 
self.saveFile(filename=self.fileName) def aboutQSkopeAction(self): """Display an information window about QSkope.""" diff --git a/pyffi/qskope/detail_delegate.py b/pyffi/qskope/detail_delegate.py index 0b341101d..63850a14f 100644 --- a/pyffi/qskope/detail_delegate.py +++ b/pyffi/qskope/detail_delegate.py @@ -41,11 +41,12 @@ # each delegate type corresponds to a QtGui delegate type # (see _checkValidEditor for more details) -from pyffi.object_models.editable import EditableComboBox # -> QComboBox -from pyffi.object_models.editable import EditableFloatSpinBox # -> QDoubleSpinBox -from pyffi.object_models.editable import EditableSpinBox # -> QSpinBox -from pyffi.object_models.editable import EditableTextEdit # -> QTextEdit -from pyffi.object_models.editable import EditableLineEdit # -> QLineEdit +from pyffi.object_models.editable import EditableComboBox # -> QComboBox +from pyffi.object_models.editable import EditableFloatSpinBox # -> QDoubleSpinBox +from pyffi.object_models.editable import EditableLineEdit # -> QLineEdit +from pyffi.object_models.editable import EditableSpinBox # -> QSpinBox +from pyffi.object_models.editable import EditableTextEdit # -> QTextEdit + # implementation details: # http://doc.trolltech.com/4.3/model-view-delegate.html @@ -196,4 +197,3 @@ def setModelData(self, editor, model, index): # set the model data # EditRole ensures that setData uses set_editor_value to set the data model.setData(index, editorvalue, QtCore.Qt.EditRole) - diff --git a/pyffi/qskope/detail_model.py b/pyffi/qskope/detail_model.py index 552e080e5..88c2017f6 100644 --- a/pyffi/qskope/detail_model.py +++ b/pyffi/qskope/detail_model.py @@ -40,8 +40,9 @@ from PyQt4 import QtCore -from pyffi.utils.graph import EdgeFilter, GlobalNode from pyffi.qskope.detail_tree import DetailTreeItem, DetailTreeItemData +from pyffi.utils.graph import EdgeFilter, GlobalNode + # implementation references: # http://doc.trolltech.com/4.3/model-view-programming.html @@ -51,8 +52,8 @@ class 
DetailModel(QtCore.QAbstractItemModel): such as StructBase, Array, and BasicBase instances.""" # column definitions NUM_COLUMNS = 3 - COL_NAME = 0 - COL_TYPE = 1 + COL_NAME = 0 + COL_TYPE = 1 COL_VALUE = 2 # def __init__(self, parent = None, block = None, refnumber_dict = None): @@ -143,7 +144,7 @@ def data(self, index, role): def headerData(self, section, orientation, role): """Return header data.""" if (orientation == QtCore.Qt.Horizontal - and role == QtCore.Qt.DisplayRole): + and role == QtCore.Qt.DisplayRole): if section == self.COL_TYPE: return "Type" elif section == self.COL_NAME: @@ -225,7 +226,7 @@ def setData(self, index, value, role): node.set_editor_value(pyvalue) # tell everyone that the data has changed self.emit(QtCore.SIGNAL('dataChanged(QModelIndex, QModelIndex)'), - index, index) + index, index) return True # all other cases: failed return False diff --git a/pyffi/qskope/detail_tree.py b/pyffi/qskope/detail_tree.py index 767d59988..ade2a2a69 100644 --- a/pyffi/qskope/detail_tree.py +++ b/pyffi/qskope/detail_tree.py @@ -38,9 +38,9 @@ # -------------------------------------------------------------------------- - from pyffi.utils.graph import DetailNode, EdgeType, EdgeFilter + class DetailTreeItemData(object): """Stores all data used in the detail view. @@ -49,6 +49,7 @@ class DetailTreeItemData(object): :ivar name: The name of the node (this is usually not stored in the node). :type name: ``str`` """ + def __init__(self, node=None, name=None): if not isinstance(node, DetailNode): raise TypeError("node must be DetailNode instance") @@ -67,6 +68,7 @@ def display(self): def typename(self): return self.node.__class__.__name__ + class DetailTreeItem(object): """Stores all internal information to vizualize :class:`DetailNode`\ s in a tree view. @@ -82,6 +84,7 @@ class DetailTreeItem(object): :ivar edge_type: The type of edge from the parent of this node to itself. 
:type edge_type: :class:`EdgeType` """ + def __init__(self, data=None, parent=None, row=0, edge_type=EdgeType(), edge_filter=EdgeFilter()): """Initialize the node tree hierarchy from the given data.""" diff --git a/pyffi/qskope/global_model.py b/pyffi/qskope/global_model.py index 7ddfbf3dd..b467e1d1e 100644 --- a/pyffi/qskope/global_model.py +++ b/pyffi/qskope/global_model.py @@ -40,10 +40,11 @@ from collections import MutableMapping -from PyQt4 import QtGui, QtCore +from PyQt4 import QtCore -from pyffi.utils.graph import EdgeFilter from pyffi.qskope.global_tree import GlobalTreeItemData, GlobalTreeItem +from pyffi.utils.graph import EdgeFilter + # implementation references: # http://doc.trolltech.com/4.3/model-view-programming.html @@ -76,7 +77,7 @@ def __delitem__(self, key): # index becomes available self.free_indices.append(self.data[id(key)]) # remove it - del self.data[id(key)] + del self.data[id(key)] def clear(self): # all indices larger than the first element @@ -87,8 +88,10 @@ def clear(self): # override abstract functions which aren't used anyway def __len__(self): raise NotImplementedError() + def __setitem__(self): raise NotImplementedError() + def __iter__(self): raise NotImplementedError() @@ -107,7 +110,6 @@ def updateIndexDict(self, item): self.index_dict[item.data.node] for child_item in item.children: self.updateIndexDict(child_item) - def flags(self, index): """Return flags for the given index: all indices are enabled and @@ -147,7 +149,7 @@ def data(self, index, role): def headerData(self, section, orientation, role): """Return header data.""" if (orientation == QtCore.Qt.Horizontal - and role == QtCore.Qt.DisplayRole): + and role == QtCore.Qt.DisplayRole): if section == self.COL_TYPE: return "Type" elif section == self.COL_NAME: @@ -156,7 +158,7 @@ def headerData(self, section, orientation, role): return "#" return None - def rowCount(self, parent = QtCore.QModelIndex()): + def rowCount(self, parent=QtCore.QModelIndex()): """Calculate a row 
count for the given parent index.""" if not parent.isValid(): return 1 @@ -164,7 +166,7 @@ def rowCount(self, parent = QtCore.QModelIndex()): # get the parent child count = number of references return len(parent.internalPointer().children) - def columnCount(self, parent = QtCore.QModelIndex()): + def columnCount(self, parent=QtCore.QModelIndex()): """Return column count.""" # column count is constant everywhere return self.NUM_COLUMNS diff --git a/pyffi/qskope/global_tree.py b/pyffi/qskope/global_tree.py index 9a8507b41..3f6cd4ce0 100644 --- a/pyffi/qskope/global_tree.py +++ b/pyffi/qskope/global_tree.py @@ -38,15 +38,16 @@ # -------------------------------------------------------------------------- - from pyffi.utils.graph import GlobalNode, EdgeType, EdgeFilter + class GlobalTreeItemData(object): """Stores all data used in the detail view. :ivar node: The node of the item. :type node: :class:`DetailNode` """ + def __init__(self, node=None): if not isinstance(node, GlobalNode): raise TypeError("node must be GlobalNode instance") @@ -62,6 +63,7 @@ def display(self): def typename(self): return self.node.__class__.__name__ + class GlobalTreeItem(object): """Stores all internal information to vizualize L{GlobalNode}s in a tree view. @@ -79,6 +81,7 @@ class GlobalTreeItem(object): may form cycles (or not, this is format dependent). 
:type edge_type: ``int`` """ + def __init__(self, data=None, parent=None, row=0, edge_type=EdgeType(), edge_filter=EdgeFilter()): """Initialize the node tree hierarchy from the given data.""" diff --git a/pyffi/spells/__init__.py b/pyffi/spells/__init__.py index 6281a7e56..2cc69aebf 100644 --- a/pyffi/spells/__init__.py +++ b/pyffi/spells/__init__.py @@ -135,12 +135,9 @@ # -------------------------------------------------------------------------- -from configparser import ConfigParser -from copy import deepcopy +import concurrent.futures # ProcessPoolExecutor import gc - import logging # Logger -import concurrent.futures # ProcessPoolExecutor import multiprocessing # current_process, cpu_count import optparse import os # remove @@ -149,6 +146,8 @@ import shlex # shlex.split for parsing option lists in ini files import subprocess import tempfile +from configparser import ConfigParser +from copy import deepcopy import pyffi # for pyffi.__version__ import pyffi.object_models # pyffi.object_models.FileFormat @@ -458,6 +457,7 @@ def toastexit(cls, toaster): class SpellGroupSeriesBase(SpellGroupBase): """Base class for running spells in series.""" + def recurse(self, branch=None): """Recurse spells in series.""" for spell in self.spells: @@ -490,6 +490,7 @@ class SpellGroupParallelBase(SpellGroupBase): """Base class for running spells in parallel (that is, with only a single recursion in the tree). """ + def branchinspect(self, branch): """Inspect spells with :meth:`Spell.branchinspect` (not all checks are executed, only keeps going until a spell inspection returns ``True``). 
@@ -503,7 +504,7 @@ def branchentry(self, branch): def branchexit(self, branch): for spell in self.spells: - spell.branchexit(branch) + spell.branchexit(branch) def dataentry(self): """Look into every spell with :meth:`Spell.dataentry`.""" @@ -528,8 +529,8 @@ def SpellGroupSeries(*args): {"SPELLCLASSES": args, "SPELLNAME": " | ".join(spellclass.SPELLNAME for spellclass in args), - "READONLY": - all(spellclass.READONLY for spellclass in args)}) + "READONLY": + all(spellclass.READONLY for spellclass in args)}) def SpellGroupParallel(*args): @@ -539,8 +540,9 @@ def SpellGroupParallel(*args): {"SPELLCLASSES": args, "SPELLNAME": " & ".join(spellclass.SPELLNAME for spellclass in args), - "READONLY": - all(spellclass.READONLY for spellclass in args)}) + "READONLY": + all(spellclass.READONLY for spellclass in args)}) + class SpellApplyPatch(Spell): """A spell for applying a patch on files.""" @@ -611,6 +613,7 @@ def _toaster_job(args): class multiprocessing_fake_logger(fake_logger): """Simple logger which works well along with multiprocessing on all platforms.""" + @classmethod def _log(cls, level, level_str, msg): # do not actually log, just print @@ -635,6 +638,7 @@ def _log(cls, level, level_str, msg): # toast exit code toaster.spellclass.toastexit(toaster) + # CPU_COUNT is used for default number of jobs if multiprocessing: try: @@ -754,10 +758,10 @@ def _update_options(self): if self.options["createpatch"] and self.options["applypatch"]: raise ValueError( "options --diff and --patch are mutually exclusive") - if self.options["diffcmd"] and not(self.options["createpatch"]): + if self.options["diffcmd"] and not (self.options["createpatch"]): raise ValueError( "option --diff-cmd can only be used with --diff") - if self.options["patchcmd"] and not(self.options["applypatch"]): + if self.options["patchcmd"] and not (self.options["applypatch"]): raise ValueError( "option --patch-cmd can only be used with --patch") # multiprocessing available? 
@@ -832,7 +836,7 @@ def msgblockend(self, message=None): message, but if the message argument is ``None``, then no message is printed.""" self.indent -= 1 - if not(message is None): + if not (message is None): self.msg(message) def is_admissible_branch_class(self, branchtype): @@ -928,9 +932,9 @@ def cli(self): type="string", metavar="DESTDIR", help="write files to DESTDIR" - " instead of overwriting the original;" - " this is done by replacing SOURCEDIR by DESTDIR" - " in all source file paths") + " instead of overwriting the original;" + " this is done by replacing SOURCEDIR by DESTDIR" + " in all source file paths") parser.add_option( "--diff", dest="createpatch", action="store_true", @@ -942,7 +946,7 @@ def cli(self): type="string", metavar="CMD", help="use CMD as diff command; this command must accept precisely" - " 3 arguments: 'CMD oldfile newfile patchfile'.") + " 3 arguments: 'CMD oldfile newfile patchfile'.") parser.add_option( "--dry-run", dest="dryrun", action="store_true", @@ -1122,11 +1126,11 @@ def cli(self): return if not args: # no args: error if no top or no spells - if not(self.top and self.spellnames): + if not (self.top and self.spellnames): parser.error(errormessage_numargs) elif len(args) == 1: # single argument is top, error if no spells - if not(self.spellnames): + if not (self.spellnames): parser.error(errormessage_numargs) self.top = args[-1] else: @@ -1205,7 +1209,7 @@ def file_pools(chunksize): # is much more verbose by default pause = self.options.get("pause", False) - + # do not ask for confirmation (!= cli default) interactive = self.options.get("interactive", False) @@ -1238,7 +1242,8 @@ def file_pools(chunksize): if ((not self.spellclass.READONLY) and (not dryrun) and (not prefix) and (not createpatch) and interactive and (not suffix) and (not destdir)): - self.logger.warn("This script will modify your files, in particular if something goes wrong it may destroy them.") + self.logger.warn( + "This script will modify your files, in 
particular if something goes wrong it may destroy them.") self.logger.warn("Make a backup of your files before running this script.") if not input("Are you sure that you want to proceed? [n/y] ") in ("y", "Y"): self.logger.info("Script aborted by user.") @@ -1326,12 +1331,12 @@ def _toast(self, stream): # create spell instance spell = self.spellclass(toaster=self, data=data, stream=stream) - + # inspect the spell instance if spell._datainspect() and spell.datainspect(): # read the full file data.read(stream) - + # cast the spell on the data tree spell.recurse() @@ -1464,13 +1469,12 @@ def writepatch(self, stream, data): if not diffcmd: raise ValueError("must specify a diff command") - # create a temporary file that won't get deleted when closed self.options["suffix"] = ".tmp" newfile = self.spellclass.get_toast_stream(self, stream.name) try: data.write(newfile) - except: # not just Exception, also CTRL-C + except: # not just Exception, also CTRL-C self.msg("write failed!!!") raise # use external diff command @@ -1485,6 +1489,8 @@ def writepatch(self, stream, data): # delete temporary file os.remove(newfilename) + if __name__ == '__main__': import doctest + doctest.testmod(optionflags=doctest.ELLIPSIS) diff --git a/pyffi/spells/cgf/__init__.py b/pyffi/spells/cgf/__init__.py index c439917c7..2d2297fb4 100644 --- a/pyffi/spells/cgf/__init__.py +++ b/pyffi/spells/cgf/__init__.py @@ -47,6 +47,7 @@ import pyffi.spells from pyffi.formats.cgf import CgfFormat + class CgfSpell(pyffi.spells.Spell): """Base class for spells for cgf files.""" @@ -77,6 +78,6 @@ def inspectblocktype(self, block_type): """ return (block_type in self.data.chunk_table.get_chunk_types()) + class CgfToaster(pyffi.spells.Toaster): FILEFORMAT = CgfFormat - diff --git a/pyffi/spells/cgf/check.py b/pyffi/spells/cgf/check.py index 259724134..6bf1a4b9f 100644 --- a/pyffi/spells/cgf/check.py +++ b/pyffi/spells/cgf/check.py @@ -47,6 +47,7 @@ # XXX do something about this... 
from pyffi.utils.mathutils import * + class SpellReadWrite(CgfSpell): """Like the original read-write spell, but with additional file size check.""" @@ -81,13 +82,14 @@ def dataentry(self): # spell is finished: prevent recursing into the tree return False + class SpellCheckTangentSpace(CgfSpell): """This spell checks the tangent space calculation. Only useful for debugging. """ SPELLNAME = "check_tangentspace" - SENSITIVITY = 0.1 # admissible float error (relative to one) + SENSITIVITY = 0.1 # admissible float error (relative to one) def datainspect(self): return self.inspectblocktype(CgfFormat.MeshChunk) @@ -96,7 +98,7 @@ def branchinspect(self, branch): return isinstance(branch, (CgfFormat.MeshChunk, CgfFormat.NodeChunk)) def branchentry(self, branch): - if not isinstance(branch, CgfFormat.MeshChunk): + if not isinstance(branch, CgfFormat.MeshChunk): # keep recursing return True @@ -113,8 +115,8 @@ def branchentry(self, branch): self.toaster.msgblockbegin("validating and checking old with new") for norm, oldtangent, newtangent in zip(branch.normals_data.normals, - oldtangents, newtangents): - #self.toaster.msg("*** %s ***" % (norm,)) + oldtangents, newtangents): + # self.toaster.msg("*** %s ***" % (norm,)) # check old norm = (norm.x, norm.y, norm.z) tan = tuple(x / 32767.0 @@ -149,14 +151,14 @@ def branchentry(self, branch): self.toaster.logger.warn("%s %s" % (tan, bin)) self.toaster.logger.warn("(error is %f)" % abs(crossnorm - 1)) - cross = vecscalarMul(cross, 1.0/crossnorm) + cross = vecscalarMul(cross, 1.0 / crossnorm) if vecDistance(norm, cross) > self.SENSITIVITY: self.toaster.logger.warn( "norm not cross product of tangent and binormal") - #self.toaster.logger.warn("norm = %s" % (norm,)) - #self.toaster.logger.warn("tan = %s" % (tan,)) - #self.toaster.logger.warn("bin = %s" % (bin,)) - #self.toaster.logger.warn("tan bin cross prod = %s" % (cross,)) + # self.toaster.logger.warn("norm = %s" % (norm,)) + # self.toaster.logger.warn("tan = %s" % (tan,)) + # 
self.toaster.logger.warn("bin = %s" % (bin,)) + # self.toaster.logger.warn("tan bin cross prod = %s" % (cross,)) self.toaster.logger.warn( "(error is %f)" % vecDistance(norm, cross)) @@ -179,6 +181,7 @@ def branchentry(self, branch): self.toaster.msgblockend() + class SpellCheckHasVertexColors(CgfSpell): """This spell checks if a model has vertex colors. Only useful for debugging. diff --git a/pyffi/spells/cgf/dump.py b/pyffi/spells/cgf/dump.py index 06a9a2a8a..4494d6b25 100644 --- a/pyffi/spells/cgf/dump.py +++ b/pyffi/spells/cgf/dump.py @@ -39,15 +39,17 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- -from pyffi.formats.cgf import CgfFormat import pyffi.spells.cgf +from pyffi.formats.cgf import CgfFormat + def dumpBlock(chunk): # XXX figure out how to get the version - #print('chunk %3i (%s version 0x%04X)' + # print('chunk %3i (%s version 0x%04X)' # % (i, chunk.__class__.__name__, version)) return str(chunk) + class SpellDumpAll(pyffi.spells.cgf.CgfSpell): """Dump the whole file.""" diff --git a/pyffi/spells/check.py b/pyffi/spells/check.py index 138b2193a..f0b58e729 100644 --- a/pyffi/spells/check.py +++ b/pyffi/spells/check.py @@ -42,6 +42,7 @@ from pyffi.spells import Spell + class SpellNop(Spell): """A spell which really does nothing. 
For testing.""" @@ -51,6 +52,7 @@ class SpellNop(Spell): def datainspect(self): return False + class SpellRead(Spell): """A spell which does nothing, besides reading the file.""" @@ -61,6 +63,7 @@ def dataentry(self): # prevent recursing into the tree return False + class SpellReadWrite(SpellRead): """A spell which writes the data to a temporary file (essentially, it is like L{SpellRead} but it forces --dry-run and sets READONLY to @@ -69,7 +72,7 @@ class SpellReadWrite(SpellRead): SPELLNAME = "check_readwrite" READONLY = False - changed = True # we want it to write the file back + changed = True # we want it to write the file back @classmethod def toastentry(cls, toaster): @@ -77,4 +80,3 @@ def toastentry(cls, toaster): toaster.options["dryrun"] = True # the spell always acts return True - diff --git a/pyffi/spells/dds.py b/pyffi/spells/dds.py index c5dc48bd9..9fa51d06a 100644 --- a/pyffi/spells/dds.py +++ b/pyffi/spells/dds.py @@ -47,9 +47,11 @@ import pyffi.spells from pyffi.formats.dds import DdsFormat + class DdsSpell(pyffi.spells.Spell): """Base class for spells for dds files.""" pass + class DdsToaster(pyffi.spells.Toaster): FILEFORMAT = DdsFormat diff --git a/pyffi/spells/nif/__init__.py b/pyffi/spells/nif/__init__.py index 038739841..f97a98ec6 100644 --- a/pyffi/spells/nif/__init__.py +++ b/pyffi/spells/nif/__init__.py @@ -120,7 +120,7 @@ def dataentry(self): if isinstance(branch, NifFormat.NiGeometry): if branch.skin_instance: skelroot = branch.skin_instance.skeleton_root - if skelroot and not(id(skelroot) in self._skelroots): + if skelroot and not (id(skelroot) in self._skelroots): self._skelroots.add(id(skelroot)) # only apply spell if there are skeleton roots if self._skelroots: @@ -131,7 +131,7 @@ def dataentry(self): def branchinspect(self, branch): # only inspect the NiNode branch return isinstance(branch, NifFormat.NiNode) - + def branchentry(self, branch): if id(branch) in self._skelroots: self.skelrootentry(branch) diff --git 
a/pyffi/spells/nif/check.py b/pyffi/spells/nif/check.py index 61ebbdac8..e4dd165a2 100644 --- a/pyffi/spells/nif/check.py +++ b/pyffi/spells/nif/check.py @@ -40,13 +40,14 @@ # -------------------------------------------------------------------------- +import tempfile from contextlib import closing from itertools import repeat -import tempfile -from pyffi.formats.nif import NifFormat import pyffi.spells.nif -import pyffi.utils.tristrip # for check_tristrip +import pyffi.utils.tristrip # for check_tristrip +from pyffi.formats.nif import NifFormat + class SpellReadWrite(pyffi.spells.nif.NifSpell): """Like the original read-write spell, but with additional file size @@ -88,15 +89,17 @@ def dataentry(self): f_debug = open("debug.nif", "wb") f_debug.write(f_tmp.read(-1)) f_debug.close() - raise Exception('write check failed: file sizes differ (written file saved as debug.nif for inspection)') + raise Exception( + 'write check failed: file sizes differ (written file saved as debug.nif for inspection)') finally: f_tmp.close() - + self.toaster.msgblockend() # spell is finished: prevent recursing into the tree return False + class SpellNodeNamesByFlag(pyffi.spells.nif.NifSpell): """This spell goes over all NIF files, and at the end, it gives a summary of which node names where used with particular flags.""" @@ -130,6 +133,7 @@ def branchentry(self, branch): else: return False + class SpellCompareSkinData(pyffi.spells.nif.NifSpell): """This spell compares skinning data with a reference nif.""" @@ -139,15 +143,15 @@ class SpellCompareSkinData(pyffi.spells.nif.NifSpell): @staticmethod def are_vectors_equal(oldvec, newvec, tolerance=0.01): - return (max([abs(x-y) - for (x,y) in zip(oldvec.as_list(), newvec.as_list())]) + return (max([abs(x - y) + for (x, y) in zip(oldvec.as_list(), newvec.as_list())]) < tolerance) @staticmethod def are_matrices_equal(oldmat, newmat, tolerance=0.01): - return (max([max([abs(x-y) - for (x,y) in zip(oldrow, newrow)]) - for (oldrow, newrow) in 
zip(oldmat.as_list(), + return (max([max([abs(x - y) + for (x, y) in zip(oldrow, newrow)]) + for (oldrow, newrow) in zip(oldmat.as_list(), newmat.as_list())]) < tolerance) @@ -169,7 +173,7 @@ def toastentry(cls, toaster): toaster.refbonedata = [] for refgeom in toaster.refdata.get_global_iterator(): if (isinstance(refgeom, NifFormat.NiGeometry) - and refgeom.skin_instance and refgeom.skin_instance.data): + and refgeom.skin_instance and refgeom.skin_instance.data): toaster.refbonedata += list(zip( repeat(refgeom.skin_instance.skeleton_root), repeat(refgeom.skin_instance.data), @@ -187,14 +191,14 @@ def branchinspect(self, branch): def branchentry(self, branch): if (isinstance(branch, NifFormat.NiGeometry) - and branch.skin_instance and branch.skin_instance.data): + and branch.skin_instance and branch.skin_instance.data): for skelroot, skeldata, bonenode, bonedata in zip( - repeat(branch.skin_instance.skeleton_root), - repeat(branch.skin_instance.data), - branch.skin_instance.bones, - branch.skin_instance.data.bone_list): + repeat(branch.skin_instance.skeleton_root), + repeat(branch.skin_instance.data), + branch.skin_instance.bones, + branch.skin_instance.data.bone_list): for refskelroot, refskeldata, refbonenode, refbonedata \ - in self.toaster.refbonedata: + in self.toaster.refbonedata: if bonenode.name == refbonenode.name: self.toaster.msgblockbegin("checking bone %s" % bonenode.name) @@ -217,26 +221,26 @@ def branchentry(self, branch): # can we find skeleton root of data in reference # data? for refskelroot_branch \ - in self.toaster.refdata.get_global_iterator(): + in self.toaster.refdata.get_global_iterator(): if not isinstance(refskelroot_branch, NifFormat.NiAVObject): continue if skelroot.name == refskelroot_branch.name: # yes! found! 
- #self.toaster.msg( + # self.toaster.msg( # "found alternative in reference nif") branchtransform_extra = \ refskelroot_branch.get_transform(refskelroot).get_inverse() break else: for skelroot_ref \ - in self.data.get_global_iterator(): + in self.data.get_global_iterator(): if not isinstance(skelroot_ref, NifFormat.NiAVObject): continue if refskelroot.name == skelroot_ref.name: # yes! found! - #self.toaster.msg( + # self.toaster.msg( # "found alternative in nif") branchtransform_extra = \ skelroot_ref.get_transform(skelroot) @@ -252,21 +256,21 @@ def branchentry(self, branch): # to a vertex in the reference geometry in the position # of the reference bone reftransform = ( - refbonedata.get_transform() - * refbonenode.get_transform(refskelroot) - * refskeldata.get_transform()) + refbonedata.get_transform() + * refbonenode.get_transform(refskelroot) + * refskeldata.get_transform()) # calculate total transform matrix that would be applied # to a vertex in this branch in the position of the # reference bone branchtransform = ( - bonedata.get_transform() - * refbonenode.get_transform(refskelroot) # NOT a typo - * skeldata.get_transform() - * branchtransform_extra) # skelroot differences + bonedata.get_transform() + * refbonenode.get_transform(refskelroot) # NOT a typo + * skeldata.get_transform() + * branchtransform_extra) # skelroot differences # compare if not self.are_matrices_equal(reftransform, branchtransform): - #raise ValueError( + # raise ValueError( self.toaster.msg( "transform mismatch\n%s\n!=\n%s\n" % (reftransform, branchtransform)) @@ -278,6 +282,7 @@ def branchentry(self, branch): # keep iterating return True + class SpellCheckBhkBodyCenter(pyffi.spells.nif.NifSpell): """Recalculate the center of mass and inertia matrix, compare them to the originals, and report accordingly. 
@@ -299,15 +304,15 @@ def branchentry(self, branch): return True else: self.toaster.msg("getting rigid body mass, center, and inertia") - mass = branch.mass - center = branch.center.get_copy() - inertia = branch.inertia.get_copy() + mass = branch.rigid_body_info.mass + center = branch.rigid_body_info.center.get_copy() + inertia = branch.rigid_body_info.inertia_tensor.get_copy() # TODO: intertia has been renamed self.toaster.msg("recalculating...") - branch.update_mass_center_inertia(mass=branch.mass) + branch.update_mass_center_inertia(mass=branch.rigid_body_info.mass) - #self.toaster.msg("checking mass...") - #if mass != branch.mass: + # self.toaster.msg("checking mass...") + # if mass != branch.mass: # #raise ValueError("center does not match; original %s, calculated %s"%(center, branch.center)) # self.toaster.logger.warn("warning: mass does not match; original %s, calculated %s"%(mass, branch.mass)) # # adapt calculated inertia matrix with observed mass @@ -315,41 +320,42 @@ def branchentry(self, branch): # correction = mass / branch.mass # for i in range(12): # branch.inertia[i] *= correction - #else: + # else: # self.toaster.msg("perfect match!") self.toaster.msg("checking center...") report = {} - if center != branch.center: - #raise ValueError("center does not match; original %s, calculated %s"%(center, branch.center)) + if center != branch.rigid_body_info.center: + # raise ValueError("center does not match; original %s, calculated %s"%(center, branch.center)) self.toaster.logger.warn( "center does not match; original %s, calculated %s" - % (center, branch.center)) + % (center, branch.rigid_body_info.center)) report["center"] = { "orig": center.as_tuple(), - "calc": branch.center.as_tuple(), - } + "calc": branch.rigid_body_info.center.as_tuple(), + } self.toaster.msg("checking inertia...") - scale = max(max(abs(x) for x in row) for row in inertia.as_list() + branch.inertia.as_list()) + scale = max(max(abs(x) for x in row) for row in inertia.as_list() + 
branch.rigid_body_info.inertia_tensor.as_list()) if (max(max(abs(x - y) for x, y in zip(row1, row2)) - for row1, row2 in zip(inertia.as_list(), branch.inertia.as_list())) - > 0.1 * scale): - #raise ValueError("center does not match; original %s, calculated %s"%(center, branch.center)) + for row1, row2 in zip(inertia.as_list(), branch.rigid_body_info.inertia_tensor.as_list())) + > 0.1 * scale): + # raise ValueError("center does not match; original %s, calculated %s"%(center, branch.center)) self.toaster.logger.warn( "inertia does not match:\n\noriginal\n%s\n\ncalculated\n%s\n" - % (inertia, branch.inertia)) + % (inertia, branch.rigid_body_info.inertia)) report["inertia"] = { "orig": inertia.as_tuple(), - "calc": branch.inertia.as_tuple(), - } + "calc": branch.rigid_body_info.inertia_tensor.as_tuple(), + } if report: self.append_report(report) # stop recursing return False + class SpellCheckCenterRadius(pyffi.spells.nif.NifSpell): """Recalculate the center and radius, compare them to the originals, and report mismatches. 
@@ -380,10 +386,10 @@ def branchentry(self, branch): report = {} self.toaster.msg("getting bounding sphere") center = NifFormat.Vector3() - center.x = branch.center.x - center.y = branch.center.y - center.z = branch.center.z - radius = branch.radius + center.x = branch.bounding_sphere.center.x + center.y = branch.bounding_sphere.center.y + center.z = branch.bounding_sphere.center.z + radius = branch.bounding_sphere.radius self.toaster.msg("checking that all vertices are inside") maxr = 0.0 @@ -396,37 +402,38 @@ def branchentry(self, branch): maxr = maxr ** 0.5 if maxr > 1.01 * radius + 0.01: - #raise ValueError( + # raise ValueError( self.toaster.logger.warn( - "not all vertices inside bounding sphere (vertex %s, error %s)" - % (maxv, abs(maxr - radius))) + "not all vertices inside bounding sphere (vertex %s, error %s)" + % (maxv, abs(maxr - radius))) report["vertex_outside"] = maxv.as_tuple() self.toaster.msg("recalculating bounding sphere") branch.update_center_radius() self.toaster.msg("comparing old and new spheres") - if center != branch.center: - self.toaster.logger.warn( - "center does not match; original %s, calculated %s" - % (center, branch.center)) - report["center"] = { - "orig": center.as_tuple(), - "calc": branch.center.as_tuple(), - } - if abs(radius - branch.radius) > NifFormat.EPSILON: - self.toaster.logger.warn( - "radius does not match; original %s, calculated %s" - % (radius, branch.radius)) - report["radius"] = { - "orig": radius, - "calc": branch.radius, - } + if center != branch.bounding_sphere.center: + self.toaster.logger.warn( + "center does not match; original %s, calculated %s" + % (center, branch.bounding_sphere.center)) + report["center"] = { + "orig": center.as_tuple(), + "calc": branch.bounding_sphere.center.as_tuple(), + } + if abs(radius - branch.bounding_sphere.radius) > NifFormat.EPSILON: + self.toaster.logger.warn( + "radius does not match; original %s, calculated %s" + % (radius, branch.bounding_sphere.radius)) + 
report["radius"] = { + "orig": radius, + "calc": branch.bounding_sphere.radius, + } if report: self.append_report(report) # stop recursing return False + class SpellCheckSkinCenterRadius(pyffi.spells.nif.NifSpell): """Recalculate the skindata center and radius for each bone, compare them to the originals, and report mismatches. @@ -442,7 +449,7 @@ def branchinspect(self, branch): NifFormat.NiGeometry)) def branchentry(self, branch): - if not(isinstance(branch, NifFormat.NiGeometry) and branch.is_skin()): + if not (isinstance(branch, NifFormat.NiGeometry) and branch.is_skin()): # keep recursing return True else: @@ -464,7 +471,7 @@ def branchentry(self, branch): % (branch.skin_instance.bones[i].name, center[i], skindatablock.bounding_sphere_offset)) if abs(radius[i] - skindatablock.bounding_sphere_radius) \ - > NifFormat.EPSILON: + > NifFormat.EPSILON: self.toaster.logger.error( "%s radius does not match; original %s, calculated %s" % (branch.skin_instance.bones[i].name, @@ -472,6 +479,7 @@ def branchentry(self, branch): # stop recursing return False + class SpellCheckConvexVerticesShape(pyffi.spells.nif.NifSpell): """This test checks whether each vertex is the intersection of at least three planes. @@ -503,7 +511,7 @@ def branchentry(self, branch): n.x = n4.x n.y = n4.y n.z = n4.z - d = n4.w + d = n4.w if abs(v * n + d) < 0.01: num_intersect += 1 if num_intersect == 0: @@ -518,6 +526,7 @@ def branchentry(self, branch): # stop recursing return False + class SpellCheckMopp(pyffi.spells.nif.NifSpell): """Parse and dump mopp trees, and check their validity: @@ -599,18 +608,19 @@ def branchentry(self, branch): self.toaster.logger.debug([mopp[k] for k in range(i, min(branch.mopp_data_size, i + 10))]) error = True - #if error: + # if error: # raise ValueError("mopp parsing failed") # stop recursing return False + class SpellCheckTangentSpace(pyffi.spells.nif.NifSpell): """Check and recalculate the tangent space, compare them to the originals, and report accordingly. 
""" SPELLNAME = 'check_tangentspace' - PRECISION = 0.3 #: Difference between values worth warning about. + PRECISION = 0.3 #: Difference between values worth warning about. def datainspect(self): return self.inspectblocktype(NifFormat.NiTriBasedGeom) @@ -629,7 +639,7 @@ def branchentry(self, branch): # no tangent space present return False self.toaster.msg("checking tangent space") - oldspace = [] # we will store the old tangent space here + oldspace = [] # we will store the old tangent space here for i, (n, t, b) in enumerate(tangentspace): oldspace.append(n.as_list() + t.as_list() + b.as_list()) if abs(n * n - 1) > NifFormat.EPSILON: @@ -649,16 +659,16 @@ def branchentry(self, branch): self.toaster.logger.warn( 'non-ortogonal tangent space at vertex %i' % i) self.toaster.logger.warn( - 'n * t = %s * %s = %f'%(n, t, n * t)) + 'n * t = %s * %s = %f' % (n, t, n * t)) self.toaster.logger.warn( - 'n * b = %s * %s = %f'%(n, b, n * b)) + 'n * b = %s * %s = %f' % (n, b, n * b)) self.toaster.logger.warn( - 't * b = %s * %s = %f'%(t, b, t * b)) + 't * b = %s * %s = %f' % (t, b, t * b)) self.toaster.logger.warn( 'volume = %f' % volume) # recalculate the tangent space branch.update_tangent_space() - newspace = [] # we will store the old tangent space here + newspace = [] # we will store the old tangent space here for i, (n, t, b) in enumerate(branch.get_tangent_space()): newspace.append(n.as_list() + t.as_list() + b.as_list()) # check if old matches new @@ -676,9 +686,10 @@ def branchentry(self, branch): self.toaster.logger.warn('new: %s' % new[3:6]) self.toaster.logger.warn('new: %s' % new[6:9]) break - + # don't recurse further - return False + return False + class SpellCheckTriStrip(pyffi.spells.nif.NifSpell): """Run the stripifier on all triangles from NIF files. 
This spell is also @@ -793,6 +804,7 @@ def report_strip_statistics(triangles, strips): unstitchedstrips = pyffi.utils.tristrip.unstitch_strip(stitchedstrip) pyffi.utils.tristrip._check_strips(triangles, unstitchedstrips) + class SpellCheckVersion(pyffi.spells.nif.NifSpell): """Checks all versions used by the files (without reading the full files). """ @@ -800,9 +812,9 @@ class SpellCheckVersion(pyffi.spells.nif.NifSpell): @classmethod def toastentry(cls, toaster): - toaster.versions = {} # counts number of nifs with version - toaster.user_versions = {} # tracks used user version's per version - toaster.user_version_2s = {} # tracks used user version2's per version + toaster.versions = {} # counts number of nifs with version + toaster.user_versions = {} # tracks used user version's per version + toaster.bs_versions = {} # tracks used user version2's per version return True @classmethod @@ -811,30 +823,31 @@ def toastexit(cls, toaster): toaster.msgblockbegin("version 0x%08X" % version) toaster.msg("number of nifs: %s" % toaster.versions[version]) toaster.msg("user version: %s" % toaster.user_versions[version]) - toaster.msg("user version2: %s" % toaster.user_version_2s[version]) + toaster.msg("user version2: %s" % toaster.bs_versions[version]) toaster.msgblockend() def datainspect(self): # some shortcuts version = self.data.version user_version = self.data.user_version - user_version_2 = self.data.user_version_2 + bs_version = self.data.bs_version # report self.toaster.msg("version 0x%08X" % version) self.toaster.msg("user version %i" % user_version) - self.toaster.msg("user version %i" % user_version_2) + self.toaster.msg("user version %i" % bs_version) # update stats if version not in self.toaster.versions: self.toaster.versions[version] = 0 self.toaster.user_versions[version] = [] - self.toaster.user_version_2s[version] = [] + self.toaster.bs_versions[version] = [] self.toaster.versions[version] += 1 if user_version not in self.toaster.user_versions[version]: 
self.toaster.user_versions[version].append(user_version) - if user_version_2 not in self.toaster.user_version_2s[version]: - self.toaster.user_version_2s[version].append(user_version_2) + if bs_version not in self.toaster.bs_versions[version]: + self.toaster.bs_versions[version].append(bs_version) return False + class SpellCheckMaterialEmissiveValue(pyffi.spells.nif.NifSpell): """Check (and warn) about potentially bad material emissive values.""" @@ -855,7 +868,7 @@ def branchinspect(self, branch): # only inspect the NiAVObject branch, and material properties return isinstance(branch, (NifFormat.NiAVObject, NifFormat.NiMaterialProperty)) - + def branchentry(self, branch): if isinstance(branch, NifFormat.NiMaterialProperty): # check if any emissive values exceeds usual values @@ -876,6 +889,7 @@ def branchentry(self, branch): # keep recursing into children return True + class SpellCheckTriangles(pyffi.spells.nif.NifSpell): """Base class for spells which need to check all triangles.""" @@ -908,6 +922,7 @@ def branchentry(self, branch): def toastexit(cls, toaster): toaster.msg("found {0} geometries".format(len(toaster.geometries))) + try: import numpy import scipy.optimize @@ -915,15 +930,16 @@ def toastexit(cls, toaster): numpy = None scipy = None + class SpellCheckTrianglesATVR(SpellCheckTriangles): """Find optimal parameters for vertex cache algorithm by simulated annealing. 
""" SPELLNAME = "check_triangles_atvr" - INITIAL = [1.5, 0.75, 2.0, 0.5] - LOWER = [0.01, -10.0, 0.1, 0.01] - UPPER = [5.0, 1.0, 10.0, 5.0] + INITIAL = [1.5, 0.75, 2.0, 0.5] + LOWER = [0.01, -10.0, 0.1, 0.01] + UPPER = [5.0, 1.0, 10.0, 5.0] @classmethod def toastentry(cls, toaster): @@ -943,8 +959,8 @@ def get_atvr(cls, toaster, *args): # check bounds if any(value < lower or value > upper for (lower, value, upper) in zip( - cls.LOWER, args, cls.UPPER)): - return 1e30 # infinity + cls.LOWER, args, cls.UPPER)): + return 1e30 # infinity cache_decay_power, last_tri_score, valence_boost_scale, valence_boost_power = args vertex_score = pyffi.utils.vertex_cache.VertexScore() vertex_score.CACHE_DECAY_POWER = cache_decay_power @@ -953,8 +969,8 @@ def get_atvr(cls, toaster, *args): vertex_score.VALENCE_BOOST_POWER = valence_boost_power vertex_score.precalculate() print("{0:.3f} {1:.3f} {2:.3f} {3:.3f}".format( - cache_decay_power, last_tri_score, - valence_boost_scale, valence_boost_power)) + cache_decay_power, last_tri_score, + valence_boost_scale, valence_boost_power)) atvr = [] for triangles in toaster.geometries: mesh = pyffi.utils.vertex_cache.Mesh(triangles, vertex_score) @@ -974,10 +990,10 @@ def toastexit(cls, toaster): full_output=True, lower=numpy.array(cls.LOWER), upper=numpy.array(cls.UPPER), - #maxeval=10, - #maxaccept=10, - #maxiter=10, - #dwell=10, - #feps=0.1, - ) + # maxeval=10, + # maxaccept=10, + # maxiter=10, + # dwell=10, + # feps=0.1, + ) toaster.msg(str(result)) diff --git a/pyffi/spells/nif/dump.py b/pyffi/spells/nif/dump.py index 599a1a9f6..5c1a7b6f3 100644 --- a/pyffi/spells/nif/dump.py +++ b/pyffi/spells/nif/dump.py @@ -41,21 +41,22 @@ import codecs import http.server -import ntpath # explicit windows style path manipulations +import ntpath # explicit windows style path manipulations import os import tempfile -import types import webbrowser -from xml.sax.saxutils import escape # for htmlreport +from xml.sax.saxutils import escape # for htmlreport 
-from pyffi.formats.nif import NifFormat -from pyffi.spells.nif import NifSpell import pyffi.object_models.xml.array import pyffi.object_models.xml.struct_ +from pyffi.formats.nif import NifFormat +from pyffi.spells.nif import NifSpell + def tohex(value, nbytes=4): """Improved version of hex.""" - return ("0x%%0%dX" % (2*nbytes)) % (int(str(value)) & (2**(nbytes*8)-1)) + return ("0x%%0%dX" % (2 * nbytes)) % (int(str(value)) & (2 ** (nbytes * 8) - 1)) + def dumpArray(arr): """Format an array. @@ -65,7 +66,7 @@ def dumpArray(arr): :return: String describing the array. """ text = "" - if arr._count2 == None: + if arr._width == None: for i, element in enumerate(list.__iter__(arr)): if i > 16: text += "etc...\n" @@ -84,6 +85,7 @@ def dumpArray(arr): break return text if text else "None" + def dumpBlock(block): """Return formatted string for block without following references. @@ -105,6 +107,7 @@ def dumpBlock(block): text = '* %s : \n' % attr.name return text + def dumpAttr(attr): """Format an attribute. 
@@ -119,7 +122,7 @@ def dumpAttr(attr): return "<%s:%s:0x%08X>" % (ref.__class__.__name__, ref.name, id(attr)) else: - return "<%s:0x%08X>" % (ref.__class__.__name__,id(attr)) + return "<%s:0x%08X>" % (ref.__class__.__name__, id(attr)) else: return "" elif isinstance(attr, list): @@ -136,7 +139,8 @@ def dumpAttr(attr): return tohex(attr, 4) else: return str(attr) - + + class SpellDumpAll(NifSpell): """Dump the whole NIF file.""" @@ -148,6 +152,7 @@ def branchentry(self, branch): # continue recursion return True + class SpellDumpTex(NifSpell): """Dump the texture and material info of all geometries.""" @@ -194,20 +199,21 @@ def branchentry(self, branch): elif isinstance(branch, NifFormat.BSShaderTextureSet): textures = [path.decode() for path in branch.textures if path.decode() != ''] if len(textures) > 0: - for n, tex in enumerate (textures): + for n, tex in enumerate(textures): self.toaster.msg('%i: %s' % (n, tex)) - else: + else: self.toaster.msg('BSShaderTextureSet has no Textures') return False else: # keep looking for blocks of interest return True + class SpellHtmlReport(NifSpell): """Make a html report of selected blocks.""" SPELLNAME = "dump_htmlreport" - ENTITIES = { "\n": "
" } + ENTITIES = {"\n": "
"} @classmethod def toastentry(cls, toaster): @@ -220,7 +226,7 @@ def _branchinspect(self, branch): # enter every branch # (the base method is called in branch entry) return True - + def branchentry(self, branch): # check if this branch must be checked, if not, recurse further if not NifSpell._branchinspect(self, branch): @@ -230,15 +236,15 @@ def branchentry(self, branch): if not reports: # start a new report for this block type row = "" - row += "%s" % "file" - row += "%s" % "id" + row += "%s" % "file" + row += "%s" % "id" for attr in branch._get_filtered_attribute_list(data=self.data): row += ("%s" % escape(attr.displayname, self.ENTITIES)) row += "" reports = [row] self.toaster.reports_per_blocktype[blocktype] = reports - + row = "" row += "%s" % escape(self.stream.name) row += "%s" % escape("0x%08X" % id(branch), self.ENTITIES) @@ -256,10 +262,10 @@ def branchentry(self, branch): def toastexit(cls, toaster): if toaster.reports_per_blocktype: rows = [] - rows.append( "" ) - rows.append( "Report" ) - rows.append( "" ) - rows.append( "" ) + rows.append("") + rows.append("Report") + rows.append("") + rows.append("") for blocktype, reports in toaster.reports_per_blocktype.items(): rows.append("

%s

" % blocktype) @@ -280,16 +286,18 @@ def browser(cls, htmlstr): Instantiates a trivial http server and calls webbrowser.open with a URL to retrieve html from that server. - """ + """ + class RequestHandler(http.server.BaseHTTPRequestHandler): def do_GET(self): - bufferSize = 1024*1024 + bufferSize = 1024 * 1024 for i in range(0, len(htmlstr), bufferSize): - self.wfile.write(htmlstr[i:i+bufferSize]) + self.wfile.write(htmlstr[i:i + bufferSize]) server = http.server.HTTPServer(('127.0.0.1', 0), RequestHandler) webbrowser.open('http://127.0.0.1:%s' % server.server_port) - server.handle_request() + server.handle_request() + class SpellExportPixelData(NifSpell): """Export embedded images as DDS files. If the toaster's @@ -326,7 +334,7 @@ def branchinspect(self, branch): def branchentry(self, branch): if (isinstance(branch, NifFormat.NiSourceTexture) - and branch.pixel_data and branch.file_name): + and branch.pixel_data and branch.file_name): self.save_as_dds(branch.pixel_data, branch.file_name) return False elif isinstance(branch, NifFormat.ATextureRenderData): @@ -372,8 +380,8 @@ def get_pixeldata_head_root(texture_filename): # XXX following is disabled because not all textures in Bully # XXX actually have this form; use "-a textures" for this game # make relative path for Bully SE - #tmp1, tmp2, tmp3 = head.partition("\\bully\\temp\\export\\") - #if tmp2: + # tmp1, tmp2, tmp3 = head.partition("\\bully\\temp\\export\\") + # if tmp2: # head = tmp3 # for linux: convert backslash to forward slash head = head.replace("\\", "/") @@ -416,6 +424,7 @@ def save_as_dds(self, pixeldata, texture_filename): if stream: stream.close() + class SpellDumpPython(NifSpell): """Convert a nif into python code.""" @@ -445,18 +454,16 @@ def print_instance(self, name, _value, default=None): if _value._count2 is None: for i, elem in enumerate(list.__iter__(_value)): if self.print_instance( - "%s[%i]" % (name, i), elem): - + "%s[%i]" % (name, i), elem): result = True else: for i, elemlist in 
enumerate(list.__iter__(_value)): for j, elem in enumerate(list.__iter__(elemlist)): if self.print_instance( - "%s[%i][%i]" % (name, i, j), elem): - + "%s[%i][%i]" % (name, i, j), elem): result = True return result - elif isinstance(_value, pyffi.object_models.xml.basic.BasicBase): + elif isinstance(_value, pyffi.object_models.basic.BasicBase): value = _value.get_value() if default is None: default = type(_value)().get_value() @@ -505,8 +512,8 @@ def dataentry(self): self.print_("n_data.version = %s" % hex(self.data.version)) if self.data.user_version: self.print_("n_data.user_version = %s" % self.data.user_version) - if self.data.user_version_2: - self.print_("n_data.user_version_2 = %s" % self.data.user_version_2) + if self.data.bs_version: + self.print_("n_data.bs_version = %s" % self.data.bs_version) if self.data.modification: self.print_("n_data.modification = %s" % repr(self.data.modification)) self.print_("n_create_blocks(n_data)") diff --git a/pyffi/spells/nif/fix.py b/pyffi/spells/nif/fix.py index 667492578..1471eceaa 100644 --- a/pyffi/spells/nif/fix.py +++ b/pyffi/spells/nif/fix.py @@ -122,10 +122,10 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- +import pyffi.spells.nif.check # recycle checking spells for update spells from pyffi.formats.nif import NifFormat from pyffi.spells.nif import NifSpell -import pyffi.spells.nif -import pyffi.spells.nif.check # recycle checking spells for update spells + class SpellDelTangentSpace(NifSpell): """Delete tangentspace if it is present.""" @@ -146,7 +146,7 @@ def branchentry(self, branch): for extra in branch.get_extra_datas(): if isinstance(extra, NifFormat.NiBinaryExtraData): if (extra.name == - b'Tangent space (binormal & tangent vectors)'): + b'Tangent space (binormal & tangent vectors)'): self.toaster.msg("removing tangent space block") branch.remove_extra_data(extra) self.changed = True @@ -155,6 +155,7 @@ def branchentry(self, branch): # recurse 
further return True + class SpellAddTangentSpace(NifSpell): """Add tangentspace if none is present.""" @@ -174,7 +175,7 @@ def branchentry(self, branch): for extra in branch.get_extra_datas(): if isinstance(extra, NifFormat.NiBinaryExtraData): if (extra.name == - b'Tangent space (binormal & tangent vectors)'): + b'Tangent space (binormal & tangent vectors)'): # tangent space found, done! return False # no tangent space found @@ -187,6 +188,7 @@ def branchentry(self, branch): # recurse further return True + class SpellFFVT3RSkinPartition(NifSpell): """Create or update skin partition, with settings that work for Freedom Force vs. The 3rd Reich.""" @@ -217,6 +219,7 @@ def branchentry(self, branch): # recurse further return True + class SpellParseTexturePath(NifSpell): """Base class for spells which must parse all texture paths, with hook for texture path substitution. @@ -241,7 +244,6 @@ def datainspect(self): return True else: return False - def branchinspect(self, branch): # only inspect the NiAVObject branch, texturing properties and source @@ -251,19 +253,20 @@ def branchinspect(self, branch): NifFormat.NiSourceTexture, NifFormat.BSLightingShaderProperty, NifFormat.BSShaderTextureSet)) - + def branchentry(self, branch): if isinstance(branch, NifFormat.NiSourceTexture): branch.file_name = self.substitute(branch.file_name) return False elif isinstance(branch, NifFormat.BSShaderTextureSet): - for n, tex in enumerate (branch.textures): + for n, tex in enumerate(branch.textures): branch.textures[n] = self.substitute(tex) return False else: return True + class SpellFixTexturePath(SpellParseTexturePath): r"""Fix the texture path. Transforms 0x0a into \n and 0x0d into \r. 
This fixes a bug in nifs saved with older versions of @@ -275,12 +278,12 @@ class SpellFixTexturePath(SpellParseTexturePath): """ SPELLNAME = "fix_texturepath" - + def substitute(self, old_path): new_path = old_path new_path = new_path.replace(b'\n', b'\\n') new_path = new_path.replace(b'\r', b'\\r') - new_path = new_path.replace(b'/', b'\\') + new_path = new_path.replace(b'/', b'\\') # baphometal found some nifs that use double slashes # this causes textures not to show, so here we convert them # back to single slashes @@ -296,6 +299,7 @@ def substitute(self, old_path): self.changed = True return new_path + # the next spell solves issue #2065018, MiddleWolfRug01.NIF class SpellDetachHavokTriStripsData(NifSpell): """For NiTriStrips if their NiTriStripsData also occurs in a @@ -329,20 +333,21 @@ def branchinspect(self, branch): return isinstance(branch, (NifFormat.NiAVObject, NifFormat.bhkCollisionObject, NifFormat.bhkRefObject)) - + def branchentry(self, branch): if isinstance(branch, NifFormat.bhkNiTriStripsShape): for i, data in enumerate(branch.strips_data): if data in [otherbranch.data for otherbranch in self.nitristrips]: - # detach! - self.toaster.msg("detaching havok data") - branch.strips_data[i] = NifFormat.NiTriStripsData().deepcopy(data) - self.changed = True + # detach! 
+ self.toaster.msg("detaching havok data") + branch.strips_data[i] = NifFormat.NiTriStripsData().deepcopy(data) + self.changed = True return False else: return True + class SpellClampMaterialAlpha(NifSpell): """Clamp corrupted material alpha values.""" @@ -357,7 +362,7 @@ def branchinspect(self, branch): # only inspect the NiAVObject branch, and material properties return isinstance(branch, (NifFormat.NiAVObject, NifFormat.NiMaterialProperty)) - + def branchentry(self, branch): if isinstance(branch, NifFormat.NiMaterialProperty): # check if alpha exceeds usual values @@ -379,6 +384,7 @@ def branchentry(self, branch): # keep recursing into children return True + class SpellSendGeometriesToBindPosition(pyffi.spells.nif.SpellVisitSkeletonRoots): """Transform skinned geometries so similar bones have the same bone data, and hence, the same bind position, over all geometries. @@ -391,6 +397,7 @@ def skelrootentry(self, branch): branch.send_geometries_to_bind_position() self.changed = True + class SpellSendDetachedGeometriesToNodePosition(pyffi.spells.nif.SpellVisitSkeletonRoots): """Transform geometries so each set of geometries that shares bones is aligned with the transform of the root bone of that set. @@ -403,6 +410,7 @@ def skelrootentry(self, branch): branch.send_detached_geometries_to_node_position() self.changed = True + class SpellSendBonesToBindPosition(pyffi.spells.nif.SpellVisitSkeletonRoots): """Transform bones so bone data agrees with bone transforms, and hence, all bones are in bind position. @@ -415,6 +423,7 @@ def skelrootentry(self, branch): branch.send_bones_to_bind_position() self.changed = True + class SpellMergeSkeletonRoots(NifSpell): """Merges skeleton roots in the NIF file so that no skeleton root has another skeleton root as child. 
Warns if merge is impossible (this happens @@ -459,7 +468,7 @@ def dataentry(self): def branchinspect(self, branch): # only inspect the NiNode branch return isinstance(branch, NifFormat.NiNode) - + def branchentry(self, branch): if branch in self.skelrootlist: result, failed = branch.merge_skeleton_roots() @@ -473,11 +482,13 @@ def branchentry(self, branch): else: return False + class SpellApplySkinDeformation(NifSpell): """Apply skin deformation to nif.""" # TODO pass + class SpellScale(NifSpell): """Scale a model.""" @@ -512,16 +523,19 @@ def branchentry(self, branch): # continue recursion return True + class SpellFixCenterRadius(pyffi.spells.nif.check.SpellCheckCenterRadius): """Recalculate geometry centers and radii.""" SPELLNAME = "fix_centerradius" READONLY = False + class SpellFixSkinCenterRadius(pyffi.spells.nif.check.SpellCheckSkinCenterRadius): """Recalculate skin centers and radii.""" SPELLNAME = "fix_skincenterradius" READONLY = False + class SpellFixMopp(pyffi.spells.nif.check.SpellCheckMopp): """Recalculate mopp data from collision geometry.""" SPELLNAME = "fix_mopp" @@ -539,6 +553,7 @@ def branchentry(self, branch): branch.update_mopp() self.changed = True + class SpellCleanStringPalette(NifSpell): """Remove unused strings from string palette.""" @@ -570,7 +585,7 @@ def branchentry(self, branch): >>> seq.string_palette = NifFormat.NiStringPalette() >>> block = seq.add_controlled_block() >>> block.string_palette = seq.string_palette - >>> block.set_variable_1("there") + >>> block.set_controller_id("there") >>> block.set_node_name("hello") >>> block.string_palette.palette.add_string("test") 12 @@ -581,7 +596,7 @@ def branchentry(self, branch): False >>> seq.string_palette.palette.get_all_strings() [b'hello', b'there'] - >>> block.get_variable_1() + >>> block.get_controller_id() b'there' >>> block.get_node_name() b'hello' @@ -618,8 +633,8 @@ def branchentry(self, branch): block.node_name = self.substitute(block.get_node_name()) block.property_type = 
self.substitute(block.get_property_type()) block.controller_type = self.substitute(block.get_controller_type()) - block.variable_1 = self.substitute(block.get_variable_1()) - block.variable_2 = self.substitute(block.get_variable_2()) + block.controller_id = self.substitute(block.get_controller_id()) + block.interpolator_id = self.substitute(block.get_interpolator_id()) # ensure single string palette for all controlled blocks block.string_palette = string_palette # ensure single string palette for all controller sequences @@ -632,8 +647,8 @@ def branchentry(self, branch): block.set_node_name(block.node_name) block.set_property_type(block.property_type) block.set_controller_type(block.controller_type) - block.set_variable_1(block.variable_1) - block.set_variable_2(block.variable_2) + block.set_controller_id(block.controller_id) + block.set_interpolator_id(block.interpolator_id) self.changed = True # do not recurse further return False @@ -641,6 +656,7 @@ def branchentry(self, branch): # keep looking for managers or sequences return True + class SpellFixFallout3StringOffsets(NifSpell): """Fix Oblivion style kf files to work with Fallout 3, by replacing empty string offsets to point to a null byte. 
@@ -652,10 +668,10 @@ class SpellFixFallout3StringOffsets(NifSpell): def datainspect(self): # only run the spell if it looks like an Oblivion kf return ( - self.data.version == 0x14000005 - and self.inspectblocktype(NifFormat.NiStringPalette) - and self.inspectblocktype(NifFormat.NiControllerSequence) - ) + self.data.version == 0x14000005 + and self.inspectblocktype(NifFormat.NiStringPalette) + and self.inspectblocktype(NifFormat.NiControllerSequence) + ) def branchinspect(self, branch): # only inspect branches where NiControllerSequence can occur @@ -671,7 +687,7 @@ def branchentry(self, branch): >>> seq.string_palette = NifFormat.NiStringPalette() >>> block = seq.add_controlled_block() >>> block.string_palette = seq.string_palette - >>> block.set_variable_1("there") + >>> block.set_controller_id("there") >>> block.set_node_name("hello") >>> block.string_palette.palette.add_string("test") 12 @@ -681,9 +697,9 @@ def branchentry(self, branch): -1 >>> block.controller_type_offset -1 - >>> block.variable_1_offset + >>> block.controller_id_offset 0 - >>> block.variable_2_offset + >>> block.interpolator_id_offset -1 >>> block.get_node_name() b'hello' @@ -691,9 +707,9 @@ def branchentry(self, branch): b'' >>> block.get_controller_type() b'' - >>> block.get_variable_1() + >>> block.get_controller_id() b'there' - >>> block.get_variable_2() + >>> block.get_interpolator_id() b'' >>> SpellFixFallout3StringOffsets().branchentry(seq) pyffi.toaster:INFO:updating empty links @@ -707,9 +723,9 @@ def branchentry(self, branch): 16 >>> block.controller_type_offset 16 - >>> block.variable_1_offset + >>> block.controller_id_offset 0 - >>> block.variable_2_offset + >>> block.interpolator_id_offset 16 >>> block.get_node_name() b'hello' @@ -719,13 +735,13 @@ def branchentry(self, branch): >>> block.get_controller_type() pyffi.nif.stringpalette:WARNING:StringPalette: no string starts at offset 16 (string is b'', preceeding character is b't') b'' - >>> block.get_variable_1() + >>> 
block.get_controller_id() b'there' - >>> block.get_variable_2() + >>> block.get_interpolator_id() pyffi.nif.stringpalette:WARNING:StringPalette: no string starts at offset 16 (string is b'', preceeding character is b't') b'' """ - if isinstance(branch,NifFormat.NiControllerSequence): + if isinstance(branch, NifFormat.NiControllerSequence): self.toaster.msg("updating empty links") # use the first string palette as reference string_palette = branch.string_palette @@ -740,8 +756,8 @@ def branchentry(self, branch): return False for block in branch.controlled_blocks: for attr in ( - "node_name", "property_type", "controller_type", - "variable_1", "variable_2"): + "node_name", "property_type", "controller_type", + "controller_id", "interpolator_id"): attr_offset = attr + "_offset" offset = getattr(block, attr_offset) if offset == -1: @@ -754,6 +770,7 @@ def branchentry(self, branch): else: return True + class SpellDelUnusedRoots(pyffi.spells.nif.NifSpell): """Remove root branches that shouldn't be root branches and are unused in the file such as NiProperty branches that are not @@ -798,6 +815,7 @@ def dataentry(self): self.changed = True return False + class SpellFixBhkSubShapes(NifSpell): """Fix bad subshape vertex counts in bhkPackedNiTriStripsShape blocks.""" @@ -833,9 +851,9 @@ def branchentry(self, branch): # calculate new number of vertices # if everything were to be fixed with this shape sub_shape_num_vertices = ( - sub_shape.num_vertices - + branch.data.num_vertices - - num_verts_in_sub_shapes) + sub_shape.num_vertices + + branch.data.num_vertices + - num_verts_in_sub_shapes) if sub_shape_num_vertices > 0: # we can do everything in the last shape # so do it @@ -852,6 +870,7 @@ def branchentry(self, branch): # recurse further return True + class SpellFixEmptySkeletonRoots(NifSpell): """Fix empty skeleton roots in an as sane as possible way.""" diff --git a/pyffi/spells/nif/modify.py b/pyffi/spells/nif/modify.py index afbed78c2..de08ace86 100644 --- 
a/pyffi/spells/nif/modify.py +++ b/pyffi/spells/nif/modify.py @@ -124,17 +124,15 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- +import codecs +import os +import re + +import pyffi.spells.nif.fix from pyffi.formats.nif import NifFormat from pyffi.object_models.common import _as_bytes from pyffi.spells.nif import NifSpell -import pyffi.spells.nif -import pyffi.spells.nif.check # recycle checking spells for update spells -import pyffi.spells.nif.fix - -import codecs -import os -import re class SpellTexturePath( pyffi.spells.nif.fix.SpellParseTexturePath): @@ -164,12 +162,13 @@ def substitute(self, old_path): new_path = os.path.join( self.toaster.texture_path, os.path.basename(old_path.replace("\\", os.sep)) - ).replace(os.sep, "\\") + ).replace(os.sep, "\\") if new_path != old_path: self.changed = True self.toaster.msg("%s -> %s" % (old_path, new_path)) return new_path + class SpellSubstituteTexturePath( pyffi.spells.nif.fix.SpellFixTexturePath): """Runs a regex replacement on texture paths.""" @@ -188,7 +187,7 @@ def toastentry(cls, toaster): dummy, toaster.regex, toaster.sub = arg.split(arg[0]) toaster.sub = _as_bytes(toaster.sub) toaster.regex = re.compile(_as_bytes(toaster.regex)) - return True + return True def substitute(self, old_path): """Returns modified texture path, and reports if path was modified. 
@@ -202,6 +201,7 @@ def substitute(self, old_path): self.toaster.msg("%s -> %s" % (old_path, new_path)) return new_path + class SpellLowResTexturePath(SpellSubstituteTexturePath): """Changes the texture path by replacing 'textures\\*' with 'textures\\lowres\\*' - used mainly for making _far.nifs @@ -221,6 +221,7 @@ def substitute(self, old_path): else: return old_path + class SpellCollisionType(NifSpell): """Sets the object collision to be a different type""" @@ -257,7 +258,7 @@ class CollisionTypeClutter(CollisionTypeAnimStatic): class CollisionTypeWeapon(CollisionTypeClutter): layer = 5 mass = 25 - + class CollisionTypeNonCollidable(CollisionTypeStatic): layer = 15 motion_system = 7 @@ -269,7 +270,7 @@ class CollisionTypeNonCollidable(CollisionTypeStatic): "weapon": CollisionTypeWeapon, "terrain": CollisionTypeTerrain, "non_collidable": CollisionTypeNonCollidable - } + } @classmethod def toastentry(cls, toaster): @@ -325,12 +326,13 @@ def branchentry(self, branch): # recurse further return True + class SpellScaleAnimationTime(NifSpell): """Scales the animation time.""" SPELLNAME = "modify_scaleanimationtime" READONLY = False - + @classmethod def toastentry(cls, toaster): if not toaster.options["arg"]: @@ -402,6 +404,7 @@ def scale_key_times(keys): # recurse further return True + class SpellReverseAnimation(NifSpell): """Reverses the animation by reversing datas in relation to the time.""" @@ -462,6 +465,7 @@ def reverse_keys(keys): # recurse further return True + class SpellCollisionMaterial(NifSpell): """Sets the object's collision material to be a different type""" @@ -481,7 +485,7 @@ class CollisionMaterialMetal: "stone": CollisionMaterialStone, "cloth": CollisionMaterialCloth, "metal": CollisionMaterialMetal - } + } @classmethod def toastentry(cls, toaster): @@ -526,6 +530,7 @@ def branchentry(self, branch): # recurse further return True + class SpellDelBranches(NifSpell): """Delete blocks that match the exclude list.""" @@ -563,6 +568,7 @@ def 
branchentry(self, branch): # this one was not excluded, keep recursing return True + class _SpellDelBranchClasses(SpellDelBranches): """Delete blocks that match a given list. Only useful as base class for other spells. @@ -579,6 +585,7 @@ def datainspect(self): def is_branch_to_be_deleted(self, branch): return isinstance(branch, self.BRANCH_CLASSES_TO_BE_DELETED) + class SpellDelVertexColor(SpellDelBranches): """Delete vertex color properties and vertex color data.""" @@ -610,6 +617,7 @@ def branchentry(self, branch): # recurse further return True + # identical to niftoaster.py modify_delbranches -x NiVertexColorProperty # delete? class SpellDelVertexColorProperty(_SpellDelBranchClasses): @@ -618,6 +626,7 @@ class SpellDelVertexColorProperty(_SpellDelBranchClasses): SPELLNAME = "modify_delvertexcolorprop" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiVertexColorProperty,) + # identical to niftoaster.py modify_delbranches -x NiAlphaProperty # delete? class SpellDelAlphaProperty(_SpellDelBranchClasses): @@ -626,6 +635,7 @@ class SpellDelAlphaProperty(_SpellDelBranchClasses): SPELLNAME = "modify_delalphaprop" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiAlphaProperty,) + # identical to niftoaster.py modify_delbranches -x NiSpecularProperty # delete? class SpellDelSpecularProperty(_SpellDelBranchClasses): @@ -634,6 +644,7 @@ class SpellDelSpecularProperty(_SpellDelBranchClasses): SPELLNAME = "modify_delspecularprop" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiSpecularProperty,) + # identical to niftoaster.py modify_delbranches -x BSXFlags # delete? class SpellDelBSXFlags(_SpellDelBranchClasses): @@ -641,7 +652,8 @@ class SpellDelBSXFlags(_SpellDelBranchClasses): SPELLNAME = "modify_delbsxflags" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.BSXFlags,) - + + # identical to niftoaster.py modify_delbranches -x NiStringExtraData # delete? 
class SpellDelStringExtraDatas(_SpellDelBranchClasses): @@ -650,6 +662,7 @@ class SpellDelStringExtraDatas(_SpellDelBranchClasses): SPELLNAME = "modify_delstringextradatas" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiStringExtraData,) + class SpellDelSkinShapes(SpellDelBranches): """Delete any geometries with a material name of 'skin'""" @@ -669,6 +682,7 @@ def branchinspect(self, branch): # only inspect the NiAVObject branch return isinstance(branch, NifFormat.NiAVObject) + # identical to niftoaster.py modify_delbranches -x NiCollisionObject # delete? class SpellDelCollisionData(_SpellDelBranchClasses): @@ -677,6 +691,7 @@ class SpellDelCollisionData(_SpellDelBranchClasses): SPELLNAME = "modify_delcollision" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiCollisionObject,) + # identical to niftoaster.py modify_delbranches -x NiTimeController # delete? class SpellDelAnimation(_SpellDelBranchClasses): @@ -685,6 +700,7 @@ class SpellDelAnimation(_SpellDelBranchClasses): SPELLNAME = "modify_delanimation" BRANCH_CLASSES_TO_BE_DELETED = (NifFormat.NiTimeController,) + class SpellDisableParallax(NifSpell): """Disable parallax shader (for Oblivion, but may work on other nifs too). """ @@ -715,6 +731,7 @@ def branchentry(self, branch): # keep recursing return True + class SpellAddStencilProperty(NifSpell): """Adds a NiStencilProperty to each geometry if it is not present.""" @@ -743,6 +760,7 @@ def branchentry(self, branch): # recurse further return True + # note: this should go into the optimize module # but we have to put it here to avoid circular dependencies class SpellCleanFarNif( @@ -766,6 +784,7 @@ class SpellCleanFarNif( def datainspect(self): return self.stream.name.endswith('_far.nif') + # TODO: implement via modify_delbranches? 
# this is like SpellCleanFarNif but with changing the texture path # and optimizing the geometry @@ -781,23 +800,25 @@ class SpellMakeFarNif( SpellDelAnimation, SpellDisableParallax, SpellLowResTexturePath)): - #TODO: implement vert decreaser. + # TODO: implement vert decreaser. """Spell to make _far type nifs (for even more optimizations, combine this with the optimize spell). """ SPELLNAME = "modify_makefarnif" + class SpellMakeSkinlessNif( pyffi.spells.SpellGroupSeries( pyffi.spells.SpellGroupParallel( SpellDelSkinShapes, SpellAddStencilProperty) - )): + )): """Spell to make fleshless CMR (Custom Model Races) clothing/armour type nifs. """ SPELLNAME = "modify_makeskinlessnif" + class SpellSubstituteStringPalette( pyffi.spells.nif.fix.SpellCleanStringPalette): """Substitute strings in a string palette.""" @@ -816,7 +837,7 @@ def toastentry(cls, toaster): dummy, toaster.regex, toaster.sub = arg.split(arg[0]) toaster.sub = _as_bytes(toaster.sub) toaster.regex = re.compile(_as_bytes(toaster.regex)) - return True + return True def substitute(self, old_string): """Returns modified string, and reports if string was modified. 
@@ -830,6 +851,7 @@ def substitute(self, old_string): self.toaster.msg("%s -> %s" % (old_string, new_string)) return new_string + class SpellChangeBonePriorities(NifSpell): """Changes controlled block priorities based on controlled block name.""" @@ -855,7 +877,7 @@ def toastentry(cls, toaster): def datainspect(self): # returns only if nif/kf contains NiSequence return self.inspectblocktype(NifFormat.NiSequence) - + def branchinspect(self, branch): # inspect the NiAVObject and NiSequence branches return isinstance(branch, (NifFormat.NiAVObject, @@ -877,6 +899,7 @@ def branchentry(self, branch): controlled_block.priority)) return True + class SpellChangeAllBonePriorities(SpellChangeBonePriorities): """Changes all controlled block priorities to supplied argument.""" @@ -907,6 +930,7 @@ def branchentry(self, branch): controlled_block.priority)) return True + # should go in dump, but is the counterpart of modify_setbonepriorities # therefore maintained here class SpellGetBonePriorities(NifSpell): @@ -938,7 +962,7 @@ def branchentry(self, branch): priority = controlled_block.priority if name not in bonepriorities: bonepriorities[name] = priority - #self.toaster.msg("noted %r priority %i" % (name, priority)) + # self.toaster.msg("noted %r priority %i" % (name, priority)) elif bonepriorities[name] != priority: self.toaster.logger.warn( "multiple priorities for %r" % name) @@ -972,6 +996,7 @@ def dataexit(self): print("%s=%i" % (name, priority), file=stream, end="\r\n") self.bonepriorities = {} + class SpellSetBonePriorities(NifSpell): """For each file.nif, restore bone priorites from file_bonepriorities.txt. 
@@ -990,9 +1015,9 @@ def dataentry(self): if os.path.exists(filename): self.toaster.msg("reading %s" % filename) with codecs.open(filename, "rb", encoding="ascii") as stream: - self.bonepriorities = {} # priorities for all sequences - sequence = "" # current sequence - bonepriorities = {} # priorities for current sequence + self.bonepriorities = {} # priorities for all sequences + sequence = "" # current sequence + bonepriorities = {} # priorities for current sequence for line in stream: line = line.rstrip('\r\n') m = re.match("\\[(.*)\\]$", line) @@ -1045,6 +1070,7 @@ def branchentry(self, branch): "%r in NIF file but not in priority file" % name) return True + class SpellSetInterpolatorTransRotScale(NifSpell): """Changes specified bone(s) translations/rotations in their NiTransformInterpolator. @@ -1072,7 +1098,7 @@ def _float(x): return None else: return float(x) - + toaster.interp_transforms = dict( (name.lower(), ([_float(x) for x in trans.split(",")], [_float(x) for x in rot.split(",")], @@ -1080,15 +1106,15 @@ def _float(x): for (name, (trans, rot, scale)) in ( (name, transrotscale.split(";")) for (name, transrotscale) in ( - name_transrotscale.split(":") - for name_transrotscale - in toaster.options["arg"].split("|")))) + name_transrotscale.split(":") + for name_transrotscale + in toaster.options["arg"].split("|")))) return True def datainspect(self): # returns only if nif/kf contains NiSequence return self.inspectblocktype(NifFormat.NiSequence) - + def branchinspect(self, branch): # inspect the NiAVObject and NiSequence branches return isinstance(branch, (NifFormat.NiAVObject, @@ -1098,7 +1124,8 @@ def branchentry(self, branch): if isinstance(branch, NifFormat.NiSequence): for controlled_block in branch.controlled_blocks: try: - (transx, transy, transz), (quatx, quaty, quatz, quatw), scale = self.toaster.interp_transforms[controlled_block.get_node_name().lower()] + (transx, transy, transz), (quatx, quaty, quatz, quatw), scale = 
self.toaster.interp_transforms[ + controlled_block.get_node_name().lower()] except KeyError: # node name not in change list continue @@ -1125,6 +1152,7 @@ def branchentry(self, branch): % (controlled_block.get_node_name())) return True + class SpellDelInterpolatorTransformData(NifSpell): """Deletes the specified bone(s) NiTransformData(s).""" @@ -1146,7 +1174,7 @@ def toastentry(cls, toaster): def datainspect(self): # returns only if nif/kf contains NiSequence return self.inspectblocktype(NifFormat.NiSequence) - + def branchinspect(self, branch): # inspect the NiAVObject and NiSequence branches return isinstance(branch, (NifFormat.NiAVObject, @@ -1157,10 +1185,12 @@ def branchentry(self, branch): for controlled_block in branch.controlled_blocks: if controlled_block.get_node_name().lower() in self.toaster.change_blocks: self.data.replace_global_node(controlled_block.interpolator.data, None) - self.toaster.msg("NiTransformData removed from interpolator for %s" % (controlled_block.get_node_name())) + self.toaster.msg( + "NiTransformData removed from interpolator for %s" % (controlled_block.get_node_name())) self.changed = True return True + class SpellCollisionToMopp(NifSpell): """Transforms non-mopp triangle collisions to the more efficient mopps.""" @@ -1204,6 +1234,7 @@ def branchentry(self, branch): # recurse further return True + class SpellMirrorAnimation(NifSpell): """Mirrors the animation by switching bones and mirroring their x values. 
Only useable on creature/character animations (well any animations @@ -1217,7 +1248,7 @@ def datainspect(self): # returns more than needed but easiest way to ensure it catches all # types of animations return True - + def dataentry(self): # make list of used bones self.old_bone_data = {} @@ -1226,7 +1257,9 @@ def dataentry(self): for block in branch.controlled_blocks: name = block.get_node_name().lower() if ' r ' in name or ' l ' in name: - self.old_bone_data[name] = [block.interpolator, block.controller, block.priority, block.string_palette, block.node_name_offset, block.controller_type_offset] + self.old_bone_data[name] = [block.interpolator, block.controller, block.priority, + block.string_palette, block.node_name_offset, + block.controller_type_offset] if self.old_bone_data: return True @@ -1241,15 +1274,18 @@ def branchinspect(self, branch): def branchentry(self, branch): old_bone_data = self.old_bone_data - + if isinstance(branch, NifFormat.NiControllerSequence): for block in branch.controlled_blocks: node_name = block.get_node_name().lower() - if ' l ' in node_name: node_name = node_name.replace(' l ', ' r ') - elif ' r ' in node_name: node_name = node_name.replace(' r ', ' l ') + if ' l ' in node_name: + node_name = node_name.replace(' l ', ' r ') + elif ' r ' in node_name: + node_name = node_name.replace(' r ', ' l ') if node_name in old_bone_data: self.changed = True - block.interpolator, block.controller, block.priority, block.string_palette, block.node_name_offset, block.controller_type_offset = old_bone_data[node_name] + block.interpolator, block.controller, block.priority, block.string_palette, block.node_name_offset, block.controller_type_offset = \ + old_bone_data[node_name] # and then reverse x movements (since otherwise the movement of f.e. 
an arm towards the center of the body will be still in the same direction but away from the body if not block.interpolator: continue ip = block.interpolator diff --git a/pyffi/spells/nif/optimize.py b/pyffi/spells/nif/optimize.py index c44b8455f..7b27633b2 100644 --- a/pyffi/spells/nif/optimize.py +++ b/pyffi/spells/nif/optimize.py @@ -62,21 +62,21 @@ # -------------------------------------------------------------------------- -import os.path # exists +import os.path # exists -from pyffi.formats.nif import NifFormat -from pyffi.utils import unique_map -import pyffi.utils.tristrip -import pyffi.utils.vertex_cache import pyffi.spells import pyffi.spells.nif import pyffi.spells.nif.fix import pyffi.spells.nif.modify +import pyffi.utils.tristrip +import pyffi.utils.vertex_cache +from pyffi.formats.nif import NifFormat +from pyffi.utils import unique_map # localization -#import gettext -#_ = gettext.translation('pyffi').ugettext -_ = lambda msg: msg # stub, for now +# import gettext +# _ = gettext.translation('pyffi').ugettext +_ = lambda msg: msg # stub, for now # set flag to overwrite files __readonly__ = False @@ -91,6 +91,7 @@ python niftoaster.py optimize --exclude=NiMaterialProperty /path/to/copy/of/my/nifs """ + class SpellCleanRefLists(pyffi.spells.nif.NifSpell): """Remove empty and duplicate entries in reference lists.""" @@ -156,6 +157,7 @@ def branchentry(self, branch): # always recurse further return True + class SpellMergeDuplicates(pyffi.spells.nif.NifSpell): """Remove duplicate branches.""" @@ -188,12 +190,12 @@ def branchinspect(self, branch): def branchentry(self, branch): for otherbranch in self.branches: if (branch is not otherbranch and - branch.is_interchangeable(otherbranch)): + branch.is_interchangeable(otherbranch)): # skip properties that have controllers (the # controller data cannot always be reliably checked, # see also issue #2106668) if (isinstance(branch, NifFormat.NiProperty) - and branch.controller): + and branch.controller): continue # 
skip BSShaderProperty blocks (see niftools issue #3009832) if isinstance(branch, NifFormat.BSShaderProperty): @@ -210,6 +212,7 @@ def branchentry(self, branch): # continue recursion return True + class SpellOptimizeGeometry(pyffi.spells.nif.NifSpell): """Optimize all geometries: - remove duplicate vertices @@ -237,7 +240,7 @@ def datainspect(self): # do not optimize if an egm or tri file is detected filename = self.stream.name if (os.path.exists(filename[:-3] + "egm") - or os.path.exists(filename[:-3] + "tri")): + or os.path.exists(filename[:-3] + "tri")): return False # so far, only reference lists in NiObjectNET blocks, NiAVObject # blocks, and NiNode blocks are checked @@ -253,7 +256,7 @@ def optimize_vertices(self, data): return unique_map( vhash for i, vhash in enumerate(data.get_vertex_hash_generator( - vertexprecision=self.VERTEXPRECISION, + vertex_precision=self.VERTEXPRECISION, normalprecision=self.NORMALPRECISION, uvprecision=self.UVPRECISION, vcolprecision=self.VCOLPRECISION))) @@ -287,7 +290,7 @@ def branchentry(self, branch): "mesh has additional geometry data" " which is not well understood: not optimizing") return False - + # we found a geometry to optimize # we're going to change the data @@ -306,7 +309,7 @@ def branchentry(self, branch): data = branch.data v_map, v_map_inverse = self.optimize_vertices(data) - + self.toaster.msg("(num vertices was %i and is now %i)" % (len(v_map), len(v_map_inverse))) @@ -328,13 +331,13 @@ def branchentry(self, branch): "(ATVR reduced from %.3f to %.3f)" % (old_atvr, new_atvr)) else: self.toaster.msg( - "(ATVR stable at %.3f)" % old_atvr) - # optimize triangles to have sequentially ordered indices + "(ATVR stable at %.3f)" % old_atvr) + # optimize triangles to have sequentially ordered indices self.toaster.msg("optimizing vertex ordering") v_map_opt = pyffi.utils.vertex_cache.get_cache_optimized_vertex_map( triangles) triangles = [(v_map_opt[v0], v_map_opt[v1], v_map_opt[v2]) - for v0, v1, v2 in triangles] + for v0, 
v1, v2 in triangles] # update vertex map and its inverse for i in range(data.num_vertices): try: @@ -371,9 +374,9 @@ def branchentry(self, branch): # copy old data oldverts = [(v.x, v.y, v.z) for v in data.vertices] oldnorms = [(n.x, n.y, n.z) for n in data.normals] - olduvs = [[(uv.u, uv.v) for uv in uvset] for uvset in data.uv_sets] + olduvs = [[(uv.u, uv.v) for uv in uvset] for uvset in data.uv_sets] oldvcols = [(c.r, c.g, c.b, c.a) for c in data.vertex_colors] - if branch.skin_instance: # for later + if branch.skin_instance: # for later oldweights = branch.get_vertex_weights() # set new data data.num_vertices = new_numvertices @@ -487,7 +490,7 @@ def branchentry(self, branch): .format(morphdata.num_vertices, len(v_map))) morphdata.num_vertices = len(v_map) for morph in morphdata.morphs: - morph.arg = morphdata.num_vertices # manual argument passing + morph.arg = morphdata.num_vertices # manual argument passing morph.vectors.update_size() # now remap morph vertices for morph in morphdata.morphs: @@ -502,19 +505,20 @@ def branchentry(self, branch): # resize matrices morphdata.num_vertices = new_numvertices for morph in morphdata.morphs: - morph.arg = morphdata.num_vertices # manual argument passing + morph.arg = morphdata.num_vertices # manual argument passing morph.vectors.update_size() # recalculate tangent space (only if the branch already exists) if (branch.find(block_name=b'Tangent space (binormal & tangent vectors)', block_type=NifFormat.NiBinaryExtraData) - or (data.num_uv_sets & 61440) or (data.extra_vectors_flags & 16)): + or data.data_flags.num_uv_sets or data.bs_data_flags.has_tangents): # TODO: was these renames correct? self.toaster.msg("recalculating tangent space") branch.update_tangent_space() # stop recursion return False + # XXX todo class SpellSplitGeometry(pyffi.spells.nif.NifSpell): """Optimize geometry by splitting large models into pieces. 
@@ -522,7 +526,7 @@ class SpellSplitGeometry(pyffi.spells.nif.NifSpell): """ SPELLNAME = "opt_split" READONLY = False - THRESHOLD_RADIUS = 100 #: Threshold where to split geometry. + THRESHOLD_RADIUS = 100 #: Threshold where to split geometry. # XXX todo @staticmethod @@ -578,9 +582,11 @@ def addTriangle(sourcetriangle, v_map, sourcedata, destdata): @staticmethod def get_size(vertices, triangle): """Calculate size of geometry data + given triangle.""" + def helper(oper, coord): return oper((getattr(vert, coord) for vert in triangle), oper(getattr(vert, coord) for vert in vertices)) + minx = helper(min, "x") miny = helper(min, "y") minz = helper(min, "z") @@ -591,7 +597,7 @@ def helper(oper, coord): # XXX todo: merge into branchentry spell @staticmethod - def split(geom, threshold_radius = THRESHOLD_RADIUS): + def split(geom, threshold_radius=THRESHOLD_RADIUS): """Takes a NiGeometry block and splits the geometries. Returns a NiNode which contains the splitted geometry. Note that everything is triangulated in the process.""" @@ -622,11 +628,11 @@ def split(geom, threshold_radius = THRESHOLD_RADIUS): addTriangle(triangle, v_map, geom.data, geomsplit.data) # find face that is close to current geometry for triangle in triangles: - if get_size(geomsplit.data, + if get_size(geomsplit.data, tuple(geom.data.vertices[index] for index in triangle)) < threshold_radius: - addTriangle(triangle, v_map, geom.data, geomsplit.data) - break + addTriangle(triangle, v_map, geom.data, geomsplit.data) + break else: # if exceeded, start new geometry # first finish some things in geomsplit data @@ -657,7 +663,7 @@ def branchentry(self, branch): if branch in self.optimized: # already optimized return False - + # we found a geometry to optimize # XXX todo # get geometry data @@ -670,7 +676,7 @@ def branchentry(self, branch): optimized_geometries.append(block) return False # radius is over the threshold, so re-organize the geometry - newblock = split(block, threshold_radius = 
THRESHOLD_RADIUS) + newblock = split(block, threshold_radius=THRESHOLD_RADIUS) # replace block with newblock everywhere data.replace_global_node(block, newblock) @@ -679,6 +685,7 @@ def branchentry(self, branch): # stop recursing return False + class SpellDelUnusedBones(pyffi.spells.nif.NifSpell): """Remove nodes that are not used for anything.""" @@ -701,12 +708,12 @@ def dataentry(self): def branchinspect(self, branch): # only inspect the NiNode branch return isinstance(branch, NifFormat.NiNode) - + def branchentry(self, branch): if isinstance(branch, NifFormat.NiNode): if ((not branch.children) - and (not branch.collision_object) - and (branch not in self._used_bones)): + and (not branch.collision_object) + and (branch not in self._used_bones)): self.toaster.msg("removing unreferenced bone") self.data.replace_global_node(branch, None) self.changed = True @@ -714,6 +721,7 @@ def branchentry(self, branch): return False return True + class SpellDelZeroScale(pyffi.spells.nif.NifSpell): """Remove nodes with zero scale.""" @@ -727,7 +735,7 @@ def datainspect(self): def branchinspect(self, branch): # only inspect the NiAVObject branch return isinstance(branch, NifFormat.NiAVObject) - + def branchentry(self, branch): if isinstance(branch, NifFormat.NiAVObject): if branch.scale == 0: @@ -738,12 +746,13 @@ def branchentry(self, branch): return False return True + class SpellReduceGeometry(SpellOptimizeGeometry): """Reduce vertices of all geometries.""" SPELLNAME = "opt_reducegeometry" READONLY = False - + @classmethod def toastentry(cls, toaster): if not toaster.options["arg"]: @@ -762,6 +771,7 @@ def toastentry(cls, toaster): cls.VCOLPRECISION = max(precision, 0) return True + class SpellOptimizeCollisionBox(pyffi.spells.nif.NifSpell): """Optimize collision geometries by converting shapes to primitive boxes where appropriate. 
@@ -775,12 +785,12 @@ def __init__(self, *args, **kwargs): # list of all optimized geometries so far # (to avoid optimizing the same geometry twice) self.optimized = [] - + def datainspect(self): # only run the spell if there are collisions return ( - self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape) - or self.inspectblocktype(NifFormat.bhkNiTriStripsShape)) + self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape) + or self.inspectblocktype(NifFormat.bhkNiTriStripsShape)) def branchinspect(self, branch): # only inspect the collision branches @@ -788,7 +798,7 @@ def branchinspect(self, branch): NifFormat.bhkCollisionObject, NifFormat.bhkRigidBody, NifFormat.bhkMoppBvTreeShape)) - + def get_box_shape(self, shape): """Check if the given shape is has a box shape. If so, return an equivalent (bhkConvexTransformShape +) bhkBoxShape. @@ -857,14 +867,14 @@ def get_box_shape(self, shape): # material has a bad value, this sometimes happens pass boxshape.radius = 0.1 - boxshape.unknown_8_bytes[0] = 0x6b - boxshape.unknown_8_bytes[1] = 0xee - boxshape.unknown_8_bytes[2] = 0x43 - boxshape.unknown_8_bytes[3] = 0x40 - boxshape.unknown_8_bytes[4] = 0x3a - boxshape.unknown_8_bytes[5] = 0xef - boxshape.unknown_8_bytes[6] = 0x8e - boxshape.unknown_8_bytes[7] = 0x3e + boxshape.unused_01[0] = 0x6b + boxshape.unused_01[1] = 0xee + boxshape.unused_01[2] = 0x43 + boxshape.unused_01[3] = 0x40 + boxshape.unused_01[4] = 0x3a + boxshape.unused_01[5] = 0xef + boxshape.unused_01[6] = 0x8e + boxshape.unused_01[7] = 0x3e # check translation mid = [min_[i] + 0.5 * size[i] for i in range(3)] if sum(abs(mid[i]) for i in range(3)) < 1e-6: @@ -879,7 +889,7 @@ def get_box_shape(self, shape): tfshape.transform.m_24 = mid[1] / factor tfshape.transform.m_34 = mid[2] / factor return tfshape - + def branchentry(self, branch): """Optimize a vertex based collision block: - remove duplicate vertices @@ -889,11 +899,11 @@ def branchentry(self, branch): if branch in self.optimized: # already 
optimized return False - + if (isinstance(branch, NifFormat.bhkMoppBvTreeShape) - and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape) - and isinstance(branch.shape.data, - NifFormat.hkPackedNiTriStripsData)): + and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape) + and isinstance(branch.shape.data, + NifFormat.hkPackedNiTriStripsData)): # packed collision with mopp box_shape = self.get_box_shape(branch.shape) if box_shape: @@ -902,7 +912,7 @@ def branchentry(self, branch): self.toaster.msg(_("optimized box collision")) self.changed = True self.optimized.append(branch) - return False # don't recurse farther + return False # don't recurse farther elif (isinstance(branch, NifFormat.bhkRigidBody) and isinstance(branch.shape, NifFormat.bhkNiTriStripsShape)): # unpacked collision @@ -927,9 +937,10 @@ def branchentry(self, branch): self.changed = True self.optimized.append(branch) return False - #keep recursing + # keep recursing return True + class SpellOptimizeCollisionGeometry(pyffi.spells.nif.NifSpell): """Optimize collision geometries by removing duplicate vertices.""" @@ -946,8 +957,8 @@ def __init__(self, *args, **kwargs): def datainspect(self): # only run the spell if there are collisions return ( - self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape) - or self.inspectblocktype(NifFormat.bhkNiTriStripsShape)) + self.inspectblocktype(NifFormat.bhkPackedNiTriStripsShape) + or self.inspectblocktype(NifFormat.bhkNiTriStripsShape)) def branchinspect(self, branch): # only inspect the collision branches @@ -955,7 +966,7 @@ def branchinspect(self, branch): NifFormat.bhkCollisionObject, NifFormat.bhkRigidBody, NifFormat.bhkMoppBvTreeShape)) - + def optimize_mopp(self, mopp): """Optimize a bhkMoppBvTreeShape.""" shape = mopp.shape @@ -973,7 +984,7 @@ def optimize_mopp(self, mopp): % subshape_index) v_map, v_map_inverse = unique_map( shape.get_vertex_hash_generator( - vertexprecision=self.VERTEXPRECISION, + 
vertex_precision=self.VERTEXPRECISION, subshape_index=subshape_index)) self.toaster.msg( _("(num vertices in collision shape was %i and is now %i)") @@ -996,8 +1007,8 @@ def optimize_mopp(self, mopp): shape.sub_shapes[subshape_index].num_vertices = subshape_count if shape.data.sub_shapes: # fallout 3 subshapes - shape.data.sub_shapes[subshape_index].num_vertices = subshape_count - # set new data + shape.data.sub_shapes[subshape_index].num_vertices = subshape_count + # set new data data.num_vertices = len(full_v_map_inverse) data.vertices.update_size() for old_i, v in zip(full_v_map_inverse, data.vertices): @@ -1044,7 +1055,7 @@ def optimize_mopp(self, mopp): del oldtris # update mopp data and welding info mopp.update_mopp_welding() - + def branchentry(self, branch): """Optimize a vertex based collision block: - remove duplicate vertices @@ -1059,9 +1070,9 @@ def branchentry(self, branch): return False if (isinstance(branch, NifFormat.bhkMoppBvTreeShape) - and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape) - and isinstance(branch.shape.data, - NifFormat.hkPackedNiTriStripsData)): + and isinstance(branch.shape, NifFormat.bhkPackedNiTriStripsShape) + and isinstance(branch.shape.data, + NifFormat.hkPackedNiTriStripsData)): # packed collision with mopp self.toaster.msg(_("optimizing mopp")) self.optimize_mopp(branch) @@ -1075,7 +1086,7 @@ def branchentry(self, branch): return False elif (isinstance(branch, NifFormat.bhkRigidBody) and isinstance(branch.shape, NifFormat.bhkNiTriStripsShape)): - if branch.havok_col_filter.layer == NifFormat.OblivionLayer.CLUTTER: + if branch.havok_filter.layer == NifFormat.OblivionLayer.OL_CLUTTER: # packed collisions do not work for clutter # so skip it # see issue #3194017 reported by Gratis_monsta @@ -1095,24 +1106,24 @@ def branchentry(self, branch): NifFormat.bhkPackedNiTriStripsShape)): # packed collision without mopp # add a mopp to it if it is static - if any(sub_shape.havok_col_filter.layer != 1 + if 
any(sub_shape.havok_filter.layer != 1 for sub_shape in branch.shape.get_sub_shapes()): # no mopps for non-static objects return False self.toaster.msg(_("adding mopp")) mopp = NifFormat.bhkMoppBvTreeShape() - shape = branch.shape # store reference before replacing + shape = branch.shape # store reference before replacing self.data.replace_global_node(branch.shape, mopp) mopp.shape = shape mopp.material = shape.get_sub_shapes()[0].material - mopp.unknown_8_bytes[0] = 160 - mopp.unknown_8_bytes[1] = 13 - mopp.unknown_8_bytes[2] = 75 - mopp.unknown_8_bytes[3] = 1 - mopp.unknown_8_bytes[4] = 192 - mopp.unknown_8_bytes[5] = 207 - mopp.unknown_8_bytes[6] = 144 - mopp.unknown_8_bytes[7] = 11 + mopp.unused_01[0] = 160 + mopp.unused_01[1] = 13 + mopp.unused_01[2] = 75 + mopp.unused_01[3] = 1 + mopp.unused_01[4] = 192 + mopp.unused_01[5] = 207 + mopp.unused_01[6] = 144 + mopp.unused_01[7] = 11 mopp.unknown_float = 1.0 mopp.update_mopp_welding() # call branchentry again in order to optimize the mopp @@ -1122,13 +1133,14 @@ def branchentry(self, branch): return False # keep recursing return True - + + class SpellOptimizeAnimation(pyffi.spells.nif.NifSpell): """Optimizes animations by removing duplicate keys""" SPELLNAME = "opt_optimizeanimation" READONLY = False - + @classmethod def toastentry(cls, toaster): if not toaster.options["arg"]: @@ -1137,7 +1149,6 @@ def toastentry(cls, toaster): cls.significance_check = float(toaster.options["arg"]) return True - def datainspect(self): # returns more than needed but easiest way to ensure it catches all # types of animations @@ -1155,46 +1166,52 @@ def branchinspect(self, branch): NifFormat.NiTextKeyExtraData, NifFormat.NiFloatData)) - def optimize_keys(self,keys): + def optimize_keys(self, keys): """Helper function to optimize the keys.""" new_keys = [] - #compare keys + # compare keys ## types: 0 = float/int values ## 1 = Vector4, Quaternions, QuaternionsWXYZ ## 2 = word values (ie NiTextKeyExtraData) ## 3 = Vector3 values (ie 
translations) - if len(keys) < 3: return keys # no optimization possible? - precision = 10**self.significance_check - if isinstance(keys[0].value,(float,int)): + if len(keys) < 3: return keys # no optimization possible? + precision = 10 ** self.significance_check + if isinstance(keys[0].value, (float, int)): for i, key in enumerate(keys): - if i == 0: # since we don't want to delete the first key even if it is the same as the last key. + if i == 0: # since we don't want to delete the first key even if it is the same as the last key. new_keys.append(key) - continue + continue try: - if int(precision*keys[i-1].value) != int(precision*key.value): + if int(precision * keys[i - 1].value) != int(precision * key.value): new_keys.append(key) continue - if int(precision*keys[i+1].value) != int(precision*key.value): + if int(precision * keys[i + 1].value) != int(precision * key.value): new_keys.append(key) except IndexError: new_keys.append(key) return new_keys - elif isinstance(keys[0].value,(str)): + elif isinstance(keys[0].value, (str)): for i, key in enumerate(keys): - if i == 0: # since we don't want to delete the first key even if it is the same as the last key. + if i == 0: # since we don't want to delete the first key even if it is the same as the last key. 
new_keys.append(key) - continue + continue try: - if keys[i-1].value != key.value: + if keys[i - 1].value != key.value: new_keys.append(key) continue - if keys[i+1].value != key.value: + if keys[i + 1].value != key.value: new_keys.append(key) except IndexError: new_keys.append(key) return new_keys - elif isinstance(keys[0].value,(NifFormat.Vector4,NifFormat.Quaternion,NifFormat.QuaternionXYZW)): - tempkey = [[int(keys[0].value.w*precision),int(keys[0].value.x*precision),int(keys[0].value.y*precision),int(keys[0].value.z*precision)],[int(keys[1].value.w*precision),int(keys[1].value.x*precision),int(keys[1].value.y*precision),int(keys[1].value.z*precision)],[int(keys[2].value.w*precision),int(keys[2].value.x*precision),int(keys[2].value.y*precision),int(keys[2].value.z*precision)]] + elif isinstance(keys[0].value, (NifFormat.Vector4, NifFormat.Quaternion, NifFormat.QuaternionXYZW)): + tempkey = [ + [int(keys[0].value.w * precision), int(keys[0].value.x * precision), int(keys[0].value.y * precision), + int(keys[0].value.z * precision)], + [int(keys[1].value.w * precision), int(keys[1].value.x * precision), int(keys[1].value.y * precision), + int(keys[1].value.z * precision)], + [int(keys[2].value.w * precision), int(keys[2].value.x * precision), int(keys[2].value.y * precision), + int(keys[2].value.z * precision)]] for i, key in enumerate(keys): if i == 0: new_keys.append(key) @@ -1203,10 +1220,10 @@ def optimize_keys(self,keys): tempkey[1] = tempkey[2] tempkey[2] = [] try: - tempkey[2].append(int(keys[i+1].value.w*precision)) - tempkey[2].append(int(keys[i+1].value.x*precision)) - tempkey[2].append(int(keys[i+1].value.y*precision)) - tempkey[2].append(int(keys[i+1].value.z*precision)) + tempkey[2].append(int(keys[i + 1].value.w * precision)) + tempkey[2].append(int(keys[i + 1].value.x * precision)) + tempkey[2].append(int(keys[i + 1].value.y * precision)) + tempkey[2].append(int(keys[i + 1].value.z * precision)) except IndexError: new_keys.append(key) continue @@ 
-1216,8 +1233,11 @@ def optimize_keys(self,keys): if tempkey[1] != tempkey[2]: new_keys.append(key) return new_keys - elif isinstance(keys[0].value,(NifFormat.Vector3)): - tempkey = [[int(keys[0].value.x*precision),int(keys[0].value.y*precision),int(keys[0].value.z*precision)],[int(keys[1].value.x*precision),int(keys[1].value.y*precision),int(keys[1].value.z*precision)],[int(keys[2].value.x*precision),int(keys[2].value.y*precision),int(keys[2].value.z*precision)]] + elif isinstance(keys[0].value, (NifFormat.Vector3)): + tempkey = [ + [int(keys[0].value.x * precision), int(keys[0].value.y * precision), int(keys[0].value.z * precision)], + [int(keys[1].value.x * precision), int(keys[1].value.y * precision), int(keys[1].value.z * precision)], + [int(keys[2].value.x * precision), int(keys[2].value.y * precision), int(keys[2].value.z * precision)]] for i, key in enumerate(keys): if i == 0: new_keys.append(key) @@ -1226,9 +1246,9 @@ def optimize_keys(self,keys): tempkey[1] = tempkey[2] tempkey[2] = [] try: - tempkey[2].append(int(keys[i+1].value.x*precision)) - tempkey[2].append(int(keys[i+1].value.y*precision)) - tempkey[2].append(int(keys[i+1].value.z*precision)) + tempkey[2].append(int(keys[i + 1].value.x * precision)) + tempkey[2].append(int(keys[i + 1].value.y * precision)) + tempkey[2].append(int(keys[i + 1].value.z * precision)) except IndexError: new_keys.append(key) continue @@ -1238,29 +1258,29 @@ def optimize_keys(self,keys): if tempkey[1] != tempkey[2]: new_keys.append(key) return new_keys - else: #something unhandled -- but what? - + else: # something unhandled -- but what? 
+ return keys - - def update_animation(self,old_keygroup,new_keys): - self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup.keys),len(new_keys))) + + def update_animation(self, old_keygroup, new_keys): + self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup.keys), len(new_keys))) old_keygroup.num_keys = len(new_keys) old_keygroup.keys.update_size() - for old_key, new_key in zip(old_keygroup.keys,new_keys): + for old_key, new_key in zip(old_keygroup.keys, new_keys): old_key.time = new_key.time old_key.value = new_key.value self.changed = True - - def update_animation_quaternion(self,old_keygroup,new_keys): - self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup),len(new_keys))) + + def update_animation_quaternion(self, old_keygroup, new_keys): + self.toaster.msg(_("Num keys was %i and is now %i") % (len(old_keygroup), len(new_keys))) old_keygroup.update_size() - for old_key, new_key in zip(old_keygroup,new_keys): + for old_key, new_key in zip(old_keygroup, new_keys): old_key.time = new_key.time old_key.value = new_key.value self.changed = True def branchentry(self, branch): - + if isinstance(branch, NifFormat.NiKeyframeData): # (this also covers NiTransformData) if branch.num_rotation_keys != 0: @@ -1268,20 +1288,20 @@ def branchentry(self, branch): for rotation in branch.xyz_rotations: new_keys = self.optimize_keys(rotation.keys) if len(new_keys) != rotation.num_keys: - self.update_animation(rotation,new_keys) + self.update_animation(rotation, new_keys) else: new_keys = self.optimize_keys(branch.quaternion_keys) if len(new_keys) != branch.num_rotation_keys: branch.num_rotation_keys = len(new_keys) - self.update_animation_quaternion(branch.quaternion_keys,new_keys) + self.update_animation_quaternion(branch.quaternion_keys, new_keys) if branch.translations.num_keys != 0: new_keys = self.optimize_keys(branch.translations.keys) if len(new_keys) != branch.translations.num_keys: - 
self.update_animation(branch.translations,new_keys) + self.update_animation(branch.translations, new_keys) if branch.scales.num_keys != 0: new_keys = self.optimize_keys(branch.scales.keys) if len(new_keys) != branch.scales.num_keys: - self.update_animation(branch.scales,new_keys) + self.update_animation(branch.scales, new_keys) # no children of NiKeyframeData so no need to recurse further return False elif isinstance(branch, NifFormat.NiTextKeyExtraData): @@ -1289,13 +1309,14 @@ def branchentry(self, branch): # no children of NiTextKeyExtraData so no need to recurse further return False elif isinstance(branch, NifFormat.NiFloatData): - #self.optimize_keys(branch.data.keys) + # self.optimize_keys(branch.data.keys) # no children of NiFloatData so no need to recurse further return False else: # recurse further return True - + + class SpellOptimize( pyffi.spells.SpellGroupSeries( pyffi.spells.nif.modify.SpellCleanFarNif, @@ -1311,6 +1332,6 @@ class SpellOptimize( SpellOptimizeCollisionBox, SpellOptimizeCollisionGeometry, SpellMergeDuplicates, - )): + )): """Global fixer and optimizer spell.""" SPELLNAME = "optimize" diff --git a/pyffi/spells/tga.py b/pyffi/spells/tga.py index b20b5066d..f5dea92f7 100644 --- a/pyffi/spells/tga.py +++ b/pyffi/spells/tga.py @@ -47,9 +47,11 @@ import pyffi.spells from pyffi.formats.tga import TgaFormat + class TgaSpell(pyffi.spells.Spell): """Base class for spells for tga files.""" pass + class TgaToaster(pyffi.spells.Toaster): FILEFORMAT = TgaFormat diff --git a/pyffi/utils/__init__.py b/pyffi/utils/__init__.py index 16d4090ce..12c2c396a 100644 --- a/pyffi/utils/__init__.py +++ b/pyffi/utils/__init__.py @@ -42,24 +42,27 @@ # ***** END LICENSE BLOCK ***** import os +import re from distutils.cmd import Command +sci_not = re.compile("([-+]?\d+\.?\d*)[Ee]([-+]?\d+)?") -class BuildDoc(Command): # pragma: no cover + +class BuildDoc(Command): # pragma: no cover """ Distutils command to stop setup.py from throwing errors if sphinx is not 
installed """ - + description = 'Sphinx is not installed' user_options = [] - + def initialize_options(self): self.source_dir = self.build_dir = None self.project = '' self.version = '' self.release = '' - + def finalize_options(self): return @@ -103,7 +106,8 @@ def walk(top, topdown=True, onerror=None, re_filename=None): # for c in [chr(i) for i in range(32,128)]: # table += c # table += "."*128 -chartable = '................................ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.................................................................................................................................'.encode("ascii") +chartable = '................................ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~.................................................................................................................................'.encode( + "ascii") def hex_dump(f, num_lines=8): @@ -112,8 +116,8 @@ def hex_dump(f, num_lines=8): dumpstr = "" pos = f.tell() - if pos > num_lines*8: - f.seek((pos-num_lines*8) & 0xfffffff0) + if pos > num_lines * 8: + f.seek((pos - num_lines * 8) & 0xfffffff0) else: f.seek(0) dumppos = f.tell() @@ -169,6 +173,25 @@ def unique_map(hash_generator): return hash_map, hash_map_inverse +def get_single(val): + if isinstance(val, str): + return val + try: + iter(val) + return val[0] + except TypeError: + return val + + +def parse_scientific_notation(string: str): + match = sci_not.match(string) + if match: + val = float(match[1]) + exp = pow(10, int(match[2])) + return val * exp + + if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/utils/graph.py b/pyffi/utils/graph.py index e20088551..f93c11779 100644 --- a/pyffi/utils/graph.py +++ b/pyffi/utils/graph.py @@ -29,7 +29,6 @@ The base classes are roughly based on the TreeItem example in the Qt docs: 
http://doc.trolltech.com/4.4/itemviews-simpletreemodel.html """ - # -------------------------------------------------------------------------- # ***** BEGIN LICENSE BLOCK ***** # @@ -72,6 +71,7 @@ from itertools import repeat from operator import itemgetter + class EdgeType(tuple): """Represents all possible edge types. By default, there are four types: any edge can be part of the acyclic graph or not, and can @@ -86,24 +86,27 @@ def __new__(cls, active=True, acyclic=True): active = property(itemgetter(0)) acyclic = property(itemgetter(1)) + class EdgeFilter(tuple): """A simple filter for edges. The default filter only checks the edge's active and acyclic attributes, and accepts them if both are ``True``. """ + def __new__(cls, active_filter=True, acyclic_filter=True): return tuple.__new__(cls, (active_filter, acyclic_filter)) - + active_filter = property(itemgetter(0)) acyclic_filter = property(itemgetter(1)) def accept(self, edge_type): - if not(self.active_filter is None): + if not (self.active_filter is None): if edge_type.active != self.active_filter: return False - if not(self.acyclic_filter is None): + if not (self.acyclic_filter is None): if edge_type.acyclic != self.acyclic_filter: return False + class DetailNode(object): """A node of the detail tree which can have children. 
@@ -168,6 +171,7 @@ def replace_global_node(self, oldnode, newnode, edge_filter=EdgeFilter()): """Replace a particular branch in the graph.""" raise NotImplementedError + class GlobalNode(DetailNode): """A node of the global graph.""" @@ -179,7 +183,7 @@ def get_global_display(self): """ return "" # possible implementation: - #return self.name if hasattr(self, "name") else "" + # return self.name if hasattr(self, "name") else "" def get_global_child_nodes(self, edge_filter=EdgeFilter()): """Generator which yields all children of this item in the diff --git a/pyffi/utils/inertia.py b/pyffi/utils/inertia.py index a4a14e8f0..973fd2e0c 100644 --- a/pyffi/utils/inertia.py +++ b/pyffi/utils/inertia.py @@ -39,8 +39,11 @@ # ***** END LICENSE BLOCK ***** import math +import operator + from pyffi.utils.mathutils import * + # see http://en.wikipedia.org/wiki/List_of_moment_of_inertia_tensors @@ -74,7 +77,7 @@ def getMassInertiaBox(size, density=1, solid=True): 24.0 >>> inertia ((26.0, 0, 0), (0, 20.0, 0), (0, 0, 10.0))""" - assert(len(size) == 3) # debug + assert (len(size) == 3) # debug if solid: mass = density * size[0] * size[1] * size[2] tmp = tuple(mass * (length ** 2) / 12.0 for length in size) @@ -136,7 +139,7 @@ def get_mass_center_inertia_polyhedron(vertices, triangles, density=1, solid=Tru covariance_canonical = ((2, 1, 1), (1, 2, 1), (1, 1, 2)) - covariance_correction = 1.0/120 + covariance_correction = 1.0 / 120 covariances = [] masses = [] @@ -190,7 +193,7 @@ def get_mass_center_inertia_polyhedron(vertices, triangles, density=1, solid=Tru # (this is approximate only as it replaces triangle with point mass # todo: find better way) covariances.append( - tuple(tuple(masses[-1]*x*y for x in centers[-1]) + tuple(tuple(masses[-1] * x * y for x in centers[-1]) for y in centers[-1]) ) @@ -239,6 +242,8 @@ def get_mass_center_inertia_polyhedron(vertices, triangles, density=1, solid=Tru return total_mass, total_center, total_inertia + if __name__ == "__main__": import 
doctest + doctest.testmod() diff --git a/pyffi/utils/mathutils.py b/pyffi/utils/mathutils.py index d4a934e07..d27d90785 100644 --- a/pyffi/utils/mathutils.py +++ b/pyffi/utils/mathutils.py @@ -39,7 +39,7 @@ import logging -import operator + def float_to_int(value): """Convert float to integer, rounding and handling nan and inf @@ -79,6 +79,7 @@ def float_to_int(value): "float_to_int converted -inf to -2147483648.") return -2147483648 + def getBoundingBox(veclist): """Calculate bounding box (pair of vectors with minimum and maximum coordinates). @@ -87,7 +88,7 @@ def getBoundingBox(veclist): ((0, 0, 0), (1, 1, 2))""" if not veclist: # assume 3 dimensions if veclist is empty - return (0,0,0), (0,0,0) + return (0, 0, 0), (0, 0, 0) # find bounding box dim = len(veclist[0]) @@ -95,6 +96,7 @@ def getBoundingBox(veclist): tuple((min(vec[i] for vec in veclist) for i in range(dim))), tuple((max(vec[i] for vec in veclist) for i in range(dim)))) + def getCenterRadius(veclist): """Calculate center and radius of given list of vectors. @@ -103,7 +105,7 @@ def getCenterRadius(veclist): """ if not veclist: # assume 3 dimensions if veclist is empty - return (0,0,0), 0 + return (0, 0, 0), 0 # get bounding box vecmin, vecmax = getBoundingBox(veclist) @@ -121,16 +123,20 @@ def getCenterRadius(veclist): return center, radius + def vecSub(vec1, vec2): """Vector substraction.""" return tuple(x - y for x, y in zip(vec1, vec2)) + def vecAdd(vec1, vec2): return tuple(x + y for x, y in zip(vec1, vec2)) + def vecscalarMul(vec, scalar): return tuple(x * scalar for x in vec) + def vecDotProduct(vec1, vec2): """The vector dot product (any dimension). @@ -138,6 +144,7 @@ def vecDotProduct(vec1, vec2): 12""" return sum(x1 * x2 for x1, x2 in zip(vec1, vec2)) + def vecDistance(vec1, vec2): """Return distance between two vectors (any dimension). 
@@ -146,10 +153,12 @@ def vecDistance(vec1, vec2): """ return vecNorm(vecSub(vec1, vec2)) + def vecNormal(vec1, vec2, vec3): """Returns a vector that is orthogonal on C{triangle}.""" return vecCrossProduct(vecSub(vec2, vec1), vecSub(vec3, vec1)) + def vecDistanceAxis(axis, vec): """Return distance between the axis spanned by axis[0] and axis[1] and the vector v, in 3 dimensions. Raises ZeroDivisionError if the axis points @@ -162,6 +171,7 @@ def vecDistanceAxis(axis, vec): """ return vecNorm(vecNormal(axis[0], axis[1], vec)) / vecDistance(*axis) + def vecDistanceTriangle(triangle, vert): """Return (signed) distance between the plane spanned by triangle[0], triangle[1], and triange[2], and the vector v, in 3 dimensions. @@ -173,7 +183,8 @@ def vecDistanceTriangle(triangle, vert): """ normal = vecNormal(*triangle) return vecDotProduct(normal, vecSub(vert, triangle[0])) \ - / vecNorm(normal) + / vecNorm(normal) + def vecNorm(vec): """Norm of a vector (any dimension). @@ -183,6 +194,7 @@ def vecNorm(vec): """ return vecDotProduct(vec, vec) ** 0.5 + def vecNormalized(vec): """Normalized version of a vector (any dimension). @@ -191,6 +203,7 @@ def vecNormalized(vec): """ return vecscalarMul(vec, 1.0 / vecNorm(vec)) + def vecCrossProduct(vec1, vec2): """The vector cross product (in 3d). @@ -203,58 +216,66 @@ def vecCrossProduct(vec1, vec2): vec1[2] * vec2[0] - vec1[0] * vec2[2], vec1[0] * vec2[1] - vec1[1] * vec2[0]) + def matTransposed(mat): """Return the transposed of a nxn matrix. 
>>> matTransposed(((1, 2), (3, 4))) ((1, 3), (2, 4))""" dim = len(mat) - return tuple( tuple( mat[i][j] - for i in range(dim) ) - for j in range(dim) ) + return tuple(tuple(mat[i][j] + for i in range(dim)) + for j in range(dim)) + def matscalarMul(mat, scalar): """Return matrix * scalar.""" dim = len(mat) - return tuple( tuple( mat[i][j] * scalar - for j in range(dim) ) - for i in range(dim) ) + return tuple(tuple(mat[i][j] * scalar + for j in range(dim)) + for i in range(dim)) + def matvecMul(mat, vec): """Return matrix * vector.""" dim = len(mat) - return tuple( sum( mat[i][j] * vec[j] for j in range(dim) ) - for i in range(dim) ) + return tuple(sum(mat[i][j] * vec[j] for j in range(dim)) + for i in range(dim)) + def matMul(mat1, mat2): """Return matrix * matrix.""" dim = len(mat1) - return tuple( tuple( sum( mat1[i][k] * mat2[k][j] - for k in range(dim) ) - for j in range(dim) ) - for i in range(dim) ) + return tuple(tuple(sum(mat1[i][k] * mat2[k][j] + for k in range(dim)) + for j in range(dim)) + for i in range(dim)) + def matAdd(mat1, mat2): """Return matrix + matrix.""" dim = len(mat1) - return tuple( tuple( mat1[i][j] + mat2[i][j] - for j in range(dim) ) - for i in range(dim) ) + return tuple(tuple(mat1[i][j] + mat2[i][j] + for j in range(dim)) + for i in range(dim)) + def matSub(mat1, mat2): """Return matrix - matrix.""" dim = len(mat1) - return tuple( tuple( mat1[i][j] - mat2[i][j] - for j in range(dim) ) - for i in range(dim) ) + return tuple(tuple(mat1[i][j] - mat2[i][j] + for j in range(dim)) + for i in range(dim)) + def matCofactor(mat, i, j): dim = len(mat) - return matDeterminant(tuple( tuple( mat[ii][jj] - for jj in range(dim) - if jj != j ) - for ii in range(dim) - if ii != i )) + return matDeterminant(tuple(tuple(mat[ii][jj] + for jj in range(dim) + if jj != j) + for ii in range(dim) + if ii != i)) + def matDeterminant(mat): """Calculate determinant. 
@@ -265,13 +286,18 @@ def matDeterminant(mat): 36 """ dim = len(mat) - if dim == 0: return 0 - elif dim == 1: return mat[0][0] - elif dim == 2: return mat[0][0] * mat[1][1] - mat[1][0] * mat[0][1] + if dim == 0: + return 0 + elif dim == 1: + return mat[0][0] + elif dim == 2: + return mat[0][0] * mat[1][1] - mat[1][0] * mat[0][1] else: - return sum( (-1 if i&1 else 1) * mat[i][0] * matCofactor(mat, i, 0) - for i in range(dim) ) + return sum((-1 if i & 1 else 1) * mat[i][0] * matCofactor(mat, i, 0) + for i in range(dim)) + if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/pyffi/utils/mopp.py b/pyffi/utils/mopp.py index 82c8e6134..ae2b99450 100644 --- a/pyffi/utils/mopp.py +++ b/pyffi/utils/mopp.py @@ -38,9 +38,10 @@ # ***** END LICENSE BLOCK ***** import os.path -import tempfile import subprocess import sys +import tempfile + def _skip_terminal_chars(stream): """Skip initial terminal characters (happens when mopper runs via wine).""" @@ -50,6 +51,7 @@ def _skip_terminal_chars(stream): else: stream.seek(0) + def getMopperPath(): """Get path to the mopper. @@ -66,6 +68,7 @@ def getMopperPath(): raise OSError("mopper.exe not found at %s" % mopper) return mopper + def getMopperCredits(): """Get info about mopper, and credit havok. @@ -87,7 +90,7 @@ def getMopperCredits(): :rtype: ``str`` """ mopper = getMopperPath() - outfile = tempfile.TemporaryFile("w+") # not binary + outfile = tempfile.TemporaryFile("w+") # not binary try: # get license info, credit havok (raises OSError on failure) if sys.platform == "win32": @@ -101,6 +104,7 @@ def getMopperCredits(): outfile.close() return creditstr + def getMopperOriginScaleCodeWelding(vertices, triangles, material_indices=None): """Generate mopp code and welding info for given geometry. Raises RuntimeError if something goes wrong (e.g. 
if mopp generator fails, or if @@ -153,8 +157,8 @@ def getMopperOriginScaleCodeWelding(vertices, triangles, material_indices=None): material_indices = [] mopper = getMopperPath() - infile = tempfile.TemporaryFile("w+") # not binary - outfile = tempfile.TemporaryFile("w+") # not binary + infile = tempfile.TemporaryFile("w+") # not binary + outfile = tempfile.TemporaryFile("w+") # not binary try: # set up input infile.write("%i\n" % len(vertices)) @@ -193,7 +197,8 @@ def getMopperOriginScaleCodeWelding(vertices, triangles, material_indices=None): outfile.close() return origin, scale, moppcode, welding_info + if __name__ == "__main__": import doctest - doctest.testmod() + doctest.testmod() diff --git a/pyffi/utils/quickhull.py b/pyffi/utils/quickhull.py index d778d9180..33a32dede 100644 --- a/pyffi/utils/quickhull.py +++ b/pyffi/utils/quickhull.py @@ -145,13 +145,14 @@ # # ***** END LICENSE BLOCK ***** +import operator + from pyffi.utils.mathutils import * -import operator # adapted from # http://en.literateprograms.org/Quickhull_(Python,_arrays) -def qdome2d(vertices, base, normal, precision = 0.0001): +def qdome2d(vertices, base, normal, precision=0.0001): """Build a convex dome from C{vertices} on top of the two C{base} vertices, in the plane with normal C{normal}. This is a helper function for L{qhull2d}, and should usually not be called directly. 
@@ -165,24 +166,25 @@ def qdome2d(vertices, base, normal, precision = 0.0001): :return: A list of vertices that make up a fan of the dome.""" vert0, vert1 = base - outer = [ (dist, vert) - for dist, vert - in zip( ( vecDotProduct(vecCrossProduct(normal, + outer = [(dist, vert) + for dist, vert + in zip((vecDotProduct(vecCrossProduct(normal, vecSub(vert1, vert0)), vecSub(vert, vert0)) - for vert in vertices ), - vertices ) - if dist > precision ] + for vert in vertices), + vertices) + if dist > precision] if outer: pivot = max(outer)[1] outer_verts = list(map(operator.itemgetter(1), outer)) return qdome2d(outer_verts, [vert0, pivot], normal, precision) \ - + qdome2d(outer_verts, [pivot, vert1], normal, precision)[1:] + + qdome2d(outer_verts, [pivot, vert1], normal, precision)[1:] else: return base -def qhull2d(vertices, normal, precision = 0.0001): + +def qhull2d(vertices, normal, precision=0.0001): """Simple implementation of the 2d quickhull algorithm in 3 dimensions for vertices viewed from the direction of C{normal}. Returns a fan of vertices that make up the surface. Called by @@ -223,11 +225,12 @@ def qhull2d(vertices, normal, precision = 0.0001): if len(base) >= 2: vert0, vert1 = base[:2] return qdome2d(vertices, [vert0, vert1], normal, precision) \ - + qdome2d(vertices, [vert1, vert0], normal, precision)[1:-1] + + qdome2d(vertices, [vert1, vert0], normal, precision)[1:-1] else: return base -def basesimplex3d(vertices, precision = 0.0001): + +def basesimplex3d(vertices, precision=0.0001): """Find four extreme points, to be used as a starting base for the quick hull algorithm L{qhull3d}. 
@@ -271,14 +274,14 @@ def basesimplex3d(vertices, precision = 0.0001): vert1 = max(vertices, key=operator.itemgetter(*extents)) # check if all vertices coincide if vecDistance(vert0, vert1) < precision: - return [ vert0 ] + return [vert0] # as a third extreme point select that one which maximizes the distance # from the vert0 - vert1 axis vert2 = max(vertices, key=lambda vert: vecDistanceAxis((vert0, vert1), vert)) - #check if all vertices are colinear + # check if all vertices are colinear if vecDistanceAxis((vert0, vert1), vert2) < precision: - return [ vert0, vert1 ] + return [vert0, vert1] # as a fourth extreme point select one which maximizes the distance from # the v0, v1, v2 triangle vert3 = max(vertices, @@ -287,14 +290,15 @@ def basesimplex3d(vertices, precision = 0.0001): # ensure positive orientation and check if all vertices are coplanar orientation = vecDistanceTriangle((vert0, vert1, vert2), vert3) if orientation > precision: - return [ vert0, vert1, vert2, vert3 ] + return [vert0, vert1, vert2, vert3] elif orientation < -precision: - return [ vert1, vert0, vert2, vert3 ] + return [vert1, vert0, vert2, vert3] else: # coplanar - return [ vert0, vert1, vert2 ] + return [vert0, vert1, vert2] + -def qhull3d(vertices, precision = 0.0001, verbose = False): +def qhull3d(vertices, precision=0.0001, verbose=False): """Return the triangles making up the convex hull of C{vertices}. Considers distances less than C{precision} to be zero (useful to simplify the hull of a complex mesh, at the expense of exactness of the hull). 
@@ -317,16 +321,16 @@ def qhull3d(vertices, precision = 0.0001, verbose = False): if len(hull_vertices) == 3: # coplanar hull_vertices = qhull2d(vertices, vecNormal(*hull_vertices), precision) - return hull_vertices, [ (0, i+1, i+2) - for i in range(len(hull_vertices) - 2) ] + return hull_vertices, [(0, i + 1, i + 2) + for i in range(len(hull_vertices) - 2)] elif len(hull_vertices) <= 2: # colinear or singular # no triangles for these cases return hull_vertices, [] # construct list of triangles of this simplex - hull_triangles = set([ operator.itemgetter(i,j,k)(hull_vertices) - for i, j, k in ((1,0,2), (0,1,3), (0,3,2), (3,1,2)) ]) + hull_triangles = set([operator.itemgetter(i, j, k)(hull_vertices) + for i, j, k in ((1, 0, 2), (0, 1, 3), (0, 3, 2), (3, 1, 2))]) if verbose: print("starting set", hull_vertices) @@ -335,12 +339,12 @@ def qhull3d(vertices, precision = 0.0001, verbose = False): outer_vertices = {} for triangle in hull_triangles: outer = \ - [ (dist, vert) - for dist, vert - in zip( ( vecDistanceTriangle(triangle, vert) - for vert in vertices ), - vertices ) - if dist > precision ] + [(dist, vert) + for dist, vert + in zip((vecDistanceTriangle(triangle, vert) + for vert in vertices), + vertices) + if dist > precision] if outer: outer_vertices[triangle] = outer @@ -357,23 +361,23 @@ def qhull3d(vertices, precision = 0.0001, verbose = False): hull_vertices.append(pivot) # and update the list of triangles: # 1. calculate visibility of triangles to pivot point - visibility = [ vecDistanceTriangle(othertriangle, pivot) > precision - for othertriangle in outer_vertices.keys() ] + visibility = [vecDistanceTriangle(othertriangle, pivot) > precision + for othertriangle in outer_vertices.keys()] # 2. 
get list of visible triangles - visible_triangles = [ othertriangle - for othertriangle, visible - in zip(iter(outer_vertices.keys()), visibility) - if visible ] + visible_triangles = [othertriangle + for othertriangle, visible + in zip(iter(outer_vertices.keys()), visibility) + if visible] # 3. find all edges of visible triangles visible_edges = [] for visible_triangle in visible_triangles: - visible_edges += [operator.itemgetter(i,j)(visible_triangle) - for i, j in ((0,1),(1,2),(2,0))] + visible_edges += [operator.itemgetter(i, j)(visible_triangle) + for i, j in ((0, 1), (1, 2), (2, 0))] if verbose: print("visible edges", visible_edges) # 4. construct horizon: edges that are not shared with another triangle - horizon_edges = [ edge for edge in visible_edges - if not tuple(reversed(edge)) in visible_edges ] + horizon_edges = [edge for edge in visible_edges + if not tuple(reversed(edge)) in visible_edges] # 5. remove visible triangles from list # this puts a hole inside the triangle list visible_outer = set() @@ -387,14 +391,14 @@ def qhull3d(vertices, precision = 0.0001, verbose = False): # 6. close triangle list by adding cone from horizon to pivot # also update the outer triangle list as we go for edge in horizon_edges: - newtriangle = edge + ( pivot, ) + newtriangle = edge + (pivot,) newouter = \ - [ (dist, vert) - for dist, vert in zip( ( vecDistanceTriangle(newtriangle, - vert) - for vert in visible_outer ), - visible_outer ) - if dist > precision ] + [(dist, vert) + for dist, vert in zip((vecDistanceTriangle(newtriangle, + vert) + for vert in visible_outer), + visible_outer) + if dist > precision] hull_triangles.add(newtriangle) if newouter: outer_vertices[newtriangle] = newouter @@ -404,10 +408,12 @@ def qhull3d(vertices, precision = 0.0001, verbose = False): # no triangle has outer vertices anymore # so the convex hull is complete! 
# remap the triangles to indices that point into hull_vertices - return hull_vertices, [ tuple(hull_vertices.index(vert) - for vert in triangle) - for triangle in hull_triangles ] + return hull_vertices, [tuple(hull_vertices.index(vert) + for vert in triangle) + for triangle in hull_triangles] + if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/pyffi/utils/tangentspace.py b/pyffi/utils/tangentspace.py index dea4ea35b..6f54a7aa0 100644 --- a/pyffi/utils/tangentspace.py +++ b/pyffi/utils/tangentspace.py @@ -39,9 +39,10 @@ from pyffi.utils.mathutils import * -def getTangentSpace(vertices = None, normals = None, uvs = None, - triangles = None, orientation = False, - orthogonal = True): + +def getTangentSpace(vertices=None, normals=None, uvs=None, + triangles=None, orientation=False, + orthogonal=True): """Calculate tangent space data. >>> vertices = [(0,0,0), (0,1,0), (1,0,0)] @@ -68,8 +69,8 @@ def getTangentSpace(vertices = None, normals = None, uvs = None, raise ValueError( "lists of vertices, normals, and uvs must have the same length") - bin = [(0,0,0) for i in range(len(vertices)) ] - tan = [(0,0,0) for i in range(len(vertices)) ] + bin = [(0, 0, 0) for i in range(len(vertices))] + tan = [(0, 0, 0) for i in range(len(vertices))] orientations = [0 for i in range(len(vertices))] # calculate tangents and binormals from vertex and texture coordinates @@ -103,10 +104,10 @@ def getTangentSpace(vertices = None, normals = None, uvs = None, r_sign * (w3w1[1] * v2v1[2] - w2w1[1] * v3v1[2])) try: sdir = vecNormalized(sdir) - except ZeroDivisionError: # catches zero vector - continue # skip triangle - except ValueError: # catches invalid data - continue # skip triangle + except ZeroDivisionError: # catches zero vector + continue # skip triangle + except ValueError: # catches invalid data + continue # skip triangle tdir = ( r_sign * (w2w1[0] * v3v1[0] - w3w1[0] * v2v1[0]), @@ -114,10 +115,10 @@ def getTangentSpace(vertices = None, normals = None, uvs = 
None, r_sign * (w2w1[0] * v3v1[2] - w3w1[0] * v2v1[2])) try: tdir = vecNormalized(tdir) - except ZeroDivisionError: # catches zero vector - continue # skip triangle - except ValueError: # catches invalid data - continue # skip triangle + except ZeroDivisionError: # catches zero vector + continue # skip triangle + except ValueError: # catches invalid data + continue # skip triangle # vector combination algorithm could possibly be improved for i in (t1, t2, t3): @@ -129,7 +130,7 @@ def getTangentSpace(vertices = None, normals = None, uvs = None, xvec = (1, 0, 0) yvec = (0, 1, 0) for i, norm in enumerate(normals): - if abs(1-vecNorm(norm)) > 0.01: + if abs(1 - vecNorm(norm)) > 0.01: raise ValueError( "tangentspace: unnormalized normal in list of normals (%s, norm is %f)" % (norm, vecNorm(norm))) try: @@ -165,6 +166,8 @@ def getTangentSpace(vertices = None, normals = None, uvs = None, else: return tan, bin + if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/pyffi/utils/trianglemesh.py b/pyffi/utils/trianglemesh.py index b24cac6df..42e01e6a6 100644 --- a/pyffi/utils/trianglemesh.py +++ b/pyffi/utils/trianglemesh.py @@ -65,7 +65,7 @@ # ~ Imports # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -import operator # itemgetter +import operator # itemgetter from weakref import WeakSet @@ -79,7 +79,7 @@ def __init__(self, ev0, ev1): >>> edge.verts (6, 9) """ - + if ev0 == ev1: raise ValueError("Degenerate edge.") @@ -97,6 +97,7 @@ def __repr__(self): """ return "Edge(%s, %s)" % self.verts + class Face: """An oriented face which keeps track its adjacent faces.""" @@ -166,6 +167,7 @@ class Mesh: :ivar faces: List of faces of the mesh. :type faces: ``list`` of :class:`Face`""" + def __init__(self, faces=None, lock=True): """Initialize a mesh, and optionally assign its faces and lock. 
@@ -307,7 +309,7 @@ def lock(self): # store faces and set their index self.faces = [] for i, (verts, face) in enumerate(sorted(iter(self._faces.items()), - key=operator.itemgetter(0))): + key=operator.itemgetter(0))): face.index = i self.faces.append(face) # remove helper structures @@ -336,9 +338,11 @@ def discard_face(self, face): for adj_adj_faces in adj_face.adjacent_faces: adj_adj_faces.discard(face) # faster (but breaks py3k!!): - #if id(face) in adj_adj_faces.data: + # if id(face) in adj_adj_faces.data: # del adj_adj_faces.data[id(face)] + if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/utils/trianglestripifier.py b/pyffi/utils/trianglestripifier.py index cd5dc828d..de3426d37 100644 --- a/pyffi/utils/trianglestripifier.py +++ b/pyffi/utils/trianglestripifier.py @@ -47,10 +47,8 @@ # # ***** END LICENSE BLOCK ***** -import itertools -import random # choice +from pyffi.utils.trianglemesh import Mesh -from pyffi.utils.trianglemesh import Face, Mesh class TriangleStrip(object): """A heavily specialized oriented strip of faces. @@ -304,6 +302,7 @@ def get_strip(self): strip = list(self.vertices) return strip + class Experiment(object): """A stripification experiment, essentially consisting of a set of adjacent strips. 
@@ -357,7 +356,7 @@ def build(self): # build adjacent strips num_faces = len(strip.faces) if num_faces >= 4: - face_index = num_faces >> 1 # quick / 2 + face_index = num_faces >> 1 # quick / 2 self.build_adjacent(strip, face_index) self.build_adjacent(strip, face_index + 1) elif num_faces == 3: @@ -389,13 +388,14 @@ def build_adjacent(self, strip, face_index): other_vertex = strip.vertices[face_index + 2] face_index = other_strip.build(other_vertex, other_face) self.strips.append(other_strip) - if face_index > (len(other_strip.faces) >> 1): # quick / 2 + if face_index > (len(other_strip.faces) >> 1): # quick / 2 self.build_adjacent(other_strip, face_index - 1) elif face_index < len(other_strip.faces) - 1: self.build_adjacent(other_strip, face_index + 1) return True return False + class ExperimentSelector(object): def __init__(self): @@ -419,6 +419,7 @@ def clear(self): self.best_score = -1.0 self.best_experiment = None + class TriangleStripifier(object): """Implementation of a triangle stripifier. @@ -543,6 +544,8 @@ def find_all_strips(self): for strip in selector.best_experiment.strips)) selector.clear() -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/utils/tristrip.py b/pyffi/utils/tristrip.py index a8c07ceda..55942ec6d 100644 --- a/pyffi/utils/tristrip.py +++ b/pyffi/utils/tristrip.py @@ -46,6 +46,7 @@ from pyffi.utils.trianglestripifier import TriangleStripifier from pyffi.utils.trianglemesh import Mesh + def triangulate(strips): """A generator for iterating over the faces in a set of strips. Degenerate triangles in strips are discarded. 
@@ -57,14 +58,14 @@ def triangulate(strips): triangles = [] for strip in strips: - if len(strip) < 3: continue # skip empty strips + if len(strip) < 3: continue # skip empty strips # make list copy incase input data does not like slice notation strip_list = list(strip) # flips the order of verts in every other tri flip = False - for i in range(0, len(strip_list)-2): + for i in range(0, len(strip_list) - 2): flip = not flip - t0, t1, t2 = strip_list[i:i+3] + t0, t1, t2 = strip_list[i:i + 3] # skip degenerate tri if t0 == t1 or t1 == t2 or t2 == t0: continue # append tri in correct order @@ -72,10 +73,12 @@ def triangulate(strips): return triangles + def _generate_faces_from_triangles(triangles): """Creates faces (tris) from a flat list of non-overlapping triangle indices""" for i in range(0, len(triangles), 3): - yield triangles[i], triangles[i+1], triangles[i+2] + yield triangles[i], triangles[i + 1], triangles[i + 2] + def _sort_triangle_indices(triangles): """Sorts indices of each triangle so lowest index always comes first. @@ -102,6 +105,7 @@ def _sort_triangle_indices(triangles): raise RuntimeError( "Unexpected error while sorting triangle indices.") + def _check_strips(triangles, strips): """Checks that triangles and strips describe the same geometry. @@ -139,7 +143,8 @@ def _check_strips(triangles, strips): triangles - strips_triangles, strips_triangles - triangles)) -def stripify(triangles, stitchstrips = False): + +def stripify(triangles, stitchstrips=False): """Converts triangles into a list of strips. 
If stitchstrips is True, then everything is wrapped in a single strip using @@ -200,6 +205,7 @@ def stripify(triangles, stitchstrips = False): else: return strips + class OrientedStrip: """An oriented strip, with stitching support.""" @@ -383,17 +389,18 @@ def __add__(self, other): # append stitches if num_stitches >= 1: - result.vertices.append(self.vertices[-1]) # first stitch + result.vertices.append(self.vertices[-1]) # first stitch if num_stitches >= 2: - result.vertices.append(other.vertices[0]) # second stitch + result.vertices.append(other.vertices[0]) # second stitch if num_stitches >= 3: - result.vertices.append(other.vertices[0]) # third stitch + result.vertices.append(other.vertices[0]) # third stitch # append other vertices result.vertices.extend(other.vertices) return result + def stitch_strips(strips): """Stitch strips keeping stitch size minimal. @@ -433,6 +440,7 @@ def stitch_strips(strips): class ExperimentSelector: """Helper class to select best experiment.""" + def __init__(self): self.best_ostrip1 = None self.best_ostrip2 = None @@ -442,7 +450,7 @@ def __init__(self): def update(self, ostrip_index, ostrip1, ostrip2): num_stitches = ostrip1.get_num_stitches(ostrip2) if ((self.best_num_stitches is None) - or (num_stitches < self.best_num_stitches)): + or (num_stitches < self.best_num_stitches)): self.best_ostrip1 = ostrip1 self.best_ostrip2 = ostrip2 self.best_ostrip_index = ostrip_index @@ -484,6 +492,7 @@ def update(self, ostrip_index, ostrip1, ostrip2): # return resulting strip return strip + def unstitch_strip(strip): """Revert stitched strip back to a set of strips without stitches. 
@@ -520,17 +529,17 @@ def unstitch_strip(strip): strips = [] currentstrip = [] i = 0 - while i < len(strip)-1: + while i < len(strip) - 1: winding = i & 1 currentstrip.append(strip[i]) - if strip[i] == strip[i+1]: + if strip[i] == strip[i + 1]: # stitch detected, add current strip to list of strips strips.append(currentstrip) # and start a new one, taking into account winding if winding == 1: currentstrip = [] else: - currentstrip = [strip[i+1]] + currentstrip = [strip[i + 1]] i += 1 # add last part currentstrip.extend(strip[i:]) @@ -542,6 +551,8 @@ def unstitch_strip(strip): strip.pop(0) return [strip for strip in strips if len(strip) > 3 or (len(strip) == 3 and strip[0] != strip[1])] -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/pyffi/utils/vertex_cache.py b/pyffi/utils/vertex_cache.py index 81b6f4e12..8b9523feb 100644 --- a/pyffi/utils/vertex_cache.py +++ b/pyffi/utils/vertex_cache.py @@ -48,10 +48,11 @@ from pyffi.utils.tristrip import OrientedStrip + class VertexScore: """Vertex score calculation.""" # constants used for scoring algorithm - CACHE_SIZE = 32 # higher values yield virtually no improvement + CACHE_SIZE = 32 # higher values yield virtually no improvement """The size of the modeled cache.""" CACHE_DECAY_POWER = 1.5 @@ -164,6 +165,7 @@ def update_score(self, vertex_info): min(len(vertex_info.triangle_indices), self.MAX_TRIANGLES_PER_VERTEX)] + class VertexInfo: """Stores information about a vertex.""" @@ -175,18 +177,20 @@ def __init__(self, cache_position=-1, score=-1, self.triangle_indices = ([] if triangle_indices is None else triangle_indices) + class TriangleInfo: def __init__(self, score=0, vertex_indices=None): self.score = score self.vertex_indices = ([] if vertex_indices is None else vertex_indices) + class Mesh: """Simple mesh implementation which keeps track of which triangles are used by which vertex, and vertex cache positions. 
""" - _DEBUG = False # to enable debugging of the algorithm + _DEBUG = False # to enable debugging of the algorithm def __init__(self, triangles, vertex_score=None): """Initialize mesh from given set of triangles. @@ -280,10 +284,10 @@ def get_cache_optimized_triangles(self): self.triangle_infos[triangle_index].score) best_triangle_info = self.triangle_infos[best_triangle_index] if (self._DEBUG and - globally_optimal_score - best_triangle_info.score > 0.01): - print(globally_optimal_score, - globally_optimal_score - best_triangle_info.score, - len(updated_triangles)) + globally_optimal_score - best_triangle_info.score > 0.01): + print(globally_optimal_score, + globally_optimal_score - best_triangle_info.score, + len(updated_triangles)) # mark as added self.triangle_infos[best_triangle_index] = None # append to ordered list of triangles @@ -333,6 +337,7 @@ def get_cache_optimized_triangles(self): # return result return triangles + def get_cache_optimized_triangles(triangles): """Calculate cache optimized triangles, and return the result as a reordered set of triangles or strip of stitched triangles. @@ -343,6 +348,7 @@ def get_cache_optimized_triangles(triangles): mesh = Mesh(triangles) return mesh.get_cache_optimized_triangles() + def get_unique_triangles(triangles): """Yield unique triangles. 
@@ -366,6 +372,7 @@ def get_unique_triangles(triangles): yield verts _added_triangles.add(verts) + def stable_stripify(triangles, stitchstrips=False): """Stitch all triangles together into a strip without changing the triangle ordering (for example because their ordering is already @@ -401,7 +408,7 @@ def stable_stripify(triangles, stitchstrips=False): added = False for v0, v1, v2 in indices: for ov0, ov1, ov2 in indices: - if strip[v1] == tri[ov1] and strip[v2] == tri[ov0]: + if strip[v1] == tri[ov1] and strip[v2] == tri[ov0]: strip = [strip[v0], strip[v1], strip[v2], tri[ov2]] added = True break @@ -419,12 +426,12 @@ def stable_stripify(triangles, stitchstrips=False): added = False for ov0, ov1, ov2 in indices: if len(strip) & 1: - if strip[-2] == tri[ov1] and strip[-1] == tri[ov0]: + if strip[-2] == tri[ov1] and strip[-1] == tri[ov0]: strip.append(tri[ov2]) added = True break else: - if strip[-2] == tri[ov0] and strip[-1] == tri[ov1]: + if strip[-2] == tri[ov0] and strip[-1] == tri[ov1]: strip.append(tri[ov2]) added = True break @@ -443,12 +450,14 @@ def stable_stripify(triangles, stitchstrips=False): (OrientedStrip(strip) for strip in strips)) return [list(result)] + def stripify(triangles, stitchstrips=False): """Stripify triangles, optimizing for the vertex cache.""" return stable_stripify( get_cache_optimized_triangles(triangles), stitchstrips=stitchstrips) + def get_cache_optimized_vertex_map(strips): """Map vertices so triangles/strips have consequetive indices. @@ -475,6 +484,7 @@ def get_cache_optimized_vertex_map(strips): new_vertex += 1 return vertex_map + def average_transform_to_vertex_ratio(strips, cache_size=16): """Calculate number of transforms per vertex for a given cache size and triangles/strips. See @@ -501,6 +511,8 @@ def average_transform_to_vertex_ratio(strips, cache_size=16): # no vertices... 
return 1 -if __name__=='__main__': + +if __name__ == '__main__': import doctest + doctest.testmod() diff --git a/requirements/requirements-dev.txt b/requirements/requirements-dev.txt index 54e4ad14e..2fd40951a 100644 --- a/requirements/requirements-dev.txt +++ b/requirements/requirements-dev.txt @@ -1,8 +1,8 @@ check-manifest coveralls nose -pdbpp +# pdbpp pyflakes -pytest==3.6 +pytest==7.2.1 pytest-cov wheel \ No newline at end of file diff --git a/requirements/requirements-docs.txt b/requirements/requirements-docs.txt index 8d25e30c2..4d724ca8d 100644 --- a/requirements/requirements-docs.txt +++ b/requirements/requirements-docs.txt @@ -1,2 +1,2 @@ -sphinx>=2.0.0 +sphinx>=6.0.0 niftools_sphinx_theme>=0.3.3 \ No newline at end of file diff --git a/requirements/requirements.txt b/requirements/requirements.txt index e69de29bb..86c871ed7 100644 --- a/requirements/requirements.txt +++ b/requirements/requirements.txt @@ -0,0 +1 @@ +lxml \ No newline at end of file diff --git a/scripts/cgf/cgftoaster.py b/scripts/cgf/cgftoaster.py index b68ab67b3..da3c0a9bf 100644 --- a/scripts/cgf/cgftoaster.py +++ b/scripts/cgf/cgftoaster.py @@ -49,9 +49,9 @@ import pyffi.spells.cgf import pyffi.spells.cgf.check import pyffi.spells.cgf.dump -import pyffi.formats.cgf import pyffi.spells.check + class CgfToaster(pyffi.spells.cgf.CgfToaster): """Class for toasting cgf files, using any of the available spells.""" SPELLS = [ @@ -76,6 +76,7 @@ class CgfToaster(pyffi.spells.cgf.CgfToaster): python -m cProfile -s cumulative cgftoaster.py -j 1 dump""" + # if script is called... 
if __name__ == "__main__": # set up logger diff --git a/scripts/nif/nifmakehsl.py b/scripts/nif/nifmakehsl.py index e8b11b3ea..ff6cf4761 100644 --- a/scripts/nif/nifmakehsl.py +++ b/scripts/nif/nifmakehsl.py @@ -61,12 +61,10 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- -import sys -from types import * from string import maketrans from pyffi.formats.nif import NifFormat -from pyffi.object_models.xml.basic import BasicBase + def find_templates(): # find all types that are used as a template (excluding the ones @@ -78,29 +76,33 @@ def find_templates(): templates.add(attr.template) return templates + transtable = maketrans('?', '_') + + def sanitize_attrname(s): return s.translate(transtable) + def write_hsl(f, ver, templates): # map basic NifFormat types to HWS types and enum byte size hsl_types = { - NifFormat.int : ('long', 4), - NifFormat.uint : ('ulong', 4), - NifFormat.short : ('short', 2), - NifFormat.ushort : ('ushort', 2), - NifFormat.Flags : ('ushort', None), - NifFormat.byte : ('ubyte ', 1), - NifFormat.char : ('char', None), - NifFormat.float : ('float', None), - NifFormat.Ref : ('long', None), - NifFormat.Ptr : ('long', None), - NifFormat.FileVersion : ('ulong', None), + NifFormat.int: ('long', 4), + NifFormat.uint: ('ulong', 4), + NifFormat.short: ('short', 2), + NifFormat.ushort: ('ushort', 2), + NifFormat.Flags: ('ushort', None), + NifFormat.byte: ('ubyte ', 1), + NifFormat.char: ('char', None), + NifFormat.float: ('float', None), + NifFormat.Ref: ('long', None), + NifFormat.Ptr: ('long', None), + NifFormat.FileVersion: ('ulong', None), # some stuff we cannot do in hex workshop - NifFormat.HeaderString : ('char', None), - NifFormat.LineString : ('char', None) } - # hack for string (TODO fix this in NifFormat) - #NifFormat.string : ('struct string', None) } + NifFormat.HeaderString: ('char', None), + NifFormat.LineString: ('char', None)} + # hack for string (TODO fix this in NifFormat) + 
# NifFormat.string : ('struct string', None) } if ver <= 0x04000002: hsl_types[NifFormat.bool] = ('ulong', 4) @@ -113,7 +115,7 @@ def write_hsl(f, ver, templates): #pragma byteorder(little_endian) #pragma maxarray(65535) -"""%ver) +""" % ver) # write each enum class for cls in NifFormat.xml_enum: @@ -121,7 +123,7 @@ def write_hsl(f, ver, templates): # write each struct class for cls in NifFormat.xml_struct: - if cls.__name__[:3] == 'ns ': continue # cheat: skip ns types + if cls.__name__[:3] == 'ns ': continue # cheat: skip ns types if not cls._is_template: # write regular class write_struct(cls, ver, hsl_types, f, None) @@ -130,14 +132,15 @@ def write_hsl(f, ver, templates): for template in templates: write_struct(cls, ver, hsl_types, f, template) + def write_enum(cls, ver, hsl_types, f): # set enum size f.write('#pragma enumsize(%s)\n' % cls._numbytes) f.write('typedef enum tag' + cls.__name__ + ' {\n') ## list of all non-private attributes gives enum constants - #enum_items = [x for x in cls.__dict__.items() if x[0][:2] != '__'] + # enum_items = [x for x in cls.__dict__.items() if x[0][:2] != '__'] ## sort them by value - #enum_items = sorted(enum_items, key=lambda x: x[1]) + # enum_items = sorted(enum_items, key=lambda x: x[1]) # and write out all name, value pairs enum_items = list(zip(cls._enumkeys, cls._enumvalues)) for const_name, const_value in enum_items[:-1]: @@ -146,28 +149,29 @@ def write_enum(cls, ver, hsl_types, f): f.write(' ' + const_name + ' = ' + str(const_value) + '\n') f.write('} ' + cls.__name__ + ';\n\n') + def write_struct(cls, ver, hsl_types, f, template): # open the structure if not template: f.write('struct ' + cls.__name__ + ' {\n') else: f.write('struct ' + cls.__name__ + '_' + template.__name__ + ' {\n') - #for attrname, typ, default, tmpl, arg, arr1, arr2, cond, ver1, ver2, userver, doc in cls._attribute_list: + # for attrname, typ, default, tmpl, arg, length, width, cond, since, until, userver, doc in cls._attribute_list: for attr in 
cls._attribute_list: # check version if not (ver is None): - if (not (attr.ver1 is None)) and ver < attr.ver1: + if (not (attr.since is None)) and ver < attr.since: continue - if (not (attr.ver2 is None)) and ver > attr.ver2: + if (not (attr.until is None)) and ver > attr.until: continue s = ' ' # things that can only be determined at runtime (rt_xxx) rt_type = attr.type_ if attr.type_ != type(None) \ - else template + else template rt_template = attr.template if attr.template != type(None) \ - else template + else template # get the attribute type name try: @@ -175,12 +179,12 @@ def write_struct(cls, ver, hsl_types, f, template): except KeyError: if rt_type in NifFormat.xml_enum: s += rt_type.__name__ - else: # it's in NifFormat.xml_struct + else: # it's in NifFormat.xml_struct s += 'struct ' + rt_type.__name__ # get the attribute template type name if (not rt_template is None) and (not issubclass(rt_type, NifFormat.Ref)): s += '_' - s += rt_template.__name__ # note: basic types are named by their xml name in the template + s += rt_template.__name__ # note: basic types are named by their niftoolsxml name in the template # attribute name s = s.ljust(20) + ' ' + sanitize_attrname(attr.name) # array and conditional arguments @@ -188,22 +192,22 @@ def write_struct(cls, ver, hsl_types, f, template): comments = '' if not attr.cond is None: # catch argument passing and double arrays - if (str(attr.cond).find('arg') == -1) and (attr.arr2 is None): + if (str(attr.cond).find('arg') == -1) and (attr.width is None): if attr.cond._op is None or (attr.cond._op == '!=' and attr.cond._right == 0): arr_str += sanitize_attrname(str(attr.cond._left)) else: comments += ' (' + sanitize_attrname(str(attr.cond)) + ')' else: comments += ' (' + sanitize_attrname(str(attr.cond)) + ')' - if attr.arr1 is None: + if attr._length is None: pass - elif attr.arr2 is None: - if str(attr.arr1).find('arg') == -1: # catch argument passing + elif attr.width is None: + if str(attr._length).find('arg') 
== -1: # catch argument passing if arr_str: arr_str += ' * ' - arr_str += sanitize_attrname(str(attr.arr1._left)) - if attr.arr1._op: - comments += ' [' + sanitize_attrname(str(attr.arr1)) + ']' + arr_str += sanitize_attrname(str(attr._length._left)) + if attr._length._op: + comments += ' [' + sanitize_attrname(str(attr._length)) + ']' else: if arr_str: arr_str += ' * ' @@ -213,20 +217,21 @@ def write_struct(cls, ver, hsl_types, f, template): # TODO catch args here too (so far not used anywhere in nif.xml) if arr_str: arr_str += ' * ' - arr_str += sanitize_attrname(str(attr.arr1._left)) + ' * ' + sanitize_attrname(str(attr.arr2._left)) - if attr.arr1._op or attr.arr2._op: - comments += ' [' + sanitize_attrname(str(attr.arr1)) + ' * ' + sanitize_attrname(str(attr.arr2)) + ']' + arr_str += sanitize_attrname(str(attr._length._left)) + ' * ' + sanitize_attrname(str(attr.width._left)) + if attr._length._op or attr.width._op: + comments += ' [' + sanitize_attrname(str(attr._length)) + ' * ' + sanitize_attrname(str(attr.width)) + ']' arr_str = '[' + arr_str + ']' if arr_str else '' comments = ' //' + comments if comments else '' f.write(s + arr_str + ';' + comments + '\n') # close the structure f.write('};\n\n') + if __name__ == '__main__': # list all types used as a template templates = find_templates() # write out hex structure library for each nif version - for ver_str, ver in list(NifFormat.versions.items()): + for ver_str, ver in list(NifFormat.versions_num.items()): f = open('nif_' + ver_str.replace('.', '_') + '.hsl', 'w') try: write_hsl(f, ver, templates) diff --git a/scripts/nif/niftoaster.py b/scripts/nif/niftoaster.py index 644f29a35..f639d3f56 100644 --- a/scripts/nif/niftoaster.py +++ b/scripts/nif/niftoaster.py @@ -50,8 +50,8 @@ import pyffi.spells.nif.check import pyffi.spells.nif.dump import pyffi.spells.nif.fix -import pyffi.spells.nif.optimize import pyffi.spells.nif.modify +import pyffi.spells.nif.optimize class NifToaster(pyffi.spells.nif.NifToaster): 
@@ -100,7 +100,7 @@ class NifToaster(pyffi.spells.nif.NifToaster): pyffi.spells.nif.optimize.SpellCleanRefLists, pyffi.spells.nif.optimize.SpellMergeDuplicates, pyffi.spells.nif.optimize.SpellOptimizeGeometry, - #pyffi.spells.nif.optimize.SpellOptimizeSplit, + # pyffi.spells.nif.optimize.SpellOptimizeSplit, pyffi.spells.nif.optimize.SpellOptimize, pyffi.spells.nif.optimize.SpellDelUnusedBones, pyffi.spells.nif.optimize.SpellDelZeroScale, @@ -138,7 +138,7 @@ class NifToaster(pyffi.spells.nif.NifToaster): pyffi.spells.nif.optimize.SpellOptimizeAnimation, pyffi.spells.nif.check.SpellCheckMaterialEmissiveValue, pyffi.spells.nif.modify.SpellMirrorAnimation - ] + ] ALIASDICT = { "texdump": "dump_tex", "read": "check_read", @@ -148,7 +148,7 @@ class NifToaster(pyffi.spells.nif.NifToaster): "exportpixeldata": "dump_pixeldata", "scale": "fix_scale", "opt_cleanfarnif": "modify_cleanfarnif", - } + } EXAMPLES = """* check if PyFFI can read all files in current directory (python version of nifskope's xml checker): @@ -186,6 +186,7 @@ class NifToaster(pyffi.spells.nif.NifToaster): python niftoaster.py -a 100 fix_scale "c:\\zoo2" """ + # if script is called... 
if __name__ == "__main__": # set up logger @@ -198,4 +199,3 @@ class NifToaster(pyffi.spells.nif.NifToaster): logger.addHandler(loghandler) # call toaster NifToaster().cli() - diff --git a/scripts/patch_recursive_apply.py b/scripts/patch_recursive_apply.py index 9c0415ad0..91a6c8372 100755 --- a/scripts/patch_recursive_apply.py +++ b/scripts/patch_recursive_apply.py @@ -40,7 +40,6 @@ # ***** END LICENSE BLOCK ***** import argparse -import shutil import os import os.path import subprocess @@ -54,7 +53,7 @@ parser.add_argument( 'patch_cmd', metavar="CMD", type=str, help="use CMD to apply a patch between files; this command must " - "accept at least 3 arguments: 'CMD oldfile newfile patchfile ...'") + "accept at least 3 arguments: 'CMD oldfile newfile patchfile ...'") parser.add_argument( 'in_folder', type=str, help="folder containing original files") @@ -66,6 +65,7 @@ help="folder containing patch files") args, unknown_args = parser.parse_known_args() + # actual script def patch_cmd(in_file, out_file, patch_file): @@ -82,6 +82,7 @@ def patch_cmd(in_file, out_file, patch_file): print("applying %s" % patch_file) subprocess.call(command) + for dirpath, dirnames, filenames in os.walk(args.in_folder): for filename in filenames: in_file = os.path.join(dirpath, filename) diff --git a/scripts/patch_recursive_make.py b/scripts/patch_recursive_make.py index dc6c1e423..e3012db30 100755 --- a/scripts/patch_recursive_make.py +++ b/scripts/patch_recursive_make.py @@ -40,7 +40,6 @@ # ***** END LICENSE BLOCK ***** import argparse -import shutil import os import os.path import subprocess @@ -54,7 +53,7 @@ parser.add_argument( 'patch_cmd', metavar="CMD", type=str, help="use CMD to make a patch between files; this command must " - "accept at least 3 arguments: 'CMD oldfile newfile patchfile ...'") + "accept at least 3 arguments: 'CMD oldfile newfile patchfile ...'") parser.add_argument( 'in_folder', type=str, help="folder containing original files") @@ -66,6 +65,7 @@ help="folder where 
patch files will be stored (should be empty)") args, unknown_args = parser.parse_known_args() + # actual script def patch_cmd(in_file, out_file, patch_file): @@ -85,6 +85,7 @@ def patch_cmd(in_file, out_file, patch_file): print("skipped %s (no out file)" % in_file) return + for dirpath, dirnames, filenames in os.walk(args.in_folder): for filename in filenames: in_file = os.path.join(dirpath, filename) diff --git a/scripts/qskope.py b/scripts/qskope.py index e5ff81265..98ea8d186 100755 --- a/scripts/qskope.py +++ b/scripts/qskope.py @@ -57,6 +57,7 @@ import sys from optparse import OptionParser + # main script function def main(): """The main script function. Does argument parsing, file type checking, @@ -66,8 +67,8 @@ def main(): description = """Parse and display the file .""" parser = OptionParser(usage, - version = "%prog $Rev$", - description = description) + version="%prog $Rev$", + description=description) (options, args) = parser.parse_args() if len(args) > 1: @@ -77,10 +78,11 @@ def main(): app = QtGui.QApplication(sys.argv) mainwindow = QSkope() if len(args) >= 1: - mainwindow.openFile(filename = args[0]) + mainwindow.openFile(filename=args[0]) mainwindow.show() sys.exit(app.exec_()) + if __name__ == "__main__": # set up logger logger = logging.getLogger("pyffi") diff --git a/scripts/rockstar_pack_dir_img.py b/scripts/rockstar_pack_dir_img.py index 117c3b687..a6616e052 100755 --- a/scripts/rockstar_pack_dir_img.py +++ b/scripts/rockstar_pack_dir_img.py @@ -72,13 +72,14 @@ usage= "Usage: %prog source_folder destination_folder\n\n" + __doc__ - ) +) (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit() unpack_folder, out_folder = args + # actual script def pack(arcroot): @@ -90,6 +91,7 @@ def pack(arcroot): with open(os.path.join(out_folder, arcroot) + '.img', 'wb') as imgfile: dirdata.pack(imgfile, folder) + for arcname in os.listdir(unpack_folder): if os.path.isdir(os.path.join(unpack_folder, arcname)): pack(arcname) diff --git 
a/scripts/rockstar_unpack_dir_img.py b/scripts/rockstar_unpack_dir_img.py index 29bef47c0..fe2f874ce 100755 --- a/scripts/rockstar_unpack_dir_img.py +++ b/scripts/rockstar_unpack_dir_img.py @@ -72,13 +72,14 @@ usage= "Usage: %prog source_folder destination_folder\n\n" + __doc__ - ) +) (options, args) = parser.parse_args() if len(args) != 2: parser.print_help() exit() in_folder, unpack_folder = args + # actual script def unpack(arcroot): @@ -91,7 +92,8 @@ def unpack(arcroot): with open(os.path.join(in_folder, arcroot) + '.img', 'rb') as imgfile: dirdata.unpack(imgfile, folder) + for arcname in os.listdir(in_folder): if (arcname.endswith('.dir') - and os.path.isfile(os.path.join(in_folder, arcname))): + and os.path.isfile(os.path.join(in_folder, arcname))): unpack(arcname[:-4]) diff --git a/setup.py b/setup.py index f17b446c6..f9a665fdc 100644 --- a/setup.py +++ b/setup.py @@ -12,6 +12,8 @@ PACKAGES = [ 'pyffi', 'pyffi.object_models', + 'pyffi.object_models.mex', + 'pyffi.object_models.niftoolsxml', 'pyffi.object_models.xml', 'pyffi.object_models.xsd', 'pyffi.utils', diff --git a/tests/formats/nif/files/CREDITS.txt b/tests/formats/nif/files/CREDITS.txt new file mode 100644 index 000000000..59163802e --- /dev/null +++ b/tests/formats/nif/files/CREDITS.txt @@ -0,0 +1,21 @@ +fallout3_switch: Animated Light Switch by odin_ml +- https://www.nexusmods.com/fallout3/mods/12677 + +fallout4_refrigerator: Full Clean Kitchen Set (Modder's Resource) by ScottyX2 +- https://www.nexusmods.com/fallout4/mods/44751 + +falloutnv_goldbar: Gold Bars Modders Resource V2 by lordinquisitor +- https://www.nexusmods.com/newvegas/mods/40766 + +morrowind_mossyrock: Mossy Rocks by Calenfair +- https://www.nexusmods.com/morrowind/mods/46244 + +oblivion_chair: Ancient Elven Furniture for modders by css0101 +- https://www.nexusmods.com/oblivion/mods/27797 + +skyrim_cookiechip: Sweets and Such - Modders Resource LE by CatPancake +- https://www.nexusmods.com/skyrim/mods/114141 + + +skyrimse_cookiechip:
Sweets and Such - Modders Resource SE by CatPancake +- https://www.nexusmods.com/skyrimspecialedition/mods/80782 diff --git a/tests/formats/nif/files/fallout3_switch.nif b/tests/formats/nif/files/fallout3_switch.nif new file mode 100644 index 000000000..e7b36272c Binary files /dev/null and b/tests/formats/nif/files/fallout3_switch.nif differ diff --git a/tests/formats/nif/files/fallout4_refrigerator.nif b/tests/formats/nif/files/fallout4_refrigerator.nif new file mode 100644 index 000000000..780c3847a Binary files /dev/null and b/tests/formats/nif/files/fallout4_refrigerator.nif differ diff --git a/tests/formats/nif/files/falloutnv_goldbar.nif b/tests/formats/nif/files/falloutnv_goldbar.nif new file mode 100644 index 000000000..d5add8a9e Binary files /dev/null and b/tests/formats/nif/files/falloutnv_goldbar.nif differ diff --git a/tests/formats/nif/files/materials/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01.BGSM b/tests/formats/nif/files/materials/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01.BGSM new file mode 100644 index 000000000..c93add68b Binary files /dev/null and b/tests/formats/nif/files/materials/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01.BGSM differ diff --git a/tests/formats/nif/files/morrowind_mossyrock.nif b/tests/formats/nif/files/morrowind_mossyrock.nif new file mode 100644 index 000000000..83655f0ba Binary files /dev/null and b/tests/formats/nif/files/morrowind_mossyrock.nif differ diff --git a/tests/formats/nif/files/oblivion_chair.nif b/tests/formats/nif/files/oblivion_chair.nif new file mode 100644 index 000000000..51599f3a5 Binary files /dev/null and b/tests/formats/nif/files/oblivion_chair.nif differ diff --git a/tests/formats/nif/files/skyrim_cookiechip.nif b/tests/formats/nif/files/skyrim_cookiechip.nif new file mode 100644 index 000000000..c34e519e4 Binary files /dev/null and b/tests/formats/nif/files/skyrim_cookiechip.nif differ diff --git 
a/tests/formats/nif/files/skyrimse_cookiechip.nif b/tests/formats/nif/files/skyrimse_cookiechip.nif new file mode 100644 index 000000000..f8fa1b9d8 Binary files /dev/null and b/tests/formats/nif/files/skyrimse_cookiechip.nif differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1.dds new file mode 100644 index 000000000..aadf3875c Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf.dds new file mode 100644 index 000000000..a564710e4 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf_n.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf_n.dds new file mode 100644 index 000000000..461164ed3 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1Leaf_n.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1_n.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1_n.dds new file mode 100644 index 000000000..b6c7874b7 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze1_n.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2.dds new file mode 100644 index 000000000..3fbfe3124 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2_n.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2_n.dds new file mode 100644 index 000000000..2761c4110 Binary files /dev/null and 
b/tests/formats/nif/files/textures/ClutterNew/Furniture/Bronze2_n.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_1.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_1.dds new file mode 100644 index 000000000..8f0bbc63f Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_1.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_n.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_n.dds new file mode 100644 index 000000000..f2b3b1d0f Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Fabric05_n.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16.dds new file mode 100644 index 000000000..1ea93e115 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16.dds differ diff --git a/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16_n.dds b/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16_n.dds new file mode 100644 index 000000000..076e373a7 Binary files /dev/null and b/tests/formats/nif/files/textures/ClutterNew/Furniture/Wood16_n.dds differ diff --git a/tests/formats/nif/files/textures/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01_d.dds b/tests/formats/nif/files/textures/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01_d.dds new file mode 100644 index 000000000..a26211bcf Binary files /dev/null and b/tests/formats/nif/files/textures/Setdressing/PlayerHouse+/PlayerHouse_KitchenRefrigeratorWhite01_d.dds differ diff --git a/tests/formats/nif/files/textures/clutter/junk/GoldBarGD.dds b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD.dds new file mode 100644 index 000000000..e15155c0f Binary files /dev/null and b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD.dds differ diff --git 
a/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_N.dds b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_N.dds new file mode 100644 index 000000000..6ad827760 Binary files /dev/null and b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_N.dds differ diff --git a/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_m.dds b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_m.dds new file mode 100644 index 000000000..f5a1ef093 Binary files /dev/null and b/tests/formats/nif/files/textures/clutter/junk/GoldBarGD_m.dds differ diff --git a/tests/formats/nif/files/textures/kv/sweets/roll_d.dds b/tests/formats/nif/files/textures/kv/sweets/roll_d.dds new file mode 100644 index 000000000..fe4540ccb Binary files /dev/null and b/tests/formats/nif/files/textures/kv/sweets/roll_d.dds differ diff --git a/tests/formats/nif/files/textures/kv/sweets/roll_n.dds b/tests/formats/nif/files/textures/kv/sweets/roll_n.dds new file mode 100644 index 000000000..93a149a42 Binary files /dev/null and b/tests/formats/nif/files/textures/kv/sweets/roll_n.dds differ diff --git a/tests/formats/nif/files/textures/odinml/lightswitch.dds b/tests/formats/nif/files/textures/odinml/lightswitch.dds new file mode 100644 index 000000000..168ece087 Binary files /dev/null and b/tests/formats/nif/files/textures/odinml/lightswitch.dds differ diff --git a/tests/formats/nif/files/textures/odinml/lightswitch_n.dds b/tests/formats/nif/files/textures/odinml/lightswitch_n.dds new file mode 100644 index 000000000..b415419ad Binary files /dev/null and b/tests/formats/nif/files/textures/odinml/lightswitch_n.dds differ diff --git a/tests/formats/nif/files/textures/tx_merp_mirkwood_stone01.dds b/tests/formats/nif/files/textures/tx_merp_mirkwood_stone01.dds new file mode 100644 index 000000000..897e30f32 Binary files /dev/null and b/tests/formats/nif/files/textures/tx_merp_mirkwood_stone01.dds differ diff --git a/tests/formats/nif/test_bhkpackednitristrips.py 
b/tests/formats/nif/test_bhkpackednitristrips.py index 90a71f577..238fa2f27 100644 --- a/tests/formats/nif/test_bhkpackednitristrips.py +++ b/tests/formats/nif/test_bhkpackednitristrips.py @@ -1,12 +1,11 @@ from pyffi.formats.nif import NifFormat -import nose -def test_bhkPackedNiTriStripsShape(): +def test_bhkPackedNiTriStripsShape(): # Adding Shapes shape = NifFormat.bhkPackedNiTriStripsShape() - nose.tools.assert_equal(shape.num_sub_shapes, 0) - nose.tools.assert_true(shape.data is None) + assert shape.num_sub_shapes == 0 + assert shape.data is None triangles1 = [(0, 1, 2)] normals1 = [(1, 0, 0)] vertices1 = [(0, 0, 0), (0, 0, 1), (0, 1, 0)] @@ -15,22 +14,21 @@ def test_bhkPackedNiTriStripsShape(): vertices2 = [(0, 0, 0), (0, 0, 1), (0, 1, 0), (0, 0, 0)] shape.add_shape(triangles=triangles1, normals=normals1, vertices=vertices1, layer=1, material=2) shape.add_shape(triangles=triangles2, normals=normals2, vertices=vertices2, layer=3, material=4) - nose.tools.assert_equal(shape.num_sub_shapes, 2) - nose.tools.assert_equal(shape.sub_shapes[0].layer, 1) - nose.tools.assert_equal(shape.sub_shapes[0].num_vertices, 3) - nose.tools.assert_equal(shape.sub_shapes[0].material.material, 2) - nose.tools.assert_equal(shape.sub_shapes[1].layer, 3) - nose.tools.assert_equal(shape.sub_shapes[1].num_vertices, 4) - nose.tools.assert_equal(shape.sub_shapes[1].material.material, 4) + assert shape.num_sub_shapes == 2 + assert shape.sub_shapes[0].layer == 1 + assert shape.sub_shapes[0].num_vertices == 3 + assert shape.sub_shapes[0].material.material == 2 + assert shape.sub_shapes[1].layer == 3 + assert shape.sub_shapes[1].num_vertices == 4 + assert shape.sub_shapes[1].material.material == 4 # for fallout 3 the subshape info is stored in the shape data - nose.tools.assert_equal(shape.data.num_sub_shapes, 2) - nose.tools.assert_equal(shape.data.sub_shapes[0].layer, 1) - nose.tools.assert_equal(shape.data.sub_shapes[0].num_vertices, 3) - 
nose.tools.assert_equal(shape.data.sub_shapes[0].material.material, 2) - nose.tools.assert_equal(shape.data.sub_shapes[1].layer, 3) - nose.tools.assert_equal(shape.data.sub_shapes[1].num_vertices, 4) - nose.tools.assert_equal(shape.data.sub_shapes[1].material.material, 4) - nose.tools.assert_equal(shape.data.num_triangles, 3) - nose.tools.assert_equal(shape.data.num_vertices, 7) - + assert shape.data.num_sub_shapes == 2 + assert shape.data.sub_shapes[0].layer == 1 + assert shape.data.sub_shapes[0].num_vertices == 3 + assert shape.data.sub_shapes[0].material.material == 2 + assert shape.data.sub_shapes[1].layer == 3 + assert shape.data.sub_shapes[1].num_vertices == 4 + assert shape.data.sub_shapes[1].material.material == 4 + assert shape.data.num_triangles == 3 + assert shape.data.num_vertices == 7 diff --git a/tests/formats/nif/test_games.py b/tests/formats/nif/test_games.py new file mode 100644 index 000000000..48621f3f8 --- /dev/null +++ b/tests/formats/nif/test_games.py @@ -0,0 +1,442 @@ +import pytest +from os import path +from os.path import dirname + +from pyffi.formats.nif import NifFormat + +dir_path = __file__ +for i in range(1): # recurse up to root repo dir + dir_path = dirname(dir_path) +test_root = dir_path + + +def read_file(name: str): + data = NifFormat.Data() + with open(path.join(test_root, "files", name), 'rb') as stream: + data.read(stream) + return data + + +def test_import_fallout3(): + data = read_file("fallout3_switch.nif") + assert data.version == NifFormat.version_number("20.2.0.7") + assert data.user_version == 11 + assert data.bs_version == 34 + assert data.header.num_blocks == 46 + assert data.header.num_block_types == 24 + assert data.header.num_strings == 18 + + # 0: BSFadeNode + block = data.blocks[0] + assert isinstance(block, NifFormat.BSFadeNode) + assert block.name == b"GenElecSwitch01" + assert block.num_extra_data_list == 2 + assert int(block.flags) == 0x0008000e + assert block.translation == NifFormat.Vector3.zero() + assert 
block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_properties == 0 + assert block.num_children == 1 + assert block.num_effects == 0 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + assert block.integer_data == 11 + + # 2: NiStringExtraData + block = data.blocks[2] + assert isinstance(block, NifFormat.NiStringExtraData) + assert block.name == b"UPB" + assert block.string_data == b"KFAccumRoot = \r\nUnyielding = 0\r\n" + + # 18: NiNode + block = data.blocks[18] + assert isinstance(block, NifFormat.NiNode) + + # 25: NiTriStrips + block = data.blocks[25] + assert isinstance(block, NifFormat.NiTriStrips) + assert block.name == b"GenElecSwitch01:0" + assert block.num_properties == 2 + + # 29: NiTriStripsData + block = data.blocks[29] + assert isinstance(block, NifFormat.NiTriStripsData) + assert block.num_vertices == 480 + assert block.num_triangles == 288 + assert block.num_strips == 122 + + # 30: NiTriStrips + block = data.blocks[30] + assert isinstance(block, NifFormat.NiTriStrips) + assert block.name == b"GenElecSwitch01:3" + assert block.num_properties == 3 + + # 34: NiTriStripsData + block = data.blocks[34] + assert isinstance(block, NifFormat.NiTriStripsData) + assert block.num_vertices == 12 + assert block.num_triangles == 12 + assert block.num_strips == 2 + + +def test_import_fallout4(): + data = read_file("fallout4_refrigerator.nif") + assert data.version == NifFormat.version_number("20.2.0.7") + assert data.user_version == 12 + assert data.bs_version == 130 + assert data.header.num_blocks == 37 + assert data.header.num_block_types == 18 + assert data.header.num_strings == 19 + + # 0: NiNode + block = data.blocks[0] + assert isinstance(block, NifFormat.NiNode) + assert block.name == b"PlayerHouse_KitchenRefrigerator01" + assert block.num_extra_data_list == 1 + assert block.flags == 14 + assert block.translation == NifFormat.Vector3.zero() + 
assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_children == 2 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + + # 19: NiNode + block = data.blocks[19] + assert isinstance(block, NifFormat.NiNode) + assert block.name == b"PlayerHouse_KitchenRefrigerator01_Mesh" + assert block.num_extra_data_list == 0 + assert block.flags == 14 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_children == 3 + + # 22: BSTriShape + block = data.blocks[22] + assert isinstance(block, NifFormat.BSTriShape) + assert block.name == b"PlayerHouse_KitchenRefrigerator01_Mesh:31" + assert block.num_extra_data_list == 0 + assert block.flags == 14 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_triangles == 146 + assert block.num_vertices == 202 + assert block.data_size == 7340 + + # 27: BSTriShape + block = data.blocks[27] + assert isinstance(block, NifFormat.BSTriShape) + assert block.name == b"PlayerHouse_KitchenRefrigerator01_Mesh:34" + assert block.num_extra_data_list == 0 + assert block.flags == 14 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_triangles == 1771 + assert block.num_vertices == 1434 + assert block.data_size == 56514 + + # 32: BSTriShape + block = data.blocks[32] + assert isinstance(block, NifFormat.BSTriShape) + assert block.name == b"PlayerHouse_KitchenRefrigerator01_Mesh:113" + assert block.num_extra_data_list == 0 + assert block.flags == 14 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_triangles == 2 + assert 
block.num_vertices == 4 + assert block.data_size == 156 + + +def test_import_falloutnv(): + data = read_file("falloutnv_goldbar.nif") + assert data.version == NifFormat.version_number("20.2.0.7") + assert data.user_version == 11 + assert data.bs_version == 34 + assert data.header.num_blocks == 13 + assert data.header.num_block_types == 13 + assert data.header.num_strings == 4 + + # 0: BSFadeNode + block = data.blocks[0] + assert isinstance(block, NifFormat.BSFadeNode) + assert block.name == b"Scene Root" + assert block.num_extra_data_list == 1 + assert int(block.flags) == 524302 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_properties == 0 + assert block.num_children == 1 + assert block.num_effects == 0 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + assert block.integer_data == 3 + + # 6: NiTriStrips + block = data.blocks[6] + assert isinstance(block, NifFormat.NiTriStrips) + assert block.name == b"GoldBarGD" + assert block.num_extra_data_list == 0 + assert block.flags == 524302 + assert block.translation == NifFormat.Vector3.zero() + # assert block.rotation == 0 to bothered to actually + assert block.num_properties == 4 + + # 12: NiTriStripsData + block = data.blocks[12] + assert isinstance(block, NifFormat.NiTriStripsData) + assert block.group_id == 0 + assert block.num_vertices == 120 + assert block.keep_flags == 0 + assert block.compress_flags == 0 + assert block.has_vertices + assert block.has_normals + assert block.bounding_sphere.center == NifFormat.Vector3.create(0, 0, 1.279867) + assert block.bounding_sphere.radius == pytest.approx(9.251637) + assert not block.has_vertex_colors + assert block.num_triangles == 249 + assert block.num_strips == 1 + assert block.has_points + + +def test_import_morrowind(): + data = read_file("morrowind_mossyrock.nif") + assert data.version == 
NifFormat.version_number("4.0.0.2") + assert data.header.num_blocks == 11 + + # 0: NiNode + block = data.blocks[0] + assert isinstance(block, NifFormat.NiNode) + assert block.name == b"chasmwallwg_mw" + assert block.flags == 12 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.velocity == NifFormat.Vector3.zero() + assert block.num_properties == 0 + assert not block.has_bounding_volume + assert block.num_children == 2 + assert block.num_effects == 0 + + # 1: NiTriShape + block = data.blocks[1] + assert isinstance(block, NifFormat.NiTriShape) + assert block.name == b"Tri Cube 0" + assert block.flags == 4 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.velocity == NifFormat.Vector3.zero() + assert block.num_properties == 3 + assert not block.has_bounding_volume + + # 6: NiTriShapeData + block = data.blocks[6] + assert isinstance(block, NifFormat.NiTriShapeData) + assert block.num_vertices == 755 + assert block.has_vertices + assert block.has_normals + assert block.bounding_sphere.center == NifFormat.Vector3.create(3.295708, 10.778694, 531.150818) + assert block.bounding_sphere.radius == pytest.approx(1011.122681) + assert block.has_vertex_colors + assert block.data_flags.num_uv_sets == 1 + assert block.has_uv + assert block.num_triangles == 1340 + assert block.num_triangle_points == 4020 + assert block.num_match_groups == 0 + + # 7: NiTriShape + block = data.blocks[7] + assert isinstance(block, NifFormat.NiTriShape) + assert block.name == b"Tri Cube 1" + assert block.flags == 4 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.velocity == NifFormat.Vector3.zero() + assert block.num_properties == 3 + assert not block.has_bounding_volume + + # 10: 
NiTriShapeData + block = data.blocks[10] + assert isinstance(block, NifFormat.NiTriShapeData) + assert block.num_vertices == 365 + assert block.has_vertices + assert block.has_normals + assert block.bounding_sphere.center == NifFormat.Vector3.create(-9.280243, -7.708931, 1167.847168) + assert block.bounding_sphere.radius == pytest.approx(934.304688) + assert block.has_vertex_colors + assert block.data_flags.num_uv_sets == 1 + assert block.has_uv + assert block.num_triangles == 678 + assert block.num_triangle_points == 2034 + assert block.num_match_groups == 0 + + +def test_import_oblivion(): + data = read_file("oblivion_chair.nif") + assert data.version == NifFormat.version_number("20.0.0.5") + assert data.user_version == 11 + assert data.bs_version == 11 + assert data.header.num_blocks == 39 + assert data.header.num_block_types == 14 + + # 0: NiNode + block = data.blocks[0] + assert isinstance(block, NifFormat.NiNode) + assert block.name == b"Scene Root" + assert block.num_extra_data_list == 2 + assert block.flags == 8 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_properties == 0 + assert block.num_children == 6 + assert block.num_effects == 0 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + assert block.integer_data == 2 + + # 9: NiTriShape + block = data.blocks[9] + assert isinstance(block, NifFormat.NiTriShape) + assert block.name == b"Chair:1" + assert block.num_extra_data_list == 1 + assert block.flags == 0 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_properties == 2 + + # 14: NiTriShapeData + block = data.blocks[14] + assert isinstance(block, NifFormat.NiTriShapeData) + assert block.group_id == 0 + assert block.num_vertices == 2203 + assert block.keep_flags == 0 + 
assert block.compress_flags == 0 + assert block.has_vertices + assert block.has_normals + assert block.bounding_sphere.center == NifFormat.Vector3.create(0.000055, 4.207962, 59.622810) + assert block.bounding_sphere.radius == pytest.approx(65.464142) + assert not block.has_vertex_colors + assert block.num_triangles == 3778 + assert block.num_triangle_points == 11334 + assert block.has_triangles + assert block.num_match_groups == 0 + + +def test_import_skyrim(): + data = read_file("skyrim_cookiechip.nif") + assert data.version == NifFormat.version_number("20.2.0.7") + assert data.user_version == 12 + assert data.bs_version == 83 + assert data.header.num_blocks == 12 + assert data.header.num_block_types == 12 + assert data.header.num_strings == 4 + + # 0: BSFadeNode + block = data.blocks[0] + assert isinstance(block, NifFormat.BSFadeNode) + assert block.name == b"Scene Root" + assert block.num_extra_data_list == 2 + assert block.flags == 524302 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_children == 1 + assert block.num_effects == 0 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + assert block.integer_data == 194 + + # 7: NiTriShape + block = data.blocks[7] + assert isinstance(block, NifFormat.NiTriShape) + assert block.name == b"cookie005" + assert block.num_extra_data_list == 0 + assert block.flags == 524302 + assert block.translation == NifFormat.Vector3.create(0.043507, 19.610661, 0.627650) + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + + # 10: NiTriShapeData + block = data.blocks[10] + assert isinstance(block, NifFormat.NiTriShapeData) + assert block.group_id == 0 + assert block.num_vertices == 344 + assert block.keep_flags == 0 + assert block.compress_flags == 0 + assert block.has_vertices + assert block.material_crc == 0 + assert 
block.has_normals + assert block.bounding_sphere.center == NifFormat.Vector3.create(0.001349, -19.619707, 0.754842) + assert block.bounding_sphere.radius == pytest.approx(3.932583) + assert not block.has_vertex_colors + assert block.num_triangles == 576 + assert block.num_triangle_points == 1728 + assert block.has_triangles + assert block.num_match_groups == 0 + + +def test_import_skyrimse(): + data = read_file("skyrimse_cookiechip.nif") + assert data.version == NifFormat.version_number("20.2.0.7") + assert data.user_version == 12 + assert data.bs_version == 100 + assert data.header.num_blocks == 11 + assert data.header.num_block_types == 11 + assert data.header.num_strings == 4 + + # 0: BSFadeNode + block = data.blocks[0] + assert isinstance(block, NifFormat.BSFadeNode) + assert block.name == b"Scene Root" + assert block.num_extra_data_list == 2 + assert block.flags == 524302 + assert block.translation == NifFormat.Vector3.zero() + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_children == 1 + assert block.num_effects == 0 + + # 1: BSXFlags + block = data.blocks[1] + assert isinstance(block, NifFormat.BSXFlags) + assert block.name == b"BSX" + assert block.integer_data == 194 + + # 7: BSTriShape + block = data.blocks[7] + assert isinstance(block, NifFormat.BSTriShape) + assert block.name == b"cookie005" + assert block.num_extra_data_list == 0 + assert block.flags == 524302 + assert block.translation == NifFormat.Vector3.create(0.043507, 19.610661, 0.627650) + assert block.rotation == NifFormat.Matrix33.identity() + assert block.scale == 1.0 + assert block.num_triangles == 576 + assert block.num_vertices == 344 + assert block.data_size == 13088 + assert block.particle_data_size == 0 + diff --git a/tests/formats/nif/test_matrix.py b/tests/formats/nif/test_matrix.py index b4f796b0a..a94eae37f 100644 --- a/tests/formats/nif/test_matrix.py +++ b/tests/formats/nif/test_matrix.py @@ -1,5 +1,6 @@ -from 
pyffi.formats.nif import NifFormat from nose.tools import assert_equals, assert_true, assert_false, assert_almost_equals + +from pyffi.formats.nif import NifFormat from tests.utils import assert_tuple_values @@ -66,7 +67,7 @@ def test_det_inverse_matrices(self): s, r = mat.get_inverse().get_scale_rotation() assert_almost_equals(s, 3.11526432) - assert_true(abs(0.321 - 1/s) < NifFormat.EPSILON) + assert_true(abs(0.321 - 1 / s) < NifFormat.EPSILON) rotation = ((-0.43430806610505857, -0.45177006876291087, 0.7792821186127868), (0.8930951359360114, -0.10331401572519507, 0.43784406664326525), diff --git a/tests/formats/nif/test_skinpartition.py b/tests/formats/nif/test_skinpartition.py index ce4da6c58..ddd2a1711 100644 --- a/tests/formats/nif/test_skinpartition.py +++ b/tests/formats/nif/test_skinpartition.py @@ -1,6 +1,7 @@ -from pyffi.formats.nif import NifFormat from nose.tools import assert_equals +from pyffi.formats.nif import NifFormat + class TestSkinPartition: """Regression tests for NifFormat.SkinPartition""" @@ -40,7 +41,6 @@ def test_skinpartition_get_triangles(self): expected_mapped_tris = [(2, 4, 6), (4, 5, 6), (6, 5, 3), (0, 1, 7)] assert_equals(list(part.get_mapped_triangles()), expected_mapped_tris) - def test_skinpartition_update_triangles(self): """Test NifFormat.SkinPartition updating triangle""" part = NifFormat.SkinPartition() diff --git a/tests/object_model/__init__.py b/tests/object_model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/object_model/niftoolsxml/test_niftools_bit_struct.py b/tests/object_model/niftoolsxml/test_niftools_bit_struct.py new file mode 100644 index 000000000..8fbf750cc --- /dev/null +++ b/tests/object_model/niftoolsxml/test_niftools_bit_struct.py @@ -0,0 +1,49 @@ +import unittest + +from nose.tools import assert_equals, assert_true + +from pyffi.object_models.niftoolsxml import BitStructAttribute as Attr +from pyffi.object_models.niftoolsxml.bit_struct import BitStructBase + + +class 
SimpleFormat(object): + @staticmethod + def name_attribute(name): + return name + + +class Flags(BitStructBase): + _numbytes = 1 + _attrs = [Attr.create(SimpleFormat, dict(name='a', numbits='3')), + Attr.create(SimpleFormat, dict(name='b', numbits='1'))] + + +SimpleFormat.Flags = Flags + + +class TestBitStruct(unittest.TestCase): + + def setUp(self): + self.y = Flags() + + def test_value_population(self): + self.y.populate_attribute_values(9, None) # b1001 + assert_equals(self.y.a, 1) + assert_equals(self.y.b, 1) + + def test_attributes(self): + self.y.populate_attribute_values(13, None) + assert_true(len(self.y._names), 2) + assert_true(self.y._names, ('a', 'b')) + assert_true(self.y._a_value_, 5) + assert_true(self.y._b_value_, 5) + + def test_get_value(self): + self.y.a = 5 + self.y.b = 1 + assert_equals(self.y.get_attributes_values(None), 13) + + def test_int_cast(self): + self.y.populate_attribute_values(13, None) + assert_true(len(self.y._items), 2) + assert_equals(int(self.y), 13) diff --git a/tests/object_model/test_arraytype.py b/tests/object_model/test_arraytype.py index 92259adba..b71378599 100644 --- a/tests/object_model/test_arraytype.py +++ b/tests/object_model/test_arraytype.py @@ -1,11 +1,11 @@ -from pyffi.object_models.array_type import ValidatedList +import nose +from nose.tools import assert_equals + from pyffi.object_models.array_type import UniformArray +from pyffi.object_models.array_type import ValidatedList from pyffi.object_models.simple_type import SimpleType from tests.utils import assert_tuple_values -import nose -from nose.tools import assert_equals - class IntList(ValidatedList): """Mock class to test validation""" @@ -18,6 +18,7 @@ def validate(self, item): class MyInt(SimpleType): """Mock class with a simple value""" + def __init__(self, value=0): self._value = value @@ -79,12 +80,14 @@ class TestAnyArray: @nose.tools.raises(TypeError) def test_invalid_anytype_constructor(self): """Test elements must be of AnyType""" + class 
InvalidListOfInts(UniformArray): """Mock class to uniform and override values""" ItemType = int def test_subtype_constructor(self): """Test subtyping setting correct ItemType with base AnyType""" + class SubInt(SimpleType): """Mock class to uniform and override values""" diff --git a/tests/object_model/test_expression.py b/tests/object_model/test_expression.py new file mode 100644 index 000000000..92af64833 --- /dev/null +++ b/tests/object_model/test_expression.py @@ -0,0 +1,158 @@ +import unittest +import pytest + +from pyffi.object_models.expression import Expression +from nose.tools import assert_almost_equal + + +class Z(object): + a = 1 + b = 2 + c = 3 + + +class A(object): + x = False + y = True + z = Z() + + +class B(object): + + def __int__(self): + return 7 + + def __mul__(self, other): + return self.__int__() * int(other) + + +class TestExpression(unittest.TestCase): + def setUp(self): + self.a = A() + + def test_data_source_evaluation(self): + e = Expression('x || y') + assert e.eval(self.a) == 1 + + assert Expression('99 & 15').eval(self.a) == 3 + assert bool(Expression('(99&15)&&y').eval(self.a)) + + def test_name_filter(self): + self.a.hello_world = False + + def name_filter(s): + return 'hello_' + s.lower() + + assert not bool(Expression('(99 &15) &&WoRlD', name_filter=name_filter).eval(self.a)) + + def test_attribute_error(self): + with pytest.raises(AttributeError): + Expression('c && d').eval(self.a) + + def test_expression_operators(self): + assert bool(Expression('1 == 1').eval()) + assert bool(Expression('(1 == 1)').eval()) + assert not bool(Expression('1 != 1').eval()) + assert not bool(Expression('!(1 == 1)').eval()) + assert not bool(Expression('!((1 <= 2) && (2 <= 3))').eval()) + assert bool(Expression('(1 <= 2) && (2 <= 3) && (3 <= 4)').eval()) + + def test_bitwise_operators(self): + assert Expression('0xFF000000 >> 18').eval() == 16320 + assert Expression('0x000000FF << 4').eval() == 4080 + + def test_implicit_cast(self): + self.a.x = B() 
+ assert Expression('x * 10').eval(self.a) == 70 + + def test_nested_attributes(self): + assert bool(Expression("z.a == 1").eval(self.a)) + assert bool(Expression("z.b == 2").eval(self.a)) + assert bool(Expression("z.c == 3").eval(self.a)) + + def test_scientific_notation(self): + assert Expression("3.402823466e+9").eval() == 3402823466 + assert Expression("3.402823466e+9 + 360").eval() == 3402823826 + assert_almost_equal(Expression("3.402823466e+9 / 12").eval(), 283568622.1666667) + + +class TestPartition: + def test_partition_empty(self): + assert Expression._partition('') == ('', '', '') + + def test_partition_left(self): + assert Expression._partition('abcdefg') == ('abcdefg', '', '') + + def test_partition_left_trim(self): + assert Expression._partition(' abcdefg ') == ('abcdefg', '', '') + + def test_partition_logical_or(self): + assert Expression._partition('abc || efg') == ('abc', '||', 'efg') + + def test_partition_equivilance(self): + assert Expression._partition('(1 == 1)') == ('1 == 1', '', '') + + def test_multi_brances(self): + assert Expression._partition('( 1 != 1 ) || ((!abc) == 1)') == ('1 != 1', '||', '(!abc) == 1') + + def test_partition_no_spaces(self): + assert Expression._partition('abc||efg') == ('abc', '||', 'efg') + + def test_partition_bit_ops(self): + assert Expression._partition(' (a | b) & c ') == ('a | b', '&', 'c') + + def test_partition_right_uninary_op(self): + assert Expression._partition('!(1 <= 2)') == ('', '!', '(1 <= 2)') + + def test_partition_not_eq(self): + assert Expression._partition('(a | b)!=(b&c)') == ('a | b', '!=', 'b&c') + + def test_partition_scrambled(self): + assert Expression._partition('(a== b) &&(( b!=c)||d )') == ('a== b', '&&', '( b!=c)||d') + + def test_partition_scientific_notation(self): + assert Expression._partition('3.402823466e+9') == ('3.402823466e+9', '', '') + assert Expression._partition('3.402823466e+9 + 360') == ('3.402823466e+9', '+', '360') + assert Expression._partition('(3.402823466e+9 * 2) 
+ (3.402823466e+9 * 2)') == ('3.402823466e+9 * 2', '+', '3.402823466e+9 * 2') + + +class TestParse: + def test_parse_empty(self): + assert Expression._parse("") is None + + def test_parse_brackets(self): + assert isinstance(Expression._parse("(12 - 12)"), Expression) + + def test_parse_scientific_notation(self): + assert Expression._parse("3.402823466e+9") == 3402823466 + + def test_parse_operators(self): + assert isinstance(Expression._parse("12 - 12"), Expression) + + def test_parse_int(self): + assert Expression._parse("0xFF00") == 0xFF00 + assert Expression._parse("1000") == 1000 + + def test_parse_version(self): + assert Expression._parse("20.2.0.7") == 335675399 + + def test_parse_arg_token(self): + assert Expression._parse("#ARG#") == "arg" + + def test_parse_attributes(self): + assert Expression._parse("a.b.c") == "a.b.c" + + +class TestBraces: + def test_no_brace(self): + assert Expression._scan_brackets('abcde') == (-1, -1) + + def test_single_set_of_braces(self): + assert Expression._scan_brackets('()') == (0, 1) + + def test_nested_braces(self): + assert Expression._scan_brackets('(abc(def))g') == (0, 9) + + expr_string = ' (abc(dd efy 442))xxg' + start_pos, end_pos = Expression._scan_brackets(expr_string) + assert expr_string[start_pos + 1:end_pos] == "abc(dd efy 442)" diff --git a/tests/object_model/test_simpletype.py b/tests/object_model/test_simpletype.py index 193eb742d..f0f698a87 100644 --- a/tests/object_model/test_simpletype.py +++ b/tests/object_model/test_simpletype.py @@ -1,4 +1,3 @@ -import nose from nose.tools import assert_equals, assert_is_none, assert_false, assert_true from pyffi.object_models.simple_type import SimpleType diff --git a/tests/object_model/xml/test_bit_struct.py b/tests/object_model/xml/test_bit_struct.py index f900ca0ef..30d6c602c 100644 --- a/tests/object_model/xml/test_bit_struct.py +++ b/tests/object_model/xml/test_bit_struct.py @@ -1,9 +1,9 @@ import unittest -from nose.tools import assert_equals, assert_false, 
assert_true, raises +from nose.tools import assert_equals, assert_true -from pyffi.object_models.xml.bit_struct import BitStructBase from pyffi.object_models.xml import BitStructAttribute as Attr +from pyffi.object_models.xml.bit_struct import BitStructBase class SimpleFormat(object): @@ -17,6 +17,7 @@ class Flags(BitStructBase): _attrs = [Attr(SimpleFormat, dict(name='a', numbits='3')), Attr(SimpleFormat, dict(name='b', numbits='1'))] + SimpleFormat.Flags = Flags diff --git a/tests/object_model/xml/test_expression.py b/tests/object_model/xml/test_expression.py deleted file mode 100644 index ac9c0bb5c..000000000 --- a/tests/object_model/xml/test_expression.py +++ /dev/null @@ -1,106 +0,0 @@ -import unittest - -from pyffi.object_models.xml.expression import Expression -from nose.tools import assert_equals, assert_false, assert_true, raises - - -class A(object): - x = False - y = True - - -class B(object): - - def __int__(self): - return 7 - - def __mul__(self, other): - return self.__int__() * int(other) - - -class TestExpression(unittest.TestCase): - - def setUp(self): - self.a = A() - - def test_data_source_evaluation(self): - e = Expression('x || y') - assert_equals(e.eval(self.a), 1) - - assert_equals(Expression('99 & 15').eval(self.a), 3) - assert_true(bool(Expression('(99&15)&&y').eval(self.a))) - - def test_name_filter(self): - self.a.hello_world = False - - def nameFilter(s): - return 'hello_' + s.lower() - - assert_false(bool(Expression('(99 &15) &&WoRlD', name_filter = nameFilter).eval(self.a))) - - @raises(AttributeError) - def test_attribute_error(self): - Expression('c && d').eval(self.a) - - def test_expression_operators(self): - assert_true(bool(Expression('1 == 1').eval())) - assert_true(bool(Expression('(1 == 1)').eval())) - assert_false(bool(Expression('1 != 1').eval())) - assert_false(bool(Expression('!(1 == 1)').eval())) - assert_false(bool(Expression('!((1 <= 2) && (2 <= 3))').eval())) - assert_true(bool(Expression('(1 <= 2) && (2 <= 3) && (3 <= 
4)').eval())) - - def test_implicit_cast(self): - self.a.x = B() - assert_equals(Expression('x * 10').eval(self.a), 70) - -class TestPartition: - - def test_partition_empty(self): - assert_equals(Expression._partition(''), ('', '', '')) - - def test_partition_left(self): - assert_equals(Expression._partition('abcdefg'), ('abcdefg', '', '')) - - def test_partition_left_trim(self): - assert_equals(Expression._partition(' abcdefg '), ('abcdefg', '', '')) - - def test_partition_logical_or(self): - assert_equals(Expression._partition('abc || efg'), ('abc', '||', 'efg')) - - def test_partition_equivilance(self): - assert_equals(Expression._partition('(1 == 1)'), ('1 == 1', '', '')) - - def test_multi_brances(self): - assert_equals(Expression._partition('( 1 != 1 ) || ((!abc) == 1)'), ('1 != 1', '||', '(!abc) == 1')) - - def test_partition_no_spaces(self): - assert_equals(Expression._partition('abc||efg'), ('abc', '||', 'efg')) - - def test_partition_bit_ops(self): - assert_equals(Expression._partition(' (a | b) & c '), ('a | b', '&', 'c')) - - def test_partition_right_uninary_op(self): - assert_equals(Expression._partition('!(1 <= 2)'), ('', '!', '(1 <= 2)')) - - def test_partition_not_eq(self): - assert_equals(Expression._partition('(a | b)!=(b&c)'), ('a | b', '!=', 'b&c')) - - def test_partition_left_trim(self): - assert_equals(Expression._partition('(a== b) &&(( b!=c)||d )'), ('a== b', '&&', '( b!=c)||d')) - - -class TestBraces: - - def test_no_brace(self): - assert_equals(Expression._scan_brackets('abcde'), (-1, -1)) - - def test_single_set_of_braces(self): - assert_equals(Expression._scan_brackets('()'), (0, 1)) - - def test_nested_braces(self): - assert_equals(Expression._scan_brackets('(abc(def))g'), (0, 9)) - - s = ' (abc(dd efy 442))xxg' - start_pos, end_pos = Expression._scan_brackets(s) - assert_equals(s[start_pos + 1:end_pos], "abc(dd efy 442)") \ No newline at end of file diff --git a/tests/perf/fraps_minmaxavg.py b/tests/perf/fraps_minmaxavg.py index 
a1c6c3eeb..0eded5369 100644 --- a/tests/perf/fraps_minmaxavg.py +++ b/tests/perf/fraps_minmaxavg.py @@ -61,6 +61,7 @@ name = name.strip() total[root][name].append(float(num)) + def summary(outfile): for root in sorted(total): if not total[root]["Frames"]: @@ -76,6 +77,7 @@ def summary(outfile): 1.96 * sd(vec) / (len(vec) ** 0.5)), file=outfile) print(file=outfile) + summary(sys.stdout) with open(os.path.join(folder, "summary.txt"), "w") as outfile: summary(outfile) diff --git a/tests/perf/objgraph.py b/tests/perf/objgraph.py index 7e20ce305..7e6a32e1c 100644 --- a/tests/perf/objgraph.py +++ b/tests/perf/objgraph.py @@ -54,13 +54,12 @@ __version__ = "1.1dev" __date__ = "2008-09-05" - import gc import inspect -import types -import weakref import operator import os +import types +import weakref def count(typename): @@ -186,7 +185,7 @@ def find_backref_chain(obj, predicate, max_depth=20, extra_ignore=()): depth[id(source)] = tdepth + 1 parent[id(source)] = target queue.append(source) - return None # not found + return None # not found def show_backrefs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, @@ -258,6 +257,7 @@ def show_refs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, filter=filter, too_many=too_many, highlight=highlight, edge_func=gc.get_referents, swap_source_target=True) + # # Internal helpers # @@ -379,9 +379,9 @@ def gradient(start_color, end_color, depth, max_depth): h1, s1, v1 = start_color h2, s2, v2 = end_color f = float(depth) / max_depth - h = h1 * (1-f) + h2 * f - s = s1 * (1-f) + s2 * f - v = v1 * (1-f) + v2 * f + h = h1 * (1 - f) + h2 * f + s = s1 * (1 - f) + s2 * f + v = v1 * (1 - f) + v2 * f return h, s, v @@ -396,4 +396,3 @@ def edge_label(source, target): else: return ' [label="%s"]' % quote(safe_repr(k)) return '' - diff --git a/tests/perf/summary.py b/tests/perf/summary.py index e7901a185..6258f494e 100644 --- a/tests/perf/summary.py +++ b/tests/perf/summary.py @@ -37,8 +37,6 @@ # # ***** END LICENSE BLOCK 
***** -import os -import sys def mean(vec): """Sample mean. @@ -49,6 +47,7 @@ def mean(vec): return float(sum(vec)) / len(vec) + def sd(vec): """Sample standard deviation. @@ -58,6 +57,7 @@ def sd(vec): m = mean(vec) return (float(sum((v - m) ** 2 for v in vec)) / (len(vec) - 1)) ** 0.5 + def median(vec): """ >>> median([1, 2, 3, 4, 5]) @@ -74,6 +74,7 @@ def median(vec): else: return 0.5 * (vec[mid] + vec[mid + 1]) + def mad(vec): """Median absolute deviation. @@ -83,6 +84,7 @@ def mad(vec): m = median(vec) return median(abs(x - m) for x in vec) + def iqr(vec): """Interquartile range. @@ -101,6 +103,7 @@ def iqr(vec): right = vec[mid + 1:] return median(right) - median(left) + def confint(vec, robust=False): """Confidence interval for the population mean at 5% significance.""" if not robust: @@ -111,6 +114,8 @@ def confint(vec, robust=False): bound = 1.96 * 1.349 * iqr(vec) / (len(vec) ** 0.5) return center - bound, center + bound + if __name__ == "__main__": import doctest + doctest.testmod() diff --git a/tests/perf/txt_minmaxavg.py b/tests/perf/txt_minmaxavg.py index e0c7132d9..03f88755a 100644 --- a/tests/perf/txt_minmaxavg.py +++ b/tests/perf/txt_minmaxavg.py @@ -50,11 +50,11 @@ parser.add_argument( '--robust', dest='robust', default=False, action='store_true', help='use median and iqr instead of mean and standard deviation', - ) +) parser.add_argument( 'folder', type=str, action='store', help='the folder to process files from', - ) +) args = parser.parse_args() @@ -71,9 +71,11 @@ row = row.strip() total[name].append(float(row)) + def summary(outfile): for name, vec in sorted(total.items()): low, up = confint(vec, robust=args.robust) print("{0:10}: [{1:10.4f}, {2:10.4f}]".format(name, low, up), file=outfile) + summary(sys.stdout) diff --git a/tests/scripts/cgf/__init__.py b/tests/scripts/cgf/__init__.py index ac635dbad..56c80bf1c 100644 --- a/tests/scripts/cgf/__init__.py +++ b/tests/scripts/cgf/__init__.py @@ -1,9 +1,10 @@ import imp import os.path import sys 
+from os.path import dirname + from tests import test_logger -from os.path import dirname dir_path = __file__ for i in range(4): # recurse up to root repo dir dir_path = dirname(dir_path) diff --git a/tests/scripts/cgf/test_cgftoaster.py b/tests/scripts/cgf/test_cgftoaster.py index 2e991eb93..b0752950a 100644 --- a/tests/scripts/cgf/test_cgftoaster.py +++ b/tests/scripts/cgf/test_cgftoaster.py @@ -1,5 +1,6 @@ """Tests for the cgftoaster script""" from nose.tools import raises + from tests.scripts.cgf import call_cgftoaster cfg_dir = "tests/spells/cgf/files/" diff --git a/tests/scripts/kfm/__init__.py b/tests/scripts/kfm/__init__.py index a9938471e..63f38e9c0 100644 --- a/tests/scripts/kfm/__init__.py +++ b/tests/scripts/kfm/__init__.py @@ -1,10 +1,10 @@ import imp import os.path import sys +from os.path import dirname from tests import test_logger -from os.path import dirname dir_path = __file__ for i in range(4): # recurse up to root repo dir dir_path = dirname(dir_path) diff --git a/tests/scripts/nif/__init__.py b/tests/scripts/nif/__init__.py index 981d7f53c..1c1023990 100644 --- a/tests/scripts/nif/__init__.py +++ b/tests/scripts/nif/__init__.py @@ -1,10 +1,10 @@ import imp import os.path import sys +from os.path import dirname from tests import test_logger -from os.path import dirname dir_path = __file__ for i in range(4): # recurse up to root repo dir dir_path = dirname(dir_path) diff --git a/tests/scripts/nif/test_niftoaster.py b/tests/scripts/nif/test_niftoaster.py index 1596b9671..13a55b43b 100644 --- a/tests/scripts/nif/test_niftoaster.py +++ b/tests/scripts/nif/test_niftoaster.py @@ -2,7 +2,9 @@ import os import os.path + from nose.tools import assert_equal, assert_almost_equal, raises + from tests.scripts.nif import call_niftoaster nif_dir = "tests/spells/nif/files/" @@ -49,47 +51,47 @@ def test_check_skip_only(): toaster = call_niftoaster( *("--raise --skip texture --skip skin --only fix_t --only center check_nop {0}".format(nif_dir).split())) 
assert_equal(sorted(toaster.files_done), [ - nif_dir + 'test_centerradius.nif', - nif_dir + 'test_fix_tangentspace.nif']) + nif_dir + 'test_centerradius.nif', + nif_dir + 'test_fix_tangentspace.nif']) assert_equal(sorted(toaster.files_skipped), [ - nif_dir + 'invalid.nif', - nif_dir + 'nds.nif', - nif_dir + 'neosteam.nif', - nif_dir + 'test.nif', - nif_dir + 'test_check_tangentspace1.nif', - nif_dir + 'test_check_tangentspace2.nif', - nif_dir + 'test_check_tangentspace3.nif', - nif_dir + 'test_check_tangentspace4.nif', - nif_dir + 'test_convexverticesshape.nif', - nif_dir + 'test_dump_tex.nif', - nif_dir + 'test_fix_clampmaterialalpha.nif', - nif_dir + 'test_fix_cleanstringpalette.nif', - nif_dir + 'test_fix_detachhavoktristripsdata.nif', - nif_dir + 'test_fix_disableparallax.nif', - nif_dir + 'test_fix_ffvt3rskinpartition.nif', - nif_dir + 'test_fix_mergeskeletonroots.nif', - nif_dir + 'test_fix_texturepath.nif', - nif_dir + 'test_grid_128x128.nif', - nif_dir + 'test_grid_64x64.nif', - nif_dir + 'test_mopp.nif', - nif_dir + 'test_opt_collision_complex_mopp.nif', - nif_dir + 'test_opt_collision_mopp.nif', - nif_dir + 'test_opt_collision_packed.nif', - nif_dir + 'test_opt_collision_to_boxshape.nif', - nif_dir + 'test_opt_collision_to_boxshape_notabox.nif', - nif_dir + 'test_opt_collision_unpacked.nif', - nif_dir + 'test_opt_delunusedbones.nif', - nif_dir + 'test_opt_dupgeomdata.nif', - nif_dir + 'test_opt_dupverts.nif', - nif_dir + 'test_opt_emptyproperties.nif', - nif_dir + 'test_opt_grid_layout.nif', - nif_dir + 'test_opt_mergeduplicates.nif', - nif_dir + 'test_opt_vertex_cache.nif', - nif_dir + 'test_opt_zeroscale.nif', - nif_dir + 'test_skincenterradius.nif', - nif_dir + 'test_vertexcolor.nif', - ]) + nif_dir + 'invalid.nif', + nif_dir + 'nds.nif', + nif_dir + 'neosteam.nif', + nif_dir + 'test.nif', + nif_dir + 'test_check_tangentspace1.nif', + nif_dir + 'test_check_tangentspace2.nif', + nif_dir + 'test_check_tangentspace3.nif', + nif_dir + 
'test_check_tangentspace4.nif', + nif_dir + 'test_convexverticesshape.nif', + nif_dir + 'test_dump_tex.nif', + nif_dir + 'test_fix_clampmaterialalpha.nif', + nif_dir + 'test_fix_cleanstringpalette.nif', + nif_dir + 'test_fix_detachhavoktristripsdata.nif', + nif_dir + 'test_fix_disableparallax.nif', + nif_dir + 'test_fix_ffvt3rskinpartition.nif', + nif_dir + 'test_fix_mergeskeletonroots.nif', + nif_dir + 'test_fix_texturepath.nif', + nif_dir + 'test_grid_128x128.nif', + nif_dir + 'test_grid_64x64.nif', + nif_dir + 'test_mopp.nif', + nif_dir + 'test_opt_collision_complex_mopp.nif', + nif_dir + 'test_opt_collision_mopp.nif', + nif_dir + 'test_opt_collision_packed.nif', + nif_dir + 'test_opt_collision_to_boxshape.nif', + nif_dir + 'test_opt_collision_to_boxshape_notabox.nif', + nif_dir + 'test_opt_collision_unpacked.nif', + nif_dir + 'test_opt_delunusedbones.nif', + nif_dir + 'test_opt_dupgeomdata.nif', + nif_dir + 'test_opt_dupverts.nif', + nif_dir + 'test_opt_emptyproperties.nif', + nif_dir + 'test_opt_grid_layout.nif', + nif_dir + 'test_opt_mergeduplicates.nif', + nif_dir + 'test_opt_vertex_cache.nif', + nif_dir + 'test_opt_zeroscale.nif', + nif_dir + 'test_skincenterradius.nif', + nif_dir + 'test_vertexcolor.nif', + ]) assert_equal(toaster.files_failed, set([])) @@ -101,7 +103,7 @@ def test_prefix_suffix(): os.remove(nif_dir + "pre_test_suf.nif") -#TODO Move to spell test +# TODO Move to spell test def test_check_bhkbodycenter(): """Test body centre spell""" testfile = nif_dir + "test_fix_detachhavoktristripsdata.nif" @@ -129,6 +131,7 @@ def test_check_centerradius(): assert_almost_equal(orig_radius, 10.0) assert_almost_equal(calc_radius, 17.32050890) + """ The check_skincenterradius spell -------------------------------- diff --git a/tests/spells/cgf/check/test_tangentspace.py b/tests/spells/cgf/check/test_tangentspace.py index 251d7eba5..6d356d975 100644 --- a/tests/spells/cgf/check/test_tangentspace.py +++ b/tests/spells/cgf/check/test_tangentspace.py @@ -14,7 
+14,7 @@ def setUp(self): def test_non_interactive_check_tangentspace(self): """Check_tangentspace spell""" - call_cgftoaster("--raise", "check_tangentspace", "--noninteractive", "--verbose=1", self.dest_file) + call_cgftoaster("--raise", "check_tangentspace", "--noninteractive", "--verbose=1", self.dest_file) """ pyffi.toaster:INFO:=== tests/formats/cgf/monkey.cgf === pyffi.toaster:INFO: --- check_tangentspace --- @@ -26,4 +26,4 @@ def test_non_interactive_check_tangentspace(self): ... pyffi.toaster:INFO: ~~~ NodeChunk [CryExportNode_monkey-CGF-monkey-DoExport-MergeNodes] ~~~ pyffi.toaster:INFO:Finished. - """ \ No newline at end of file + """ diff --git a/tests/spells/cgf/check/test_vertex_colors.py b/tests/spells/cgf/check/test_vertex_colors.py index 15413fbd7..a83ac6d5f 100644 --- a/tests/spells/cgf/check/test_vertex_colors.py +++ b/tests/spells/cgf/check/test_vertex_colors.py @@ -44,4 +44,4 @@ def test_non_interactive_check_vcols(self): pyffi.toaster:INFO: ~~~ MeshChunk [] ~~~ pyffi.toaster:INFO: has vertex colors! pyffi.toaster:INFO:Finished. 
- """ \ No newline at end of file + """ diff --git a/tests/spells/cgf/dump/test_dump_data.py b/tests/spells/cgf/dump/test_dump_data.py index 167e168f3..92a5d30af 100644 --- a/tests/spells/cgf/dump/test_dump_data.py +++ b/tests/spells/cgf/dump/test_dump_data.py @@ -1,6 +1,6 @@ """Tests for the dump spell for cgf""" -from tests.utils import BaseCgfFileTestCase from tests.scripts.cgf import call_cgftoaster +from tests.utils import BaseCgfFileTestCase class TestDumpDataCgf(BaseCgfFileTestCase): diff --git a/tests/spells/kf/__init__.py b/tests/spells/kf/__init__.py index d82041e73..9c6acc6e2 100644 --- a/tests/spells/kf/__init__.py +++ b/tests/spells/kf/__init__.py @@ -1 +1 @@ -"""Module to test KF spells""" \ No newline at end of file +"""Module to test KF spells""" diff --git a/tests/spells/kf/test_getsetbonepriorities.py b/tests/spells/kf/test_getsetbonepriorities.py index 87bd9e657..509d9c1bd 100644 --- a/tests/spells/kf/test_getsetbonepriorities.py +++ b/tests/spells/kf/test_getsetbonepriorities.py @@ -1,26 +1,25 @@ """Tests for the get/setbonepriorities spells.""" import codecs -import os.path - -import tempfile import os +import os.path import shutil +import tempfile +from os.path import dirname import nose.tools from pyffi.formats.nif import NifFormat from tests.scripts.nif import call_niftoaster -from os.path import dirname dir_path = __file__ for i in range(1): # recurse up to root repo dir dir_path = dirname(dir_path) test_root = dir_path input_files = os.path.join(test_root, 'spells', 'kf').replace("\\", "/") -class TestGetSetBonePrioritiesOblivion: +class TestGetSetBonePrioritiesOblivion: out = None file_name = "test_controllersequence.kf" txt_name = "test_controllersequence_bonepriorities.txt" @@ -31,7 +30,6 @@ def setup(self): self.kffile2 = os.path.join(test_root, "_" + self.file_name) self.txtfile = os.path.join(test_root, self.txt_name) - def teardown(self): shutil.rmtree(self.out) @@ -41,7 +39,7 @@ def check_priorities(filename, priorities): data = 
NifFormat.Data() with open(filename, "rb") as stream: data.read(stream) - nose.tools.assert_equal(len(data.roots), 1) + nose.tools.assert_equal(len(data.roots), 1) seq = data.roots[0] nose.tools.assert_is_instance(seq, NifFormat.NiControllerSequence) nose.tools.assert_list_equal( @@ -54,12 +52,12 @@ def test_check_get_set_bonepriorities(self): nose.tools.assert_true(os.path.exists(self.txtfile)) with codecs.open(self.txtfile, "rb", encoding="ascii") as stream: contents = stream.read() - nose.tools.assert_equal(contents,'[TestAction]\r\nBip01=27\r\nBip01 Pelvis=27\r\nBip01 Spine=75\r\n') + nose.tools.assert_equal(contents, '[TestAction]\r\nBip01=27\r\nBip01 Pelvis=27\r\nBip01 Spine=75\r\n') with codecs.open(self.txtfile, "wb", encoding="ascii") as stream: stream.write("[TestAction]\n") stream.write("Bip01=33\n") stream.write("Bip01 Pelvis=29\n") - stream.write("Bip01 Spine=42\n") # .replace('\r\n', '\n')) # replace probably not needed; just in case + stream.write("Bip01 Spine=42\n") # .replace('\r\n', '\n')) # replace probably not needed; just in case toaster = call_niftoaster("--raise", "modify_setbonepriorities", "--prefix=_", self.kffile) nose.tools.assert_equal(list(toaster.files_done), [self.kffile]) self.check_priorities(self.kffile2, [33, 29, 42]) diff --git a/tests/spells/nif/__init__.py b/tests/spells/nif/__init__.py index 8da660db7..b1267c92c 100644 --- a/tests/spells/nif/__init__.py +++ b/tests/spells/nif/__init__.py @@ -1,9 +1,9 @@ import imp +import logging import os.path import sys -import logging - from os.path import dirname + dir_path = __file__ for i in range(4): # recurse up to root repo dir dir_path = dirname(dir_path) diff --git a/tests/spells/nif/dump/test_texture_properties.py b/tests/spells/nif/dump/test_texture_properties.py index 22f77d980..876dc5884 100644 --- a/tests/spells/nif/dump/test_texture_properties.py +++ b/tests/spells/nif/dump/test_texture_properties.py @@ -36,6 +36,7 @@ def test_non_interactive_dump_texture_properties(self): 
pyffi.toaster:INFO:Finished. """ + class TestDumpTextureDataNif(BaseNifFileTestCase): """Invoke the dump_tex spell check through nif toaster""" diff --git a/tests/spells/nif/fix/test_clampmaterialalpha.py b/tests/spells/nif/fix/test_clampmaterialalpha.py index c21d97148..03b2d80b9 100644 --- a/tests/spells/nif/fix/test_clampmaterialalpha.py +++ b/tests/spells/nif/fix/test_clampmaterialalpha.py @@ -1,10 +1,9 @@ """Tests for the fix_texturepath spell""" -from tests.scripts.nif import call_niftoaster -from tests.utils import BaseNifFileTestCase +from nose.tools import assert_true, assert_equals from pyffi.spells.nif.fix import SpellClampMaterialAlpha - -from nose.tools import assert_true, assert_equals +from tests.scripts.nif import call_niftoaster +from tests.utils import BaseNifFileTestCase class TestFixTexturePathToasterNif(BaseNifFileTestCase): @@ -29,8 +28,8 @@ def test_explicit_fix_texture_path(self): assert_equals(self.data.roots[0].children[0].children[1].properties[0].alpha, 0.0) def test_non_interactive_fix_clamp_material_alpha(self): - - call_niftoaster("--raise", "fix_clampmaterialalpha", "--dry-run", "--noninteractive", "--verbose=1", self.dest_file) + call_niftoaster("--raise", "fix_clampmaterialalpha", "--dry-run", "--noninteractive", "--verbose=1", + self.dest_file) """ pyffi.toaster:INFO:=== tests/spells/nif/files/test_fix_clampmaterialalpha.nif === diff --git a/tests/spells/nif/fix/test_cleanstringpalette.py b/tests/spells/nif/fix/test_cleanstringpalette.py index 6ef08c4b0..eaa7cd276 100644 --- a/tests/spells/nif/fix/test_cleanstringpalette.py +++ b/tests/spells/nif/fix/test_cleanstringpalette.py @@ -1,16 +1,14 @@ """Tests for the fix_cleanstringpalette spell""" -from tests.scripts.nif import call_niftoaster -from tests.utils import BaseNifFileTestCase +from nose.tools import assert_equals from pyffi.spells.nif.fix import SpellCleanStringPalette - -from nose.tools import assert_equals +from tests.scripts.nif import call_niftoaster +from tests.utils 
import BaseNifFileTestCase class TestFixTexturePathToasterNif(BaseNifFileTestCase): """Invoke the fix_texturepath spell check through nif toaster""" - def setUp(self): super(TestFixTexturePathToasterNif, self).setUp() self.src_name = "test_fix_cleanstringpalette.nif" @@ -33,4 +31,5 @@ def test_explicit_fix_string_palette(self): assert_equals(strings, expected) def test_non_interactive_fix_string_palette(self): - call_niftoaster("--raise", "fix_cleanstringpalette", "--dry-run", "--noninteractive", "--verbose=1", self.dest_file) + call_niftoaster("--raise", "fix_cleanstringpalette", "--dry-run", "--noninteractive", "--verbose=1", + self.dest_file) diff --git a/tests/spells/nif/fix/test_detachhavoktristripsdata.py b/tests/spells/nif/fix/test_detachhavoktristripsdata.py index 6ab4961c1..47c70ca47 100644 --- a/tests/spells/nif/fix/test_detachhavoktristripsdata.py +++ b/tests/spells/nif/fix/test_detachhavoktristripsdata.py @@ -1,10 +1,9 @@ """Tests for the fix_detachhavoktristripsdata spell""" -from tests.scripts.nif import call_niftoaster -from tests.utils import BaseNifFileTestCase +from nose.tools import assert_true, assert_false from pyffi.spells.nif.fix import SpellDetachHavokTriStripsData - -from nose.tools import assert_equals, assert_true, assert_false +from tests.scripts.nif import call_niftoaster +from tests.utils import BaseNifFileTestCase class TestDetachHavokTriStripsDataNif(BaseNifFileTestCase): diff --git a/tests/spells/nif/fix/test_substitutestringpalette.py b/tests/spells/nif/fix/test_substitutestringpalette.py index fc736f917..1d5260a2b 100644 --- a/tests/spells/nif/fix/test_substitutestringpalette.py +++ b/tests/spells/nif/fix/test_substitutestringpalette.py @@ -1,9 +1,9 @@ """Tests for the modify_substitutestringpalette spell""" +from nose.tools import assert_true + from tests.scripts.nif import call_niftoaster from tests.utils import BaseNifFileTestCase -from nose.tools import assert_true - class TestModifySubstitutePaletteNif(BaseNifFileTestCase): 
"""Invoke the modify_substitutestringpalette spell check through nif toaster""" @@ -21,7 +21,8 @@ def test_non_interactive_modify_string_palette_values(self): assert_true(strings, expected) # substitute - call_niftoaster("--raise", "modify_substitutestringpalette", "-a", "/Test/Woops", "--noninteractive", "--verbose=1", self.dest_file) + call_niftoaster("--raise", "modify_substitutestringpalette", "-a", "/Test/Woops", "--noninteractive", + "--verbose=1", self.dest_file) """ pyffi.toaster:INFO:=== tests/spells/nif/files...test_fix_cleanstringpalette.nif === diff --git a/tests/spells/nif/fix/test_tangentspace.py b/tests/spells/nif/fix/test_tangentspace.py index 8dab8cae6..88f773037 100644 --- a/tests/spells/nif/fix/test_tangentspace.py +++ b/tests/spells/nif/fix/test_tangentspace.py @@ -1,9 +1,8 @@ """Regression test for tangent space algorithm""" +from pyffi.spells.nif.fix import SpellAddTangentSpace from tests.scripts.nif import call_niftoaster from tests.utils import BaseNifFileTestCase, assert_tuple_values -from pyffi.spells.nif.fix import SpellAddTangentSpace - class TestFixTangentSpaceNif(BaseNifFileTestCase): """Invoke the fix_texturepath spell check through nif toaster""" @@ -1518,8 +1517,7 @@ def setUp(self): super(TestFixDeltaTangentSpaceNif, self).readNifData() def test_non_interactive_fix_addtangentspace(self): - - call_niftoaster("--raise", "fix_deltangentspace", "fix_addtangentspace", "--dry-run", "--noninteractive", + call_niftoaster("--raise", "fix_deltangentspace", "fix_addtangentspace", "--dry-run", "--noninteractive", "--verbose=1", self.dest_file) """ pyffi.toaster:INFO:=== tests/spells/nif/files/test_fix_tangentspace.nif === @@ -1533,9 +1531,8 @@ def test_non_interactive_fix_addtangentspace(self): """ def test_non_interactive_fix_addtangentspace_series(self): - call_niftoaster("--raise", "fix_deltangentspace", "fix_addtangentspace", "--series", - "--dry-run", "--noninteractive", "--verbose=1", self.dest_file) + "--dry-run", "--noninteractive", 
"--verbose=1", self.dest_file) """ pyffi.toaster:INFO:=== tests/spells/nif/files/test_fix_tangentspace.nif === @@ -1549,4 +1546,4 @@ def test_non_interactive_fix_addtangentspace_series(self): pyffi.toaster:INFO: adding tangent space pyffi.toaster:INFO: writing to temporary file pyffi.toaster:INFO:Finished. - """ \ No newline at end of file + """ diff --git a/tests/spells/nif/fix/test_texturepath.py b/tests/spells/nif/fix/test_texturepath.py index 242c0b9ef..f8c2f13c7 100644 --- a/tests/spells/nif/fix/test_texturepath.py +++ b/tests/spells/nif/fix/test_texturepath.py @@ -1,6 +1,7 @@ """Tests for the fix_texturepath spell""" from tests.scripts.nif import call_niftoaster -from tests.utils import BaseNifFileTestCase, assert_tuple_values +from tests.utils import BaseNifFileTestCase + class TestFixTangentSpaceNif(BaseNifFileTestCase): """Invoke the fix_texturepath spell check through nif toaster""" @@ -34,4 +35,3 @@ def test_non_interactive_fix_texture_path(self): pyffi.toaster:INFO: writing to temporary file pyffi.toaster:INFO:Finished. 
""" - diff --git a/tests/spells/nif/modify/test_delbranches.py b/tests/spells/nif/modify/test_delbranches.py index a009cbc14..b10226a9e 100644 --- a/tests/spells/nif/modify/test_delbranches.py +++ b/tests/spells/nif/modify/test_delbranches.py @@ -1,9 +1,9 @@ """Tests for the modify_delbranches spell and its friends""" +from nose.tools import assert_equals + from tests.scripts.nif import call_niftoaster from tests.utils import BaseNifFileTestCase -from nose.tools import assert_equals - class TestModifyDelBranchesNif(BaseNifFileTestCase): """Invoke the modify_delbranches spell check through nif toaster""" @@ -179,4 +179,4 @@ def test_non_interactive_modify_delalphaprop(self): 'NiTexturingProperty', 'NiSourceTexture', 'NiMaterialProperty', 'NiWireframeProperty', 'NiDitherProperty', 'NiTriStripsData'] - assert_equals(blocks, branches) \ No newline at end of file + assert_equals(blocks, branches) diff --git a/tests/spells/nif/modify/test_delvertexcolor.py b/tests/spells/nif/modify/test_delvertexcolor.py index 15b17215f..09ad9a8a9 100644 --- a/tests/spells/nif/modify/test_delvertexcolor.py +++ b/tests/spells/nif/modify/test_delvertexcolor.py @@ -1,9 +1,9 @@ """Tests for the modify_delvertexcolor spell""" +from nose.tools import assert_equals, assert_false, assert_true + from tests.scripts.nif import call_niftoaster from tests.utils import BaseNifFileTestCase -from nose.tools import assert_equals, assert_false, assert_true - class TestModifyDelBranchesNif(BaseNifFileTestCase): """Invoke the modify_delvertexcolor spell check through nif toaster""" @@ -22,7 +22,6 @@ def test_non_interactive_modify_delbranches(self): assert_equals(blocks, expected) assert_true(self.data.roots[0].children[0].data.has_vertex_colors) - # delete vertex color call_niftoaster("--raise", "modify_delvertexcolor", "--noninteractive", "--verbose=1", self.dest_file) """ @@ -42,6 +41,7 @@ def test_non_interactive_modify_delbranches(self): # check that file has no vertex color blocks = 
[block.__class__.__name__ for block in self.data.blocks] - expected = ['NiNode', 'NiTriStrips', 'NiStencilProperty', 'NiSpecularProperty', 'NiMaterialProperty', 'NiTriStripsData'] + expected = ['NiNode', 'NiTriStrips', 'NiStencilProperty', 'NiSpecularProperty', 'NiMaterialProperty', + 'NiTriStripsData'] assert_equals(blocks, expected) assert_false(self.data.roots[0].children[0].data.has_vertex_colors) diff --git a/tests/spells/nif/optimize/test_collision.py b/tests/spells/nif/optimize/test_collision.py index e67e5f45d..0cbceaaf1 100644 --- a/tests/spells/nif/optimize/test_collision.py +++ b/tests/spells/nif/optimize/test_collision.py @@ -1,9 +1,9 @@ -from tests.utils import BaseNifFileTestCase import nose + import pyffi -from pyffi.spells import Toaster from pyffi.formats.nif import NifFormat - +from pyffi.spells import Toaster +from tests.utils import BaseNifFileTestCase from tests.utils import assert_tuple_values @@ -165,7 +165,8 @@ def test_box_from_packed_collision_optimisation(self): """Test that a collision mesh which is not a box, but whose vertices form a box, is not converted to a box.""" # check initial data - nose.tools.assert_equals(self.data.roots[0].collision_object.body.shape.__class__.__name__, 'bhkMoppBvTreeShape') + nose.tools.assert_equals(self.data.roots[0].collision_object.body.shape.__class__.__name__, + 'bhkMoppBvTreeShape') # run the box spell spell = pyffi.spells.nif.optimize.SpellOptimizeCollisionBox(data=self.data) @@ -179,7 +180,8 @@ def test_box_from_packed_collision_optimisation(self): """ # check that we still have a mopp collision, and not a box collision - nose.tools.assert_equals(self.data.roots[0].collision_object.body.shape.__class__.__name__, 'bhkMoppBvTreeShape') + nose.tools.assert_equals(self.data.roots[0].collision_object.body.shape.__class__.__name__, + 'bhkMoppBvTreeShape') class TestMoppCollisionOptimisationNif(BaseNifFileTestCase): @@ -190,7 +192,6 @@ def setUp(self): super(TestMoppCollisionOptimisationNif, 
self).readNifData() def test_optimise_collision_complex_mopp(self): - # check initial data shape = self.shape nose.tools.assert_equals(shape.sub_shapes[0].num_vertices, 53) diff --git a/tests/spells/nif/optimize/test_delunusedbones.py b/tests/spells/nif/optimize/test_delunusedbones.py index d3aeb5cee..6b83439fd 100644 --- a/tests/spells/nif/optimize/test_delunusedbones.py +++ b/tests/spells/nif/optimize/test_delunusedbones.py @@ -1,6 +1,7 @@ -from tests.utils import BaseNifFileTestCase from nose.tools import assert_equals, assert_is + from pyffi.spells.nif.optimize import SpellDelUnusedBones +from tests.utils import BaseNifFileTestCase class TestDeleteUnusedBonesOptimisationNif(BaseNifFileTestCase): diff --git a/tests/spells/nif/optimize/test_delzeroscale.py b/tests/spells/nif/optimize/test_delzeroscale.py index 31cd9fb20..ef3b9d62a 100644 --- a/tests/spells/nif/optimize/test_delzeroscale.py +++ b/tests/spells/nif/optimize/test_delzeroscale.py @@ -1,9 +1,9 @@ -from tests.utils import BaseNifFileTestCase from nose.tools import assert_true + +from pyffi.spells.nif.optimize import SpellDelZeroScale from tests import test_logger -import pyffi from tests.utils import BaseNifFileTestCase -from pyffi.spells.nif.optimize import SpellDelZeroScale + class TestDelZeroScaleOptimisationNif(BaseNifFileTestCase): """Test for the delete zero scale spell""" diff --git a/tests/spells/nif/optimize/test_mergeduplicates.py b/tests/spells/nif/optimize/test_mergeduplicates.py index f74a7ebcf..3099d9112 100644 --- a/tests/spells/nif/optimize/test_mergeduplicates.py +++ b/tests/spells/nif/optimize/test_mergeduplicates.py @@ -1,10 +1,9 @@ -from tests.scripts.nif import call_niftoaster -from tests.utils import BaseNifFileTestCase +from nose.tools import assert_true, assert_false import pyffi from pyffi.spells import Toaster - -from nose.tools import assert_true, assert_false +from tests.scripts.nif import call_niftoaster +from tests.utils import BaseNifFileTestCase class 
TestMergeDuplicatesOptimisationNif(BaseNifFileTestCase): @@ -16,8 +15,9 @@ def setUp(self): super(TestMergeDuplicatesOptimisationNif, self).copyFile() def test_non_interactive_opt_merge_duplicates(self): - call_niftoaster("--raise", "opt_mergeduplicates", "--dry-run", "--noninteractive", "--verbose=1", self.dest_file) - + call_niftoaster("--raise", "opt_mergeduplicates", "--dry-run", "--noninteractive", "--verbose=1", + self.dest_file) + class TestMergeDuplicatesGeomOptimisationNif(BaseNifFileTestCase): # I didn't need setUp and tearDown here.. @@ -80,4 +80,4 @@ def test_non_interactive_opt_merge_duplicates(self): spell = pyffi.spells.nif.optimize.SpellMergeDuplicates(data=self.data) spell.recurse() - assert_false(has_duplicates(self.data.roots[0])) \ No newline at end of file + assert_false(has_duplicates(self.data.roots[0])) diff --git a/tests/spells/nif/optimize/test_niftoaster.py b/tests/spells/nif/optimize/test_niftoaster.py index 6b4dfea8a..d6d534749 100644 --- a/tests/spells/nif/optimize/test_niftoaster.py +++ b/tests/spells/nif/optimize/test_niftoaster.py @@ -1,8 +1,7 @@ +from pyffi.spells import Toaster +from tests import test_logger from tests.scripts.nif import call_niftoaster from tests.utils import BaseNifFileTestCase -from tests import test_logger - -from pyffi.spells import Toaster class TestToasterOptimisationNif(BaseNifFileTestCase): @@ -39,6 +38,7 @@ def test_simulate_user_optimisation(self): Toaster.toast.__globals__['input'] = input_func call_niftoaster("optimize", "--raise", "--verbose=1", self.dest_file) + inputs = ["yes it is", "n", "y"] # list of inputs of this test diff --git a/tests/spells/nif/optimize/test_vertex_cache.py b/tests/spells/nif/optimize/test_vertex_cache.py index 10a1682e6..2ff389e0c 100644 --- a/tests/spells/nif/optimize/test_vertex_cache.py +++ b/tests/spells/nif/optimize/test_vertex_cache.py @@ -1,7 +1,7 @@ -from tests.utils import BaseNifFileTestCase from nose.tools import assert_equals import pyffi +from tests.utils 
import BaseNifFileTestCase class TestVertexCacheOptimisationNif(BaseNifFileTestCase): diff --git a/tests/spells/test_toaster.py b/tests/spells/test_toaster.py index 228267aef..34039041a 100644 --- a/tests/spells/test_toaster.py +++ b/tests/spells/test_toaster.py @@ -1,7 +1,7 @@ """Tests for pyffi.""" -import tempfile import os import shutil +import tempfile from nose.tools import assert_true, assert_false @@ -34,7 +34,6 @@ def test_toaster_exclude(self): assert_false(toaster.is_admissible_branch_class(NifFormat.NiLODNode)) assert_false(toaster.is_admissible_branch_class(NifFormat.NiMaterialProperty)) - def test_toaster_include(self): """Test include only NiProperty and NiNode inherited types""" toaster = MyToaster(options={"include": ["NiProperty", "NiNode"]}) @@ -44,7 +43,6 @@ def test_toaster_include(self): assert_true(toaster.is_admissible_branch_class(NifFormat.NiLODNode)) # NiNode subclass! assert_true(toaster.is_admissible_branch_class(NifFormat.NiMaterialProperty)) # NiProperties are! 
- def test_toaster_include_and_exclude(self): """Test include NiProperty and NiNode, exclude NiMaterialProp and NiLODNode""" toaster = MyToaster(options={"include": ["NiProperty", "NiNode"], @@ -160,7 +158,3 @@ class TestDelToaster(pyffi.spells.nif.NifToaster): for name, value in sorted(toaster.options.items()): fake_logger.info("%s: %s" % (name, value)) - - - - diff --git a/tests/test_doctests.py b/tests/test_doctests.py index 42661a7a7..85601f237 100644 --- a/tests/test_doctests.py +++ b/tests/test_doctests.py @@ -1,51 +1,52 @@ -import os.path -from os.path import dirname import doctest import logging import sys import unittest import pyffi -import pyffi.object_models.common +import pyffi.formats.bsa +import pyffi.formats.cgf +import pyffi.formats.dds +import pyffi.formats.egm +import pyffi.formats.egt +import pyffi.formats.esp +import pyffi.formats.kfm +import pyffi.formats.nif +import pyffi.formats.psk +import pyffi.formats.rockstar.dir_ +import pyffi.formats.tga +import pyffi.formats.tri import pyffi.object_models -import pyffi.object_models.xml -import pyffi.object_models.mex import pyffi.object_models.any_type -import pyffi.object_models.simple_type import pyffi.object_models.array_type +import pyffi.object_models.basic import pyffi.object_models.binary_type -import pyffi.object_models.xml.basic +import pyffi.object_models.common +import pyffi.object_models.expression +import pyffi.object_models.mex +import pyffi.object_models.simple_type +import pyffi.object_models.xml import pyffi.object_models.xml.bit_struct import pyffi.object_models.xml.enum -import pyffi.object_models.xml.expression import pyffi.object_models.xml.struct_ -import pyffi.utils -import pyffi.utils.tristrip -import pyffi.utils.vertex_cache -import pyffi.utils.mathutils -import pyffi.utils.quickhull -import pyffi.utils.inertia -import pyffi.utils.tangentspace -import pyffi.utils.mopp -import pyffi.formats.nif -import pyffi.formats.cgf -import pyffi.formats.kfm -import pyffi.formats.dds 
-import pyffi.formats.tga -import pyffi.formats.egm -import pyffi.formats.esp -import pyffi.formats.tri -import pyffi.formats.bsa -import pyffi.formats.egt -import pyffi.formats.psk -import pyffi.formats.rockstar.dir_ +import pyffi.object_models.niftoolsxml +import pyffi.object_models.niftoolsxml.bit_struct +import pyffi.object_models.niftoolsxml.enum +import pyffi.object_models.niftoolsxml.struct_ import pyffi.spells import pyffi.spells.nif -import pyffi.spells.nif.fix -import pyffi.spells.nif.modify import pyffi.spells.nif.check import pyffi.spells.nif.dump - +import pyffi.spells.nif.fix +import pyffi.spells.nif.modify +import pyffi.utils +import pyffi.utils.inertia +import pyffi.utils.mathutils +import pyffi.utils.mopp +import pyffi.utils.quickhull +import pyffi.utils.tangentspace +import pyffi.utils.tristrip +import pyffi.utils.vertex_cache # these two do not yet work on py3k from tests import test_logger @@ -91,12 +92,12 @@ def create_suite(): file_paths = { - # Contain outstanding issues - # 'spells/egm/optimize.txt', - # 'formats/kfm/kfmtoaster.txt', #Not Implemented - # various regression tests (outside documentation) - # 'docs-sphinx/intro.rst', #outside of test dir... - } + # Contain outstanding issues + # 'spells/egm/optimize.txt', + # 'formats/kfm/kfmtoaster.txt', #Not Implemented + # various regression tests (outside documentation) + # 'docs-sphinx/intro.rst', #outside of test dir... 
+ } suite.addTest(doctest.DocFileSuite(*file_paths)) diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 502b58f03..0030e2db4 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,13 +1,14 @@ """Tests for utility classes""" -import nose -import nose.tools -import tempfile import os import shutil +import tempfile import unittest from os.path import dirname +import nose +import nose.tools + from pyffi.formats.cgf import CgfFormat from pyffi.formats.nif import NifFormat @@ -17,6 +18,7 @@ def assert_tuple_values(a, b): for elem, j in zip(a, b): nose.tools.assert_almost_equal(elem, j, places=3) + dir_path = __file__ for i in range(2): # recurse up to root repo dir dir_path = dirname(dir_path) diff --git a/tests/utils/test_trianglemesh.py b/tests/utils/test_trianglemesh.py index c1d9babcf..ad321521d 100644 --- a/tests/utils/test_trianglemesh.py +++ b/tests/utils/test_trianglemesh.py @@ -1,6 +1,7 @@ """Tests for pyffi.utils.trianglemesh module.""" import nose.tools + from pyffi.utils.trianglemesh import Face, Mesh, Edge @@ -131,8 +132,8 @@ def test_sorted_faced_locked_mesh(self): self.m.add_face(5, 6, 2) self.m.lock() - #Should be sorted - nose.tools.assert_equals(self.m.faces , [Face(0, 1, 2), Face(1, 2, 3), Face(2, 5, 6)]) + # Should be sorted + nose.tools.assert_equals(self.m.faces, [Face(0, 1, 2), Face(1, 2, 3), Face(2, 5, 6)]) nose.tools.assert_equals(self.m.faces[0].index, 0) nose.tools.assert_equals(self.m.faces[1].index, 1) nose.tools.assert_equals(self.m.faces[2].index, 2) @@ -156,7 +157,6 @@ def test_faces_when_locked(self): self.m.add_face(1, 2, 3) def test_discard_face(self): - f0 = self.m.add_face(0, 1, 2) f1 = self.m.add_face(1, 3, 2) self.m.add_face(2, 3, 4) @@ -164,4 +164,4 @@ def test_discard_face(self): self.m.lock() nose.tools.assert_equals(list(f0.get_adjacent_faces(0)), [Face(1, 3, 2)]) self.m.discard_face(f1) - nose.tools.assert_equals(list(f0.get_adjacent_faces(0)), []) \ No newline at end of file + 
nose.tools.assert_equals(list(f0.get_adjacent_faces(0)), []) diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index 965964807..1209881bf 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -1,8 +1,9 @@ """Tests for pyffi.utils module.""" -from pyffi.utils import unique_map, hex_dump import nose.tools +from pyffi.utils import unique_map, hex_dump + def test_hex_dump(): """Test output of hex_dump function""" diff --git a/tests/utils/test_withref.py b/tests/utils/test_withref.py index 3fac61b2d..5b34d9e8c 100644 --- a/tests/utils/test_withref.py +++ b/tests/utils/test_withref.py @@ -1,6 +1,7 @@ """Tests for pyffi.utils.withref module.""" import nose.tools + from pyffi.utils.withref import ref diff --git a/todo/NifVis/__init__.py b/todo/NifVis/__init__.py index feecce71a..e4cf7806c 100644 --- a/todo/NifVis/__init__.py +++ b/todo/NifVis/__init__.py @@ -1,2 +1 @@ import lizers - diff --git a/todo/NifVis/lizers/NiBinaryVoxelData.py b/todo/NifVis/lizers/NiBinaryVoxelData.py index 8d7b071ec..bc3cee564 100644 --- a/todo/NifVis/lizers/NiBinaryVoxelData.py +++ b/todo/NifVis/lizers/NiBinaryVoxelData.py @@ -2,9 +2,10 @@ radius = 0.1 -def Radius( block ): + +def Radius(block): if not block: return - if not isinstance( block, NifFormat.NiBinaryVoxelData ): return + if not isinstance(block, NifFormat.NiBinaryVoxelData): return global radius @@ -16,16 +17,15 @@ def Radius( block ): return radius -def Draw( block ): +def Draw(block): if not block: return - if not isinstance( block, NifFormat.NiBinaryVoxelData ): return + if not isinstance(block, NifFormat.NiBinaryVoxelData): return global radius GLNoLighting() - SetPointSize( 4 ) - SetColor( 1, 1, 1 ) - - DrawVertices( block.unknownVectors ) + SetPointSize(4) + SetColor(1, 1, 1) + DrawVertices(block.unknownVectors) diff --git a/todo/NifVis/lizers/NiTriBasedGeom.py b/todo/NifVis/lizers/NiTriBasedGeom.py index afbbaada4..b95739d92 100644 --- a/todo/NifVis/lizers/NiTriBasedGeom.py +++ 
b/todo/NifVis/lizers/NiTriBasedGeom.py @@ -1,18 +1,21 @@ -from pyffi.formats.nif import NifFormat from OpenGL.GL import * -def Radius( block ): +from pyffi.formats.nif import NifFormat + + +def Radius(block): if not block: return - if not isinstance( block, NifFormat.NiTriBasedGeom ): return 0 + if not isinstance(block, NifFormat.NiTriBasedGeom): return 0 mesh = block.data if not mesh: return 0 return mesh.radius -def Draw( block ): + +def Draw(block): if not block: return - if not isinstance( block, NifFormat.NiTriBasedGeom ): return + if not isinstance(block, NifFormat.NiTriBasedGeom): return mesh = block.data if not mesh: return @@ -21,14 +24,14 @@ def Draw( block ): norms = mesh.normals tris = mesh.getTriangles() - glColor3f( 1, 1, 1 ) + glColor3f(1, 1, 1) - glBegin( GL_TRIANGLES ) + glBegin(GL_TRIANGLES) for v1, v2, v3 in tris: - glNormal3f( norms[v1].x, norms[v1].y, norms[v1].z ) - glVertex3f( verts[v1].x, verts[v1].y, verts[v1].z ) - glNormal3f( norms[v2].x, norms[v2].y, norms[v2].z ) - glVertex3f( verts[v2].x, verts[v2].y, verts[v2].z ) - glNormal3f( norms[v3].x, norms[v3].y, norms[v3].z ) - glVertex3f( verts[v3].x, verts[v3].y, verts[v3].z ) + glNormal3f(norms[v1].x, norms[v1].y, norms[v1].z) + glVertex3f(verts[v1].x, verts[v1].y, verts[v1].z) + glNormal3f(norms[v2].x, norms[v2].y, norms[v2].z) + glVertex3f(verts[v2].x, verts[v2].y, verts[v2].z) + glNormal3f(norms[v3].x, norms[v3].y, norms[v3].z) + glVertex3f(verts[v3].x, verts[v3].y, verts[v3].z) glEnd() diff --git a/todo/NifVis/lizers/NiTriShape.py b/todo/NifVis/lizers/NiTriShape.py index e52fbc6cd..2c5b6ac38 100644 --- a/todo/NifVis/lizers/NiTriShape.py +++ b/todo/NifVis/lizers/NiTriShape.py @@ -1,7 +1,9 @@ import NiTriBasedGeom -def Radius( block ): - return NiTriBasedGeom.Radius( block ) -def Draw( block ): - NiTriBasedGeom.Draw( block ) +def Radius(block): + return NiTriBasedGeom.Radius(block) + + +def Draw(block): + NiTriBasedGeom.Draw(block) diff --git a/todo/NifVis/lizers/NiTriStrips.py 
b/todo/NifVis/lizers/NiTriStrips.py index 95cc33e92..4388d6b40 100644 --- a/todo/NifVis/lizers/NiTriStrips.py +++ b/todo/NifVis/lizers/NiTriStrips.py @@ -1,8 +1,9 @@ from NifVis.lizers import NiTriBasedGeom -def Radius( block ): - return NiTriBasedGeom.Radius( block ) -def Draw( block ): - NiTriBasedGeom.Draw( block ) +def Radius(block): + return NiTriBasedGeom.Radius(block) + +def Draw(block): + NiTriBasedGeom.Draw(block) diff --git a/todo/NifVis/lizers/__init__.py b/todo/NifVis/lizers/__init__.py index 2bc64940a..aeac85b88 100644 --- a/todo/NifVis/lizers/__init__.py +++ b/todo/NifVis/lizers/__init__.py @@ -1,7 +1,6 @@ -import bhkMoppBvTreeShape_alt -import bhkMoppBvTreeShape import NiBinaryVoxelData import NiTriBasedGeom import NiTriShape import NiTriStrips - +import bhkMoppBvTreeShape +import bhkMoppBvTreeShape_alt diff --git a/todo/NifVis/lizers/bhkMoppBvTreeShape.py b/todo/NifVis/lizers/bhkMoppBvTreeShape.py index 2d7ff0e32..7cf199f8b 100644 --- a/todo/NifVis/lizers/bhkMoppBvTreeShape.py +++ b/todo/NifVis/lizers/bhkMoppBvTreeShape.py @@ -5,20 +5,21 @@ Tree = [] Colors = [ - ( 1, 0, 0 ), - ( 0, 1, 0 ), - ( 0, 0, 1 ), - ( 1, 1, 0 ), - ( 0, 1, 1 ), - ( 1, 0, 1 ), - ( 1, 0.75, 0 ), - ( 0, 1, 0.75 ), - ( 0.75, 0, 1 ) + (1, 0, 0), + (0, 1, 0), + (0, 0, 1), + (1, 1, 0), + (0, 1, 1), + (1, 0, 1), + (1, 0.75, 0), + (0, 1, 0.75), + (0.75, 0, 1) ] -def Radius( mopp ): + +def Radius(mopp): if not mopp: return - if not isinstance( mopp, NifFormat.bhkMoppBvTreeShape ): return + if not isinstance(mopp, NifFormat.bhkMoppBvTreeShape): return global Scale, Corner, Tree @@ -26,26 +27,25 @@ def Radius( mopp ): Corner = mopp.origin - ChunkCode( mopp.moppData, 0, len(mopp.moppData), Tree ) + ChunkCode(mopp.moppData, 0, len(mopp.moppData), Tree) - print "Scale: %.3f - Corner: (%.2f, %.2f, %.2f )" % ( Scale, Corner.x, Corner.y, Corner.z ) + print + "Scale: %.3f - Corner: (%.2f, %.2f, %.2f )" % (Scale, Corner.x, Corner.y, Corner.z) - return ( 0.5 * Scale * 255 ) + return (0.5 * Scale 
* 255) -def Draw( mopp ): +def Draw(mopp): if not mopp: return - if not isinstance( mopp, NifFormat.bhkMoppBvTreeShape ): return + if not isinstance(mopp, NifFormat.bhkMoppBvTreeShape): return global Tree, Colors - GLNoLighting() + SetLineWidth(1) - SetLineWidth( 1 ) - - SetColor( 1, 1, 1, 0.2 ) + SetColor(1, 1, 1, 0.2) Shape = mopp.shape.data tris = Shape.triangles @@ -53,14 +53,13 @@ def Draw( mopp ): colnum = 0 for t in tris: - SetColorA( Colors[colnum%len(Colors)] ) - SetNormal( t.normal ) - DrawTriangleW( t.triangle, verts ) + SetColorA(Colors[colnum % len(Colors)]) + SetNormal(t.normal) + DrawTriangleW(t.triangle, verts) colnum += 1 - - SetPointSize( 4 ) - SetLineWidth( 3 ) + SetPointSize(4) + SetLineWidth(3) xstep = 8 ystep = 8 @@ -72,48 +71,48 @@ def Draw( mopp ): while y < 256: z = 0 while z < 256: - DrawCode( ( x, y, z ), Tree, [ 0, 0, 0 ], [ 0, 0, 0 ] ) + DrawCode((x, y, z), Tree, [0, 0, 0], [0, 0, 0]) z += zstep y += ystep x += xstep -def DrawCode( pos, tree, off, dim ): +def DrawCode(pos, tree, off, dim): global Scale, Corner for chunk in tree: code = chunk[0] - if code in range( 0x10, 0x20 ): + if code in range(0x10, 0x20): if code == 0x10: - a = (1.0 * (pos[0]+1) / (chunk[1]+1)) - b = (1.0 * (chunk[2]+1) / (pos[0]+1)) + a = (1.0 * (pos[0] + 1) / (chunk[1] + 1)) + b = (1.0 * (chunk[2] + 1) / (pos[0] + 1)) if a < b: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) return elif code == 0x11: - a = (1.0 * (pos[1]+1) / (chunk[1]+1)) - b = (1.0 * (chunk[2]+1) / (pos[1]+1)) + a = (1.0 * (pos[1] + 1) / (chunk[1] + 1)) + b = (1.0 * (chunk[2] + 1) / (pos[1] + 1)) if a < b: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) return elif code == 0x17: - a = (1.0 * (dim[0]-pos[0]+1) / (pos[1]+1)) - b = (1.0 * (chunk[2]+1) / (chunk[1]+1)) + a = (1.0 * (dim[0] - pos[0] + 1) / (pos[1] + 1)) + b = (1.0 * (chunk[2] + 1) / (chunk[1] + 1)) if a > b: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) 
return elif code == 0x18: - a = (1.0 * (chunk[2]+1) / (chunk[1]+1)) - b = (1.0 * (pos[0]+1) / (pos[1]+1)) + a = (1.0 * (chunk[2] + 1) / (chunk[1] + 1)) + b = (1.0 * (pos[0] + 1) / (pos[1] + 1)) if a > b: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) return - elif code in range( 0x20, 0x30 ): + elif code in range(0x20, 0x30): if code == 0x26: if pos[0] < chunk[1] or pos[0] > chunk[2]: return @@ -129,41 +128,41 @@ def DrawCode( pos, tree, off, dim ): return dim[2] = chunk[2] - elif code in range( 0x30, 0x50 ): + elif code in range(0x30, 0x50): v = NifFormat.Vector3() - v.x = Corner.x + Scale * ( off[0] + pos[0] ) - v.y = Corner.y + Scale * ( off[1] + pos[1] ) - v.z = Corner.z + Scale * ( off[2] + pos[2] ) + v.x = Corner.x + Scale * (off[0] + pos[0]) + v.y = Corner.y + Scale * (off[1] + pos[1]) + v.z = Corner.z + Scale * (off[2] + pos[2]) - SetColorA( Colors[(code-0x30)%len(Colors)] ) + SetColorA(Colors[(code - 0x30) % len(Colors)]) - DrawVertex( v ) + DrawVertex(v) return -def ChunkCode( bytes, index, length, tree ): +def ChunkCode(bytes, index, length, tree): stop = index + length while index < stop: code = bytes[index] chunk = () jump = 1 - if code in range( 0x10, 0x20 ): - subsize = bytes[index+3] - subtree = ChunkCode( bytes, index+4, subsize, [] ) - chunk = ( code, bytes[index+1], bytes[index+2], subtree ) + if code in range(0x10, 0x20): + subsize = bytes[index + 3] + subtree = ChunkCode(bytes, index + 4, subsize, []) + chunk = (code, bytes[index + 1], bytes[index + 2], subtree) jump = 4 + subsize - elif code in range( 0x20, 0x30 ): - if code in [ 0x26, 0x27, 0x28 ]: - chunk = ( code, bytes[index+1], bytes[index+2] ) + elif code in range(0x20, 0x30): + if code in [0x26, 0x27, 0x28]: + chunk = (code, bytes[index + 1], bytes[index + 2]) jump = 3 - elif code in range( 0x30, 0x4f ): - chunk = ( bytes[index], 0 ) + elif code in range(0x30, 0x4f): + chunk = (bytes[index], 0) jump = 1 - tree.append( chunk ) + tree.append(chunk) index += jump 
return tree diff --git a/todo/NifVis/lizers/bhkMoppBvTreeShape_alt.py b/todo/NifVis/lizers/bhkMoppBvTreeShape_alt.py index 5ba0c11c0..4afcc2152 100644 --- a/todo/NifVis/lizers/bhkMoppBvTreeShape_alt.py +++ b/todo/NifVis/lizers/bhkMoppBvTreeShape_alt.py @@ -1,26 +1,27 @@ -from NifVis.ua import * - import random +from NifVis.ua import * + Scale = 0.1 Corner = NifFormat.Vector3() Tree = [] Colors = [ - ( 1, 0, 0 ), - ( 0, 1, 0 ), - ( 0, 0, 1 ), - ( 1, 1, 0 ), - ( 0, 1, 1 ), - ( 1, 0, 1 ), - ( 1, 0.75, 0 ), - ( 0, 1, 0.75 ), - ( 0.75, 0, 1 ) + (1, 0, 0), + (0, 1, 0), + (0, 0, 1), + (1, 1, 0), + (0, 1, 1), + (1, 0, 1), + (1, 0.75, 0), + (0, 1, 0.75), + (0.75, 0, 1) ] -def Radius( mopp ): + +def Radius(mopp): if not mopp: return - if not isinstance( mopp, NifFormat.bhkMoppBvTreeShape ): return + if not isinstance(mopp, NifFormat.bhkMoppBvTreeShape): return global Scale, Corner, Tree @@ -28,26 +29,25 @@ def Radius( mopp ): Corner = mopp.objectCorner - ChunkCode( mopp.moppData, 0, len(mopp.moppData), Tree, verbose = True ) + ChunkCode(mopp.moppData, 0, len(mopp.moppData), Tree, verbose=True) - print "Scale: %.3f - Corner: (%.2f, %.2f, %.2f )" % ( Scale, Corner.x, Corner.y, Corner.z ) + print + "Scale: %.3f - Corner: (%.2f, %.2f, %.2f )" % (Scale, Corner.x, Corner.y, Corner.z) - return ( 0.5 * Scale * 255 ) + return (0.5 * Scale * 255) -def Draw( mopp ): +def Draw(mopp): if not mopp: return - if not isinstance( mopp, NifFormat.bhkMoppBvTreeShape ): return + if not isinstance(mopp, NifFormat.bhkMoppBvTreeShape): return global Tree, Colors - GLNoLighting() + SetLineWidth(1) - SetLineWidth( 1 ) - - SetColor( 1, 1, 1, 0.2 ) + SetColor(1, 1, 1, 0.2) Shape = mopp.shape.data tris = Shape.triangles @@ -55,14 +55,13 @@ def Draw( mopp ): colnum = 0 for t in tris: - SetColorA( Colors[colnum%len(Colors)] ) - SetNormal( t.normal ) - DrawTriangleW( t.triangle, verts ) + SetColorA(Colors[colnum % len(Colors)]) + SetNormal(t.normal) + DrawTriangleW(t.triangle, verts) colnum += 1 - - 
SetPointSize( 4 ) - SetLineWidth( 3 ) + SetPointSize(4) + SetLineWidth(3) xstep = 5 ystep = 5 @@ -74,57 +73,57 @@ def Draw( mopp ): while y < 256: z = 0 while z < 256: - DrawCode( ( x, y, z ), Tree, [ 0, 0, 0 ], [ 0, 0, 0 ] ) + DrawCode((x, y, z), Tree, [0, 0, 0], [0, 0, 0]) z += zstep y += ystep x += xstep -def DrawCode( pos, tree, off, dim ): +def DrawCode(pos, tree, off, dim): global Scale, Corner for chunk in tree: code = chunk[0] - if code in range( 0x10, 0x20 ): + if code in range(0x10, 0x20): if code == 0x10: if pos[0] <= chunk[1]: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) if pos[0] <= chunk[2]: return elif code == 0x11: if pos[1] <= chunk[1]: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) if pos[1] <= chunk[2]: return elif code == 0x12: if pos[2] <= chunk[1]: - DrawCode( pos, chunk[3], off, dim ) + DrawCode(pos, chunk[3], off, dim) if pos[2] <= chunk[2]: return else: # cannot resolve further - DrawCode( pos, chunk[3], off, dim ) - -## elif code == 0x16: -## if pos[1]*chunk[1] <= pos[2]*chunk[2]: -## DrawCode( pos, chunk[3], off, dim ) -## return -## -## elif code == 0x17: -## if pos[2]*chunk[1] <= pos[0]*chunk[2]: -## DrawCode( pos, chunk[3], off, dim ) -## return -## -## elif code == 0x18: -## if pos[0]*chunk[2] <= pos[1]*chunk[1]: -## DrawCode( pos, chunk[3], off, dim ) -## return - - elif code in range( 0x20, 0x30 ): + DrawCode(pos, chunk[3], off, dim) + + ## elif code == 0x16: + ## if pos[1]*chunk[1] <= pos[2]*chunk[2]: + ## DrawCode( pos, chunk[3], off, dim ) + ## return + ## + ## elif code == 0x17: + ## if pos[2]*chunk[1] <= pos[0]*chunk[2]: + ## DrawCode( pos, chunk[3], off, dim ) + ## return + ## + ## elif code == 0x18: + ## if pos[0]*chunk[2] <= pos[1]*chunk[1]: + ## DrawCode( pos, chunk[3], off, dim ) + ## return + + elif code in range(0x20, 0x30): if code == 0x26: if pos[0] < chunk[1] or pos[0] > chunk[2]: return @@ -140,57 +139,64 @@ def DrawCode( pos, tree, off, dim ): return dim[2] = 
chunk[2] - elif code in range( 0x30, 0x50 ): + elif code in range(0x30, 0x50): v = NifFormat.Vector3() - v.x = Corner.x + Scale * ( off[0] + pos[0] ) + random.uniform(0,0.1) - v.y = Corner.y + Scale * ( off[1] + pos[1] ) + random.uniform(0,0.1) - v.z = Corner.z + Scale * ( off[2] + pos[2] ) + random.uniform(0,0.1) + v.x = Corner.x + Scale * (off[0] + pos[0]) + random.uniform(0, 0.1) + v.y = Corner.y + Scale * (off[1] + pos[1]) + random.uniform(0, 0.1) + v.z = Corner.z + Scale * (off[2] + pos[2]) + random.uniform(0, 0.1) - SetColorA( Colors[(code-0x30)%len(Colors)] ) + SetColorA(Colors[(code - 0x30) % len(Colors)]) - DrawVertex( v ) + DrawVertex(v) return else: - print "WARNING: unknown code 0x%X"%code + print + "WARNING: unknown code 0x%X" % code -def ChunkCode( bytes, index, length, tree, depth = 0, verbose = False ): +def ChunkCode(bytes, index, length, tree, depth=0, verbose=False): stop = index + length while index < stop: code = bytes[index] chunk = () jump = 1 - if code in range( 0x10, 0x20 ): + if code in range(0x10, 0x20): if verbose: - print " "*depth, hex(code), bytes[index+1], bytes[index+2] - subsize = bytes[index+3] - subtree = ChunkCode( bytes, index+4, subsize, [], depth+1, verbose ) - chunk = ( code, bytes[index+1], bytes[index+2], subtree ) + print + " " * depth, hex(code), bytes[index + 1], bytes[index + 2] + subsize = bytes[index + 3] + subtree = ChunkCode(bytes, index + 4, subsize, [], depth + 1, verbose) + chunk = (code, bytes[index + 1], bytes[index + 2], subtree) jump = 4 + subsize - elif code in range( 0x20, 0x30 ): - if code in [ 0x26, 0x27, 0x28 ]: + elif code in range(0x20, 0x30): + if code in [0x26, 0x27, 0x28]: if verbose: - print " "*depth, hex(code), bytes[index+1], bytes[index+2], "[%s-axis check]"%("XYZ"[code-0x26]) - chunk = ( code, bytes[index+1], bytes[index+2] ) + print + " " * depth, hex(code), bytes[index + 1], bytes[index + 2], "[%s-axis check]" % ( + "XYZ"[code - 0x26]) + chunk = (code, bytes[index + 1], bytes[index + 2]) 
jump = 3 else: if verbose: - print " "*depth, hex(code), "(unknown)" + print + " " * depth, hex(code), "(unknown)" - elif code in range( 0x30, 0x4f ): + elif code in range(0x30, 0x4f): if verbose: - print " "*depth, hex(code), "[triangle %i check]"%(code-0x30) - chunk = ( bytes[index], 0 ) + print + " " * depth, hex(code), "[triangle %i check]" % (code - 0x30) + chunk = (bytes[index], 0) jump = 1 else: if verbose: - print " "*depth, hex(code), "(unknown)" + print + " " * depth, hex(code), "(unknown)" - tree.append( chunk ) + tree.append(chunk) index += jump return tree diff --git a/todo/NifVis/ua.py b/todo/NifVis/ua.py index f3666d35d..651294b11 100644 --- a/todo/NifVis/ua.py +++ b/todo/NifVis/ua.py @@ -1,87 +1,100 @@ -from pyffi.formats.nif import NifFormat from OpenGL.GL import * +from pyffi.formats.nif import NifFormat + def GLNoLighting(): - glDisable( GL_LIGHTING ) + glDisable(GL_LIGHTING) + def GLLighting(): - glEnable( GL_LIGHTING ) + glEnable(GL_LIGHTING) + + +def SetPointSize(size): + glPointSize(size) + + +def SetLineWidth(width): + glLineWidth(width) -def SetPointSize( size ): - glPointSize( size ) +def SetColor(r, g, b, a=1.0): + glColor4f(r, g, b, a) -def SetLineWidth( width ): - glLineWidth( width ) -def SetColor( r, g, b, a = 1.0 ): - glColor4f( r, g, b, a ) +def SetColorA(color): + if not isinstance(color, tuple): return + if len(color) == 3: + SetColor(color[0], color[1], color[2]) + elif len(color) == 4: + SetColor(color[0], color[1], color[2], color[3]) -def SetColorA( color ): - if not isinstance( color, tuple ): return - if len( color ) == 3: - SetColor( color[0], color[1], color[2] ) - elif len( color ) == 4: - SetColor( color[0], color[1], color[2], color[3] ) +def BeginDrawing(mode): + glBegin(mode) -def BeginDrawing( mode ): - glBegin( mode ) def EndDrawing(): glEnd() -def SetNormal( n ): - glNormal3f( n.x, n.y, n.z ) +def SetNormal(n): + glNormal3f(n.x, n.y, n.z) -def DrawVertex( v, mode = True ): - if mode: glBegin( GL_POINTS ) - 
glVertex3f( v.x, v.y, v.z ) +def DrawVertex(v, mode=True): + if mode: glBegin(GL_POINTS) + glVertex3f(v.x, v.y, v.z) if mode: glEnd() -def DrawVertices( verts, mode = True ): - if mode: glBegin( GL_POINTS ) + +def DrawVertices(verts, mode=True): + if mode: glBegin(GL_POINTS) for v in verts: - DrawVertex( v, False ) + DrawVertex(v, False) if mode: glEnd() -def DrawTriangle( t, verts, mode = True ): - if mode: glBegin( GL_TRIANGLES ) - DrawVertex( verts[t.v1], False ) - DrawVertex( verts[t.v2], False ) - DrawVertex( verts[t.v3], False ) + +def DrawTriangle(t, verts, mode=True): + if mode: glBegin(GL_TRIANGLES) + DrawVertex(verts[t.v1], False) + DrawVertex(verts[t.v2], False) + DrawVertex(verts[t.v3], False) if mode: glEnd() -def DrawTriangleW( t, verts, mode = True ): - if mode: glBegin( GL_LINE_STRIP ) - DrawVertex( verts[t.v1], False ) - DrawVertex( verts[t.v2], False ) - DrawVertex( verts[t.v3], False ) - DrawVertex( verts[t.v1], False ) + +def DrawTriangleW(t, verts, mode=True): + if mode: glBegin(GL_LINE_STRIP) + DrawVertex(verts[t.v1], False) + DrawVertex(verts[t.v2], False) + DrawVertex(verts[t.v3], False) + DrawVertex(verts[t.v1], False) if mode: glEnd() -def DrawTriangles( tris, verts, mode = True ): - if mode: glBegin( GL_TRIANGLES ) + +def DrawTriangles(tris, verts, mode=True): + if mode: glBegin(GL_TRIANGLES) for t in tris: - DrawTriangle( t, verts, False ) + DrawTriangle(t, verts, False) if mode: glEnd() -def DrawTrianglesW( tris, verts, mode = True ): + +def DrawTrianglesW(tris, verts, mode=True): for t in tris: - DrawTriangleW( t, verts, True ) + DrawTriangleW(t, verts, True) + -def DrawLine( v, w, mode = True ): - if mode: glBegin( GL_LINES ) - DrawVertex( v, False ) - DrawVertex( w, False ) +def DrawLine(v, w, mode=True): + if mode: glBegin(GL_LINES) + DrawVertex(v, False) + DrawVertex(w, False) if mode: glEnd() -def DrawOffset( v, off ): + +def DrawOffset(v, off): w = NifFormat.Vector3() w.x = v.x + off.x w.y = v.y + off.y w.z = v.z + off.z - 
DrawLine( v, w ) + DrawLine(v, w) diff --git a/todo/NifVis/vis_cfg.py b/todo/NifVis/vis_cfg.py index 5a14d9847..29f4f3a31 100644 --- a/todo/NifVis/vis_cfg.py +++ b/todo/NifVis/vis_cfg.py @@ -1,4 +1,4 @@ -_WINDOW_WIDTH = 640 -_WINDOW_HEIGHT = 480 +_WINDOW_WIDTH = 640 +_WINDOW_HEIGHT = 480 -_FONT_SIZE = 24 \ No newline at end of file +_FONT_SIZE = 24 diff --git a/todo/NifVis/vis_gl.py b/todo/NifVis/vis_gl.py index 32ecc5da8..afdfec5a2 100644 --- a/todo/NifVis/vis_gl.py +++ b/todo/NifVis/vis_gl.py @@ -1,12 +1,9 @@ +import pygame.display from OpenGL.GL import * from OpenGL.GLU import * -import pygame.display - import vis_cfg - - Aspect = 4 / 3 Height = 480 @@ -17,76 +14,75 @@ zRot = 0 - -def Initialize( radius ): +def Initialize(radius): global Radius, Aspect, Height Radius = radius Width = vis_cfg._WINDOW_WIDTH - Height = min( vis_cfg._WINDOW_WIDTH, vis_cfg._WINDOW_HEIGHT ) + Height = min(vis_cfg._WINDOW_WIDTH, vis_cfg._WINDOW_HEIGHT) Aspect = 1.0 * Width / Height - print "OpenGL Setup: Radius %.2f, Resolution %dx%d, Aspect %.2f" % ( Radius, Width, Height, Aspect ) + print + "OpenGL Setup: Radius %.2f, Resolution %dx%d, Aspect %.2f" % (Radius, Width, Height, Aspect) def InitFrame(): global Radius, Aspect, Height # Viewport - glViewport( 0, ( vis_cfg._WINDOW_HEIGHT - Height ) / 2, vis_cfg._WINDOW_WIDTH, Height ) + glViewport(0, (vis_cfg._WINDOW_HEIGHT - Height) / 2, vis_cfg._WINDOW_WIDTH, Height) # Initialize - glClearColor( 0.5, 0.5, 0.5, 1 ) + glClearColor(0.5, 0.5, 0.5, 1) - glShadeModel( GL_SMOOTH ) - glEnable( GL_MULTISAMPLE_ARB ) + glShadeModel(GL_SMOOTH) + glEnable(GL_MULTISAMPLE_ARB) - glClearDepth( 1 ) - glEnable( GL_DEPTH_TEST ) - glDepthFunc( GL_LEQUAL ) + glClearDepth(1) + glEnable(GL_DEPTH_TEST) + glDepthFunc(GL_LEQUAL) - glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT ); + glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); - glEnable( GL_ALPHA_TEST ) - glEnable( GL_BLEND ) - glBlendFunc( GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA ) + glEnable(GL_ALPHA_TEST) 
+ glEnable(GL_BLEND) + glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) # Light source - glLightfv( GL_LIGHT0,GL_AMBIENT,[ .5, .5, .5, 1. ] ) - glLightfv( GL_LIGHT0,GL_DIFFUSE,[ .8, .8, .8, 1. ] ) - glLightfv( GL_LIGHT0,GL_SPECULAR,[ 1., 1., 1., 1. ] ) - glEnable( GL_LIGHT0 ) - glEnable( GL_LIGHTING ) + glLightfv(GL_LIGHT0, GL_AMBIENT, [.5, .5, .5, 1.]) + glLightfv(GL_LIGHT0, GL_DIFFUSE, [.8, .8, .8, 1.]) + glLightfv(GL_LIGHT0, GL_SPECULAR, [1., 1., 1., 1.]) + glEnable(GL_LIGHT0) + glEnable(GL_LIGHTING) # Projection - glMatrixMode( GL_PROJECTION ) + glMatrixMode(GL_PROJECTION) glLoadIdentity() - gluPerspective ( 45, Aspect, 0.1, -Radius * 2 ) + gluPerspective(45, Aspect, 0.1, -Radius * 2) # Initialize ModelView matrix - glMatrixMode( GL_MODELVIEW ) + glMatrixMode(GL_MODELVIEW) glLoadIdentity() # View translation - glTranslatef( 0, 0, -Radius * 4 ) + glTranslatef(0, 0, -Radius * 4) # Set up light always from front glPushMatrix() glLoadIdentity() - glLightfv( GL_LIGHT0,GL_POSITION,[ 0, 0, 0, 1 ] ) + glLightfv(GL_LIGHT0, GL_POSITION, [0, 0, 0, 1]) glPopMatrix() # View rotation - glRotatef( xRot, 1, 0, 0 ) - glRotatef( yRot, 0, 1, 0 ) - glRotatef( zRot, 0, 0, 1 ) - - glRotatef( -90, 1, 0, 0 ) + glRotatef(xRot, 1, 0, 0) + glRotatef(yRot, 0, 1, 0) + glRotatef(zRot, 0, 0, 1) + glRotatef(-90, 1, 0, 0) def FinalizeFrame(): @@ -94,7 +90,7 @@ def FinalizeFrame(): def InitDraw(): - glPushAttrib( GL_COLOR_BUFFER_BIT | GL_ENABLE_BIT | GL_POINT_BIT | GL_LINE_BIT ) + glPushAttrib(GL_COLOR_BUFFER_BIT | GL_ENABLE_BIT | GL_POINT_BIT | GL_LINE_BIT) glPushMatrix() @@ -103,87 +99,84 @@ def FinalizeDraw(): glPopAttrib() -def RotateViewBy( xAngle, yAngle, zAngle ): +def RotateViewBy(xAngle, yAngle, zAngle): global xRot, yRot, zRot xRot += xAngle yRot += yAngle zRot += zAngle - NormalizeAngle( xAngle ) - NormalizeAngle( yAngle ) - NormalizeAngle( zAngle ) - + NormalizeAngle(xAngle) + NormalizeAngle(yAngle) + NormalizeAngle(zAngle) -def RotateView( xAngle, yAngle, zAngle ): +def 
RotateView(xAngle, yAngle, zAngle): global xRot, yRot, zRot xRot = xAngle yRot = yAngle zRot = zAngle - NormalizeAngle( xAngle ) - NormalizeAngle( yAngle ) - NormalizeAngle( zAngle ) + NormalizeAngle(xAngle) + NormalizeAngle(yAngle) + NormalizeAngle(zAngle) - -def NormalizeAngle( angle ): +def NormalizeAngle(angle): while angle < 0: angle += 360 while angle >= 360: angle -= 360 - def DrawAxes(): global Radius - glDisable( GL_LIGHTING ) + glDisable(GL_LIGHTING) glPushMatrix() axis = Radius * 1.2 arrow = Radius / 36.0 - glBegin( GL_LINES ) - - glColor3f( 1.0, 0.0, 0.0 ) - glVertex3f( - axis, 0, 0 ) - glVertex3f( + axis, 0, 0 ) - glVertex3f( + axis, 0, 0 ) - glVertex3f( + axis - 3 * arrow, + arrow, + arrow ) - glVertex3f( + axis, 0, 0 ) - glVertex3f( + axis - 3 * arrow, - arrow, + arrow ) - glVertex3f( + axis, 0, 0 ) - glVertex3f( + axis - 3 * arrow, + arrow, - arrow ) - glVertex3f( + axis, 0, 0 ) - glVertex3f( + axis - 3 * arrow, - arrow, - arrow ) - glColor3f( 0.0, 1.0, 0.0 ) - glVertex3f( 0, - axis, 0 ) - glVertex3f( 0, + axis, 0 ) - glVertex3f( 0, + axis, 0 ) - glVertex3f( + arrow, + axis - 3 * arrow, + arrow ) - glVertex3f( 0, + axis, 0 ) - glVertex3f( - arrow, + axis - 3 * arrow, + arrow ) - glVertex3f( 0, + axis, 0 ) - glVertex3f( + arrow, + axis - 3 * arrow, - arrow ) - glVertex3f( 0, + axis, 0 ) - glVertex3f( - arrow, + axis - 3 * arrow, - arrow ) - glColor3f( 0.0, 0.0, 1.0 ) - glVertex3f( 0, 0, - axis ) - glVertex3f( 0, 0, + axis ) - glVertex3f( 0, 0, + axis ) - glVertex3f( + arrow, + arrow, + axis - 3 * arrow ) - glVertex3f( 0, 0, + axis ) - glVertex3f( - arrow, + arrow, + axis - 3 * arrow ) - glVertex3f( 0, 0, + axis ) - glVertex3f( + arrow, - arrow, + axis - 3 * arrow ) - glVertex3f( 0, 0, + axis ) - glVertex3f( - arrow, - arrow, + axis - 3 * arrow ) + glBegin(GL_LINES) + + glColor3f(1.0, 0.0, 0.0) + glVertex3f(- axis, 0, 0) + glVertex3f(+ axis, 0, 0) + glVertex3f(+ axis, 0, 0) + glVertex3f(+ axis - 3 * arrow, + arrow, + arrow) + glVertex3f(+ axis, 0, 0) 
+ glVertex3f(+ axis - 3 * arrow, - arrow, + arrow) + glVertex3f(+ axis, 0, 0) + glVertex3f(+ axis - 3 * arrow, + arrow, - arrow) + glVertex3f(+ axis, 0, 0) + glVertex3f(+ axis - 3 * arrow, - arrow, - arrow) + glColor3f(0.0, 1.0, 0.0) + glVertex3f(0, - axis, 0) + glVertex3f(0, + axis, 0) + glVertex3f(0, + axis, 0) + glVertex3f(+ arrow, + axis - 3 * arrow, + arrow) + glVertex3f(0, + axis, 0) + glVertex3f(- arrow, + axis - 3 * arrow, + arrow) + glVertex3f(0, + axis, 0) + glVertex3f(+ arrow, + axis - 3 * arrow, - arrow) + glVertex3f(0, + axis, 0) + glVertex3f(- arrow, + axis - 3 * arrow, - arrow) + glColor3f(0.0, 0.0, 1.0) + glVertex3f(0, 0, - axis) + glVertex3f(0, 0, + axis) + glVertex3f(0, 0, + axis) + glVertex3f(+ arrow, + arrow, + axis - 3 * arrow) + glVertex3f(0, 0, + axis) + glVertex3f(- arrow, + arrow, + axis - 3 * arrow) + glVertex3f(0, 0, + axis) + glVertex3f(+ arrow, - arrow, + axis - 3 * arrow) + glVertex3f(0, 0, + axis) + glVertex3f(- arrow, - arrow, + axis - 3 * arrow) glEnd() glPopMatrix() - glEnable( GL_LIGHTING ) + glEnable(GL_LIGHTING) diff --git a/todo/NifVis/vis_nif.py b/todo/NifVis/vis_nif.py index 5d8d6c79f..c69382f30 100644 --- a/todo/NifVis/vis_nif.py +++ b/todo/NifVis/vis_nif.py @@ -5,48 +5,52 @@ Version = 0 UserVersion = 0 + # # A simple custom exception class. 
# class NIFImportError(Exception): def __init__(self, value): self.value = value + def __str__(self): return repr(self.value) -def LoadNif( filename ): +def LoadNif(filename): global Version, UserVersion try: - print "Reading Nif: %s" % filename - f = open( filename, "rb" ) - Version, UserVersion = NifFormat.getVersion( f ) + print + "Reading Nif: %s" % filename + f = open(filename, "rb") + Version, UserVersion = NifFormat.getVersion(f) if Version >= 0: - print "( Version 0x%08X )" % Version - root_blocks = NifFormat.read(f, version = Version, user_version = UserVersion, verbose = 0) - for block in root_blocks: - AddBlock( block ) + print + "( Version 0x%08X )" % Version + root_blocks = NifFormat.read(f, version=Version, user_version=UserVersion, verbose=0) + for block in root_blocks: + AddBlock(block) elif Version == -1: - raise NIFImportError( "Unsupported NIF version." ) + raise NIFImportError("Unsupported NIF version.") else: - raise NIFImportError( "Not a NIF file." ) + raise NIFImportError("Not a NIF file.") - except NIFImportError, e: # in that case, we raise a menu instead of an exception - print 'NIFImportError: ' + e.value + except NIFImportError, e: # in that case, we raise a menu instead of an exception + print + 'NIFImportError: ' + e.value return - -def AddBlock( block ): +def AddBlock(block): global Version, UserVersion if not block: return - if not TypeRegistry.has_key( type( block ).__name__ ): - TypeRegistry[type( block ).__name__] = [] + if not TypeRegistry.has_key(type(block).__name__): + TypeRegistry[type(block).__name__] = [] - TypeRegistry[type( block ).__name__].append( block ) - BlockRegistry.append( block ) + TypeRegistry[type(block).__name__].append(block) + BlockRegistry.append(block) for child in block.get_refs(): - if not child in BlockRegistry: AddBlock( child ) + if not child in BlockRegistry: AddBlock(child) diff --git a/todo/NifVis/vis_run.py b/todo/NifVis/vis_run.py index 36c109274..e52248a4e 100644 --- a/todo/NifVis/vis_run.py 
+++ b/todo/NifVis/vis_run.py @@ -4,12 +4,9 @@ import vis_cfg import vis_gl - - IsRunning = False - def EventHandler(): global IsRunning @@ -20,13 +17,11 @@ def EventHandler(): elif event.type == MOUSEBUTTONDOWN: if event.button == 2: - vis_gl.RotateView( 0, 0, 0 ) + vis_gl.RotateView(0, 0, 0) elif event.type == MOUSEMOTION: if event.buttons[0]: - vis_gl.RotateViewBy( event.rel[1], event.rel[0], 0 ) - - + vis_gl.RotateViewBy(event.rel[1], event.rel[0], 0) def Initialize(): @@ -34,8 +29,8 @@ def Initialize(): pygame.display.init() - window = pygame.display.set_mode( ( vis_cfg._WINDOW_WIDTH, vis_cfg._WINDOW_HEIGHT ), OPENGL | DOUBLEBUF ) - pygame.display.set_caption( 'Nif Visualizer' ) + window = pygame.display.set_mode((vis_cfg._WINDOW_WIDTH, vis_cfg._WINDOW_HEIGHT), OPENGL | DOUBLEBUF) + pygame.display.set_caption('Nif Visualizer') screen = pygame.display.get_surface() diff --git a/todo/ez_setup.py b/todo/ez_setup.py index 38c09c624..d4b98d55f 100644 --- a/todo/ez_setup.py +++ b/todo/ez_setup.py @@ -14,8 +14,9 @@ This file can also be run as a script to install or upgrade setuptools. """ import sys + DEFAULT_VERSION = "0.6c5" -DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3] +DEFAULT_URL = "http://cheeseshop.python.org/packages/%s/s/setuptools/" % sys.version[:3] md5_data = { 'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca', @@ -43,22 +44,23 @@ import sys, os + def _validate_md5(egg_name, data): if egg_name in md5_data: from md5 import md5 digest = md5(data).hexdigest() if digest != md5_data[egg_name]: - print >>sys.stderr, ( - "md5 validation of %s failed! (Possible download problem?)" - % egg_name + print >> sys.stderr, ( + "md5 validation of %s failed! 
(Possible download problem?)" + % egg_name ) sys.exit(2) return data def use_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - download_delay=15 + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + download_delay=15 ): """Automatically find/download setuptools and make it available on sys.path @@ -74,32 +76,34 @@ def use_setuptools( try: import setuptools if setuptools.__version__ == '0.0.1': - print >>sys.stderr, ( - "You have an obsolete version of setuptools installed. Please\n" - "remove it from your system entirely before rerunning this script." + print >> sys.stderr, ( + "You have an obsolete version of setuptools installed. Please\n" + "remove it from your system entirely before rerunning this script." ) sys.exit(2) except ImportError: egg = download_setuptools(version, download_base, to_dir, download_delay) sys.path.insert(0, egg) - import setuptools; setuptools.bootstrap_install_from = egg + import setuptools; + setuptools.bootstrap_install_from = egg import pkg_resources try: - pkg_resources.require("setuptools>="+version) + pkg_resources.require("setuptools>=" + version) except pkg_resources.VersionConflict, e: # XXX could we install in a subprocess here? - print >>sys.stderr, ( + print >> sys.stderr, ( "The required version of setuptools (>=%s) is not available, and\n" "can't be installed while this script is running. Please install\n" " a more recent version first.\n\n(Currently using %r)" ) % (version, e.args[0]) sys.exit(2) + def download_setuptools( - version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, - delay = 15 + version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, + delay=15 ): """Download setuptools from a specified location and return its filename @@ -108,8 +112,8 @@ def download_setuptools( with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. 
""" - import urllib2, shutil - egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3]) + import urllib2 + egg_name = "setuptools-%s-py%s.egg" % (version, sys.version[:3]) url = download_base + egg_name saveto = os.path.join(to_dir, egg_name) src = dst = None @@ -131,19 +135,23 @@ def download_setuptools( and place it in this directory before rerunning this script.) ---------------------------------------------------------------------------""", - version, download_base, delay, url - ); from time import sleep; sleep(delay) + version, download_base, delay, url + ); + from time import sleep; + sleep(delay) log.warn("Downloading %s", url) src = urllib2.urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. data = _validate_md5(egg_name, src.read()) - dst = open(saveto,"wb"); dst.write(data) + dst = open(saveto, "wb"); + dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) + def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" @@ -153,9 +161,9 @@ def main(argv, version=DEFAULT_VERSION): egg = None try: egg = download_setuptools(version, delay=0) - sys.path.insert(0,egg) + sys.path.insert(0, egg) from setuptools.command.easy_install import main - return main(list(argv)+[egg]) # we're done here + return main(list(argv) + [egg]) # we're done here finally: if egg and os.path.exists(egg): os.unlink(egg) @@ -164,7 +172,7 @@ def main(argv, version=DEFAULT_VERSION): # tell the user to uninstall obsolete version use_setuptools(version) - req = "setuptools>="+version + req = "setuptools>=" + version import pkg_resources try: pkg_resources.require(req) @@ -173,16 +181,17 @@ def main(argv, version=DEFAULT_VERSION): from setuptools.command.easy_install import main except ImportError: from easy_install import main - main(list(argv)+[download_setuptools(delay=0)]) - sys.exit(0) # try to force an exit + main(list(argv) + 
[download_setuptools(delay=0)]) + sys.exit(0) # try to force an exit else: if argv: from setuptools.command.easy_install import main main(argv) else: - print "Setuptools version",version,"or greater has been installed." - print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' - + print + "Setuptools version", version, "or greater has been installed." + print + '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)' def update_md5(filenames): @@ -193,7 +202,7 @@ def update_md5(filenames): for name in filenames: base = os.path.basename(name) - f = open(name,'rb') + f = open(name, 'rb') md5_data[base] = md5(f.read()).hexdigest() f.close() @@ -203,26 +212,23 @@ def update_md5(filenames): import inspect srcfile = inspect.getsourcefile(sys.modules[__name__]) - f = open(srcfile, 'rb'); src = f.read(); f.close() + f = open(srcfile, 'rb'); + src = f.read(); + f.close() match = re.search("\nmd5_data = {\n([^}]+)}", src) if not match: - print >>sys.stderr, "Internal error!" + print >> sys.stderr, "Internal error!" sys.exit(2) src = src[:match.start(1)] + repl + src[match.end(1):] - f = open(srcfile,'w') + f = open(srcfile, 'w') f.write(src) f.close() -if __name__=='__main__': - if len(sys.argv)>2 and sys.argv[1]=='--md5update': +if __name__ == '__main__': + if len(sys.argv) > 2 and sys.argv[1] == '--md5update': update_md5(sys.argv[2:]) else: main(sys.argv[1:]) - - - - - diff --git a/todo/nifvisualizer.py b/todo/nifvisualizer.py index 0cf2684f0..a12e4f6cc 100644 --- a/todo/nifvisualizer.py +++ b/todo/nifvisualizer.py @@ -39,85 +39,76 @@ # ***** END LICENSE BLOCK ***** # -------------------------------------------------------------------------- -import sys, os.path +import os.path +import sys Args = sys.argv -if len( Args ) < 3: - print """ - nifvisualizer: Visualizes arbitrary block types by scriptable visualizers. - --- - Syntax: python nifvisualizer.py [alt] - --- - Usage: Specify the Nif file with the argument. 
- The visualizer will look for a file called ".py" in the - "lizers" folder. If [alt] was specified, the visualizer - "_[alt].py" will be used. +if len(Args) < 3: + print """ - sys.exit( 1 ) + nifvisualizer: Visualizes arbitrary block types by scriptable visualizers. + --- + Syntax: python nifvisualizer.py [alt] + --- + Usage: Specify the Nif file with the argument. + The visualizer will look for a file called ".py" in the + "lizers" folder. If [alt] was specified, the visualizer + "_[alt].py" will be used. + """ + sys.exit(1) FileName = Args[1] BlockName = Args[2] - - -sys.path.append( os.path.abspath( '../pymodules' ) ) - - +sys.path.append(os.path.abspath('../pymodules')) from NifVis import vis_nif -vis_nif.LoadNif( FileName ) - - +vis_nif.LoadNif(FileName) DrawBlocks = None -if vis_nif.TypeRegistry.has_key( BlockName ): +if vis_nif.TypeRegistry.has_key(BlockName): DrawBlocks = vis_nif.TypeRegistry[BlockName] if not DrawBlocks: - print "Blocktype '%s' was not found in file!" % BlockName - sys.exit( 1 ) - - + print + "Blocktype '%s' was not found in file!" % BlockName + sys.exit(1) from NifVis import lizers LizerName = BlockName -if len( Args ) > 3: +if len(Args) > 3: LizerName += '_' + Args[3] Lizer = getattr(lizers, LizerName) if not Lizer: - print "Blocktype '%s' has no visualizer!" % BlockName - sys.exit( 1 ) - - + print + "Blocktype '%s' has no visualizer!" % BlockName + sys.exit(1) from NifVis import vis_gl Radius = 1 for b in DrawBlocks: - r = Lizer.Radius( b ) + r = Lizer.Radius(b) if r > Radius: Radius = r -vis_gl.Initialize( Radius ) +vis_gl.Initialize(Radius) -#try: +# try: # glBindTexture( Lizer.Texture() ) -#except: +# except: # pass - from NifVis import vis_run vis_run.Initialize() - - while vis_run.IsRunning: vis_gl.InitFrame() @@ -125,7 +116,7 @@ for b in DrawBlocks: vis_gl.InitDraw() - Lizer.Draw( b ) + Lizer.Draw(b) vis_gl.FinalizeDraw() vis_run.EventHandler()