--- a/usr/src/Makefile.master Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/Makefile.master Wed Oct 20 18:48:33 2010 +0100
@@ -100,7 +100,8 @@
ROOTPYTHONVENDORINSTALLDC= $(ROOTPYTHONVENDORINSTALL)/distro_const
ROOTPYTHONVENDORINSTALLTI= $(ROOTPYTHONVENDORINSTALL)/text_install
ROOTPYTHONVENDORINSTALLPROF= $(ROOTPYTHONVENDORINSTALL)/profile
-ROOTPYTHONVENDORSOLINSTALL= $(ROOTPYTHONVENDOR)/solaris_install
+ROOTPYTHONVENDORSOLINSTALL= $(ROOTPYTHONVENDOR)/solaris_install
+ROOTPYTHONVENDORSOLINSTALLDATACACHE= $(ROOTPYTHONVENDORSOLINSTALL)/data_object
ROOTAUTOINST= $(ROOT)/usr/share/auto_install
ROOTAUTOINSTSCPROFILES= $(ROOTAUTOINST)/sc_profiles
ROOTSBIN= $(ROOT)/sbin
--- a/usr/src/Targetdirs Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/Targetdirs Wed Oct 20 18:48:33 2010 +0100
@@ -67,7 +67,8 @@
/usr/lib/python2.6/vendor-packages/osol_install/distro_const \
/usr/lib/python2.6/vendor-packages/osol_install/profile \
/usr/lib/python2.6/vendor-packages/osol_install/text_install \
- /usr/lib/python2.6/vendor-packages/solaris_install \
+ /usr/lib/python2.6/vendor-packages/solaris_install \
+ /usr/lib/python2.6/vendor-packages/solaris_install/data_object \
/usr/sbin \
/usr/share/auto_install \
/usr/share/auto_install/sc_profiles \
--- a/usr/src/lib/Makefile Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/lib/Makefile Wed Oct 20 18:48:33 2010 +0100
@@ -25,7 +25,8 @@
include $(SRC)/Makefile.master
-SUBDIRS= install_utils \
+SUBDIRS= install_common \
+ install_utils \
libaiscf \
libaiscf_pymod \
libict \
@@ -44,8 +45,9 @@
HDRSUBDIRS= libadmldb libadmutil
-COMSUBDIRS= liberrsvc \
- liberrsvc_pymod \
+COMSUBDIRS= liberrsvc_pymod \
+ liberrsvc \
+ install_doc \
install_logging \
install_logging_pymod
--- a/usr/src/lib/Makefile.lib Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/lib/Makefile.lib Wed Oct 20 18:48:33 2010 +0100
@@ -69,7 +69,7 @@
ROOTPYTHONVENDORINSTALLCMODS= $(PYCMODS:%=$(ROOTPYTHONVENDORINSTALL)/%)
ROOTPYTHONVENDORSOLINSTALLMODS= $(PYMODS:%=$(ROOTPYTHONVENDORSOLINSTALL)/%)
-ROOTPYTHONVENDORSOLINSTALLCMODS=$(PYCMODS:%=$(ROOTPYTHONVENDORSOLINSTALL)/%)
+ROOTPYTHONVENDORSOLINSTALLCMODS= $(PYCMODS:%=$(ROOTPYTHONVENDORSOLINSTALL)/%)
ROOTPYTHONVENDORSOLINSTALLLIBS= $(CPYTHONLIBS:%=$(ROOTPYTHONVENDORSOLINSTALL)/%)
DYNLIBLINK = $(DYNLIB:%$(VERS)=%)
--- a/usr/src/lib/Makefile.targ Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/lib/Makefile.targ Wed Oct 20 18:48:33 2010 +0100
@@ -112,6 +112,9 @@
$(ROOTPYTHONVENDORSOLINSTALL)/%: $$(PNAME)/$(ARCH)/%
$(INS.file)
+$(ROOTPYTHONVENDORSOLINSTALLDATACACHE):
+ $(INS.dir)
+
#
# Python .py and .pyc files need to be installed with the original
# timestamp of the file preserved. Otherwise, .pyc files will
@@ -125,6 +128,9 @@
$(ROOTPYTHONVENDORSOLINSTALL)/%: %
$(CP_P.file)
+$(ROOTPYTHONVENDORSOLINSTALLDATACACHE)/%: %
+ $(CP_P.file)
+
$(ROOTRNGSCHEMA)/%: %
$(INS.file)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_common/Makefile Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,53 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+
+include ../Makefile.lib
+
+all:= TARGET= all
+clean:= TARGET= clean
+clobber:= TARGET= clobber
+install:= TARGET= install
+
+PYMODS = __init__.py
+
+PYCMODS= $(PYMODS:%.py=%.pyc)
+
+CLOBBERFILES = $(PYCMODS)
+CLEANFILES = $(CLOBBERFILES)
+
+all: python
+
+install: all .WAIT \
+ $(ROOTPYTHONVENDOR) \
+ $(ROOTPYTHONVENDORSOLINSTALL) \
+ $(ROOTPYTHONVENDORSOLINSTALLMODS) \
+ $(ROOTPYTHONVENDORSOLINSTALLCMODS)
+
+install_h:
+
+python:
+ $(PYTHON) -m compileall -l $(@D)
+
+include ../Makefile.targ
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_common/__init__.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,27 @@
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+
+""" Module body for solaris_install package
+"""
+
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/Makefile Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,55 @@
+#
+##
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+
+
+include $(SRC)/Makefile.master
+
+SUBDIRS= data_object
+
+.PARALLEL: $(SUBDIRS)
+
+all:= TARGET= all
+check:= TARGET= check
+clean:= TARGET= clean
+clobber:= TARGET= clobber
+install:= TARGET= install
+install_h:= TARGET= install_h
+lint:= TARGET= lint
+
+.KEEP_STATE:
+
+all check clean clobber lint: $(SUBDIRS)
+
+install: $(SUBDIRS)
+
+headers:
+
+install_h: $(SUBDIRS)
+
+$(SUBDIRS): FRC
+ cd $@; pwd; $(MAKE) $(TARGET)
+
+FRC:
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/data_object/Makefile Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,72 @@
+#
+##
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+
+
+PYMODS = __init__.py \
+ cache.py \
+ data_dict.py
+
+PYCMODS = $(PYMODS:%.py=%.pyc)
+
+ROOTPYMODS= $(PYMODS:%=$(ROOTPYTHONVENDORSOLINSTALLDATACACHE)/%)
+
+ROOTPYCMODS= $(PYCMODS:%=$(ROOTPYTHONVENDORSOLINSTALLDATACACHE)/%)
+
+
+CLOBBERFILES = $(PYCMODS)
+CLEANFILES = $(CLOBBERFILES)
+
+include ../../Makefile.lib
+
+PRIVHDRS =
+EXPHDRS =
+HDRS = $(EXPHDRS) $(PRIVHDRS)
+
+static:
+
+dynamic:
+
+python:
+ $(PYTHON) -m compileall -l $(@D)
+
+all: $(HDRS) python $(SUBDIRS)
+
+install_h:
+
+install: all .WAIT \
+ $(ROOTPYTHONVENDOR) \
+ $(ROOTPYTHONVENDORSOLINSTALL) \
+ $(ROOTPYTHONVENDORSOLINSTALLDATACACHE) \
+ $(ROOTPYMODS) $(ROOTPYCMODS)
+
+lint: lint_SRCS
+
+$(SUBDIRS): FRC
+ cd $@; pwd; $(MAKE) $(TARGET)
+
+FRC:
+
+include ../../Makefile.targ
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/data_object/__init__.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,1020 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Provides definition of base classes for storage in Data Object Cache.
+'''
+
+__all__ = ["cache", "data_dict"]
+
+import copy
+import logging
+import re
+import sys
+from urllib import quote, unquote
+
+from abc import ABCMeta, abstractmethod
+
+from lxml import etree
+
+# Define various Data Object specific exceptions
+
+
+class DataObjectError(Exception):
+ '''A common base exception for any DataObject related errors.'''
+ pass
+
+
+class ObjectNotFoundError(DataObjectError):
+ '''An exception to be raised if specified objects are not found.'''
+ pass
+
+
+class ParsingError(DataObjectError):
+ '''Exception to be raised when parsing of XML failed.'''
+ pass
+
+
+class PathError(DataObjectError):
+ '''Exception to be raised when an invalid path is provided.'''
+ pass
+
+
+class DataObjectBase(object):
+ '''Core abstract base class for the Data Object Cache contents.
+
+ Every object that is stored in the Data Object Cache is required
+ to sub-class the DataObject class, and implement the required
+ abstract methods:
+
+ - to_xml()
+ - can_handle()
+ - from_xml()
+
+ This class provides the basic infrastructure for the Data Object
+ Cache:
+ - a tree mechanism
+ - parent/child relationships
+ - allows for insertion, deletion, and fetching of children in
+ the tree.
+ - the XML import/export mechanism.
+ - path-based searching.
+ '''
+ __metaclass__ = ABCMeta
+
+ # Define regular expressions for matching values in a path specification.
+ __NAME_RE = re.compile("^([^\[\].]+)")
+ __TYPE_RE = re.compile("^.*\[.*@((\w|\.)+)[#?\.]*.*\].*")
+ __COUNT_RE = re.compile("^.*\[.*#(-*\d+).*\].*")
+ __DEPTH_RE = re.compile("^.*\[.*\?(-*\d+).*\].*")
+ __ATTR_RE = re.compile(".*\.(\w+)$")
+
+ # Define regular expression for extracting paths from strings.
+ __STRING_REPLACEMENT_RE = re.compile("%{([^}]+)}")
+
+ def __init__(self, name):
+ self._name = name
+ self._parent = None
+
+ self.generates_xml_for_children = False
+
+ # instead of simple list, _children could be a
+ # MutatableSequence sub-class
+ self._children = []
+
+ # Abstract class methods.
+ # These methods must be implemented by DataObject sub-classes or an
+ # Exception will be raised when they are instantiated.
+
+ @abstractmethod
+ def to_xml(self):
+ '''
+ This method is used in the generation of XML output for a manifest.
+
+ It is defined as an abstract method to require implementors of
+ DataObject sub-classes to make a decision on what support is required
+ for their implementation.
+
+ The expected return values of this method are:
+
+ None
+
+ - If no XML is to be generated by this method return this.
+
+ etree.Element
+
+ - Return an lxml etree Element implementation which should
+ represent this object.
+
+ It is expected that children of this object will in turn
+ provide their own XML representation from their to_xml()
+ method, which will be added as sub-elements of the returned
+ value from this object.
+
+ If a sub-class of this class wishes to handle the XML for its
+ children as well as for itself, then set the instance
+ variable "generates_xml_for_children" to True and
+ return a suitable sub-tree from this method.
+
+ The XML returned here should be as close as possible to
+        its equivalent in the AI/DC Manifest Schema.
+ '''
+ return None
+
+ @classmethod
+ @abstractmethod
+ def can_handle(cls, xml_node):
+ '''
+ This method is used, when importing XML, to quickly determine if a
+ class is able to process an XML Element, and convert it
+ to an instance of it self.
+
+ The parameter 'xml_node' will be an etree.Element object, and this
+ method should be implemented to quickly examine it to decide if
+ a subsequent call to the from_xml() method will be able to convert
+ it into a DataObject.
+
+        Examination of the XML Element can include examining its tag,
+ attributes, parent, or anything else that is available using the
+ etree API.
+
+ Expected Return Values:
+
+ True - Returned if a subsequent call to 'from_xml()' would work.
+
+ False - Returned if this XML Element cannot be handled by this
+ class.
+ '''
+ return False
+
+ @classmethod
+ @abstractmethod
+ def from_xml(cls, xml_node):
+        ''' Create a DataObject instance from the given XML node.
+
+ This method will only be called if a previous call to 'can_handle()'
+ returned True.
+
+ Its purpose is to further parse the XML Element passed and generate
+ a DataObject instance that matches it.
+
+ Child elements will usually be processed directly in the same way, but
+        if the to_xml() method generates XML for its children, then it's
+        likely that this method should do the same when importing XML, and
+ generate children objects.
+
+ Expected Return Values:
+
+ DataObject instance
+ - This should be a sub-class of DataObject.
+
+ Exceptions:
+
+ ParsingError
+ - On an unexpected parsing error, this method should raise a
+ 'ParsingError' exception with information on why it failed.
+
+ '''
+ return None
+
+ # Read-only properties
+ #
+ # These are accessor methods for simple class properties.
+ # These methods should not be called directly, the Python 'property'
+ # syntax should be used instead.
+
+ @property
+ def name(self):
+ '''Returns the name given to the object when created'''
+ return self._name
+
+ @property
+ def parent(self):
+ '''Returns the parent class, set on insertion into tree'''
+ return self._parent
+
+ @property
+ def has_children(self):
+ '''Returns True if the class has any children, False otherwise.'''
+ return (len(self._children) > 0)
+
+ # Methods for searching the cache, we provide 3 variants:
+ #
+ # - get_children: returns a list of direct children matching
+ # criteria
+ #
+ # - get_first_child: returns only the first matching direct child
+ #
+ # - get_descendants: recursively searches tree returning all
+ # matching descendants
+ #
+ def get_children(self, name=None, class_type=None, max_count=None):
+        '''Obtains a list of children, possibly filtered by criteria.
+
+ This method returns a list of the children objects that match
+ the provided criteria.
+
+ By default, if no criteria is specified, a list containing references
+ to all the children will be returned.
+
+ You may specify one, or more, of the following to narrow the list of
+ children returned:
+
+ name - When specified, this will search for children objects
+ with the specified name value, and return those objects.
+
+ class_type - When specified this will return any children with the
+ provided class_type.
+
+ max_count - Limit the number of children returned, searching stops
+ on reaching this number of matches.
+
+ Exceptions:
+
+ ObjectNotFoundError
+ If specific criteria is provided, and no matches are found,
+ then this exception will be thrown.
+
+ Note: the list returned is a copy (but the children aren't copied) so
+              modifying the list will not affect the internal list of children,
+ but modifying the children will.
+ '''
+
+ # Special case request for all children.
+ if name is None and class_type is None:
+ if max_count is None:
+ return copy.copy(self._children)
+
+        # If no class_type given, assume DataObjectBase, otherwise
+        # get_descendants will raise an error.
+ if class_type is None:
+ class_type = DataObjectBase
+
+ return self.get_descendants(name, class_type, max_depth=1,
+ max_count=max_count)
+
+ # Define children property, don't use @property since get_children is
+ # itself part of the exposed API.
+ children = property(get_children)
+
+ def get_first_child(self, name=None, class_type=None):
+ '''Obtains a reference to the first child matching criteria.
+
+ This method returns a reference to the first child object that matches
+ the provided criteria.
+
+ By default, if no criteria is specified, a reference to the first
+ child in the list of children will be returned.
+
+ You may specify one, or both, of the following to narrow the list of
+ children returned:
+
+ name - When specified, this will search for children objects
+ with the specified name value, and return the first
+ match.
+
+ class_type - When specified this will return the first child with
+ the provided class_type.
+
+ If no match is found, then this method will return 'None'.
+ '''
+
+ if name is None and class_type is None:
+ if len(self._children) > 0:
+ return self._children[0]
+ else:
+ return None
+
+ try:
+ child_list = self.get_descendants(name=name,
+ class_type=class_type, max_depth=1, max_count=1)
+ return child_list[0]
+ except ObjectNotFoundError:
+ return None
+
+ def get_descendants(self, name=None, class_type=None, max_depth=None,
+ max_count=None):
+        '''Searches tree for a list of descendants that match the criteria.
+
+ This method recursively searches a tree of DataObjects looking for
+ objects that match the provided criteria, and returning them in a
+ simple list.
+
+ The search is done in a 'depth-first' way, where a tree like:
+
+ A
+ B C
+ D E F G
+
+ would result in a list like:
+
+ A B D E C F G
+
+ You may specify one, or both, of the following to narrow the list of
+ children returned:
+
+ name - When specified, this will search for children objects
+ with the specified name value, and return those objects.
+
+ class_type - When specified this will return any children with the
+ provided class_type.
+
+ You may further limit the traversal of the tree by specifying the
+ following:
+
+        max_depth  - Maximum depth to traverse the tree to; can speed up
+ such a search in a large tree structure. A value of 0,
+ or None, means the depth should not be limited.
+
+ max_count - Limit the number of children returned, searching stops
+ on reaching this number of matches.
+
+ Exceptions:
+
+ ValueError
+ Thrown if both of the name or class_type are not specified,
+ or if an invalid value is specified.
+
+ ObjectNotFoundError
+ If specific criteria is provided, and no matches are found,
+ then this exception will be thrown.
+
+ '''
+
+ if max_depth is not None and max_depth < 0:
+ raise ValueError(
+ "max_depth should be greater than or equal to 0, got %d" %
+ (max_depth))
+
+ if max_count is not None and max_count < 1:
+ raise ValueError(
+ "max_count should be greater than or equal to 1, got %d" %
+ (max_count))
+
+ if name is None and class_type is None:
+ raise ValueError(
+ "Please specify at least one of 'name' or 'class_type'")
+
+ # To simplify test in loop below, assume DataObject is the
+ # class_type if none given,
+ if class_type is None:
+ class_type = DataObjectBase
+
+ new_list = list()
+ new_max_count = None
+ for child in self._children:
+ if max_count is not None:
+ new_max_count = max_count - len(new_list)
+ if new_max_count < 1:
+ # Reached limit, stop now.
+ break
+
+ # Look for matches to criteria
+ if isinstance(child, class_type):
+ if name is None or name == child.name:
+ new_list.append(child)
+ # Double check max_count after adding to the list.
+ if max_count is not None:
+ if len(new_list) >= max_count:
+ break
+ else:
+ new_max_count -= 1
+
+ # Now search children's children, using recursion...
+ if child.has_children:
+ new_max_depth = None
+ if max_depth is not None:
+ if max_depth > 1:
+ new_max_depth = max_depth - 1
+ elif max_depth == 1:
+ # Don't go any deeper than current child level.
+ continue
+
+ try:
+ children_list = child.get_descendants(name=name,
+ class_type=class_type, max_depth=new_max_depth,
+ max_count=new_max_count)
+ new_list.extend(children_list)
+ except ObjectNotFoundError:
+ # Don't throw here, will throw later if still none found.
+ pass
+
+ if len(new_list) == 0:
+ raise ObjectNotFoundError(\
+ "No matching objects found: name = '%s' "
+ "and class_type = %s" %
+ (str(name), str(class_type)))
+
+ return new_list
+
+ @staticmethod
+ def _check_object_type(obj):
+ '''THIS IS A PRIVATE METHOD
+
+ Checks if an object is instance of DataObjectBase, need to check
+ for this rather than DataObject to allow for DataObjectDict and
+ similar classes to work..
+
+ Will raise a 'TypeError' exception if it fails.
+ '''
+
+ if not isinstance(obj, DataObjectBase):
+ msg = "Invalid Child Type: %s" % (obj.__class__.__name__)
+ logging.error(msg)
+ raise TypeError(msg)
+
+ # Methods for cloning / duplication objects
+ def __getstate__(self):
+ '''Provide a copy of the internal dictionary to be used in deepcopy'''
+ # Take a copy of the internal dictionary using constructor
+ state = dict(self.__dict__)
+        # Ensure that copy doesn't have a parent to avoid recursion up tree.
+ state['_parent'] = None
+ return state
+
+ def __setstate__(self, state):
+ '''Set the internal dictionary to construct new copy for deepcopy()'''
+ self.__dict__ = state
+ # Since we removed the parent refs in __getstate__ we need to restore
+ # them to our children, which are copies when using deepcopy().
+ for child in self._children:
+ child._parent = self
+
+ def __copy__(self):
+ '''Create a copy of ourselves for use by copy.copy()
+
+ The new copy will have no parent or children.
+
+ Complex objects should consider defining the special method
+ __copy__() if they wish to override the default behaviour.
+ If overriding this you must call the super-classes __copy__() method
+ to ensure correct behaviour.
+ '''
+
+ # Construct a new class to match self
+ new_copy = self.__class__.__new__(self.__class__)
+ # Set the dictionary.
+ new_copy.__dict__.update(self.__dict__)
+ # Clear the parent and children since we want to omit them.
+ new_copy._parent = None
+ new_copy._children = []
+
+ return new_copy
+
+ # Methods for creating an XML tree from the cache.
+ def __create_xml_tree(self, ancestor):
+ '''THIS IS A PRIVATE CLASS METHOD
+
+        Returns an XML tree for this object and all its descendants.
+ Converts current object to XML and then recursively calls
+ itself on all its children to append them to the XML tree.
+
+ Slightly complicated because any object in the tree
+ can return None from its to_xml() function, in which case
+ we need to 'skip' that generation and append its children
+ to their grandparent (or older ancestor) instead of parent.
+
+ When called initially (ie other than when it calls itself
+ recursively), the ancestor parameter should be None.
+
+ If the top-level object's to_xml() returns None, a 'root'
+ XML node is created to head the tree.
+ '''
+
+ # Need to know if we're using a dummy element.
+ using_dummy = False
+
+ element = self.to_xml()
+
+ if ancestor is None:
+ # should only be True when called initially
+ if element is None:
+ # create a dummy XML element
+ element = etree.Element("root")
+ using_dummy = True
+
+ ancestor = element
+
+ # At this point, at least one of element or ancestor
+ # will not be None. We must attach the current object's
+ # children to something, so use element first, or
+ # ancestor if that is None
+ attach_to = element
+ if attach_to is None:
+ attach_to = ancestor
+
+ if not self.generates_xml_for_children:
+ for child in self._children:
+ sub = child.__create_xml_tree(attach_to)
+ if sub is not None:
+ attach_to.append(sub)
+
+ # If we created a dummy element, but there are no sub-tags then
+ # we should not return it.
+ if using_dummy:
+ if len(element) != 0:
+ return element
+ else:
+ return None
+
+ return element
+
+ def get_xml_tree(self):
+        '''Returns an XML tree for this object and all its descendants.
+
+ Calls self.__create_xml_tree() to do the work.'''
+
+ return self.__create_xml_tree(None)
+
+ #
+ # Utility Methods
+ #
+ def get_xml_tree_str(self):
+ '''Returns an string representing the cache contents in XML format.'''
+ xml = self.get_xml_tree()
+ if xml is not None:
+ return etree.tostring(xml, pretty_print=True)
+ else:
+ return None
+
+ def __str__(self):
+ '''For debugging, produce an indented tree of children.
+
+ If you want to change what is output for an sub-class then the
+        method __repr__() should be overridden for the sub-class.
+ '''
+ line = ""
+ ancestor = self._parent
+ while ancestor is not None:
+ # add 1 tab per generation
+ line += "\t"
+ ancestor = ancestor._parent
+
+ line += "-> [%s] (%s)" % (self.name, repr(self))
+
+ for child in self._children:
+ line = line + "\n" + str(child)
+
+ return line
+
+ @property
+ def object_path(self):
+ ''' Generate path of this object relative to parents.
+
+        The top parent object, the '/' root node, doesn't have its name
+        included in the path, since the path is relative to that object.
+
+ Unusual characters in the name will be encoded like URLs.
+ '''
+ # Root node will be simply /, so prefix with "" so join works.
+ my_path = [""]
+
+        # Traverse up through parents, but stop below the root node so the
+        # path doesn't include the root node's own name.
+ parent = self
+ while parent is not None and parent._parent is not None:
+ my_path.insert(1, quote(parent._name, ""))
+ parent = parent._parent
+
+ return "/".join(my_path)
+
+ def find_path(self, path_string):
+ '''Fetches elements of the DataObject tree structure using a path.
+
+ The provided path is broken down using tokens, which map as follows:
+
+ / => get_children()
+ First '/' would be the current object itself (root)
+
+ // => get_descendants()
+
+ @type => A fully-qualified class string which is mapped
+ to a class object. It is possible to omit the
+ qualification for classes in this module (e.g.
+ DataObject, etc).
+
+ #num => Max count (matches)
+
+ ?num => Max depth (with //)
+
+ .attr => An attribute of a matched object - always at the
+ end of the path - got via getattr (but would
+ omit '_' prefixed attrs).
+
+ Special characters in a name can be encoded using URL type encoding,
+ to avoid conflict with the above, for example:
+
+ /%2A%2Fa%20b
+
+ matches a child with the name "*/a b".
+
+ Constraints should be in square brackets ([]), and attributes
+ outside of the brackets, as follows:
+
+ /<name>[<constraints>].attribute
+
+ e.g.
+
+ /my_name[@my_mod.MyClass].attr
+
+ This method returns a list of matches to the criteria. The list will
+ contain objects if no attribute is specified, otherwise it will
+ contain the values of the attribute from each matched object.
+
+        The '/' root node is the 'self' reference, so its name is not
+        included in a path, since the path is relative to this object.
+
+ Exceptions:
+
+ PathError - Raised if invalid path is provided.
+
+ ObjectNotFoundError
+ - Raised if no match for the given path is found
+
+ AttributeError - Raised if there is an attempt to match to an
+ attribute that doesn't exist, or if you try to
+ access an internal attribute (starts with '_')
+
+ '''
+
+ # Used to enforce a max_depth if only one '/' specified.
+ max_depth = None
+ # Tokenize string specific to this level, will call recursively.
+ matched = list()
+ remaining_path = None
+ if (path_string.startswith("//")):
+ # Use descendants
+ tokens = path_string.split("/", 3)
+ to_eval = tokens[2]
+ if (len(tokens) > 3 and tokens[3] != ""):
+ remaining_path = "/" + tokens[3]
+ elif (path_string.startswith("/")):
+ # Use get_children OR max_depth = 1
+ tokens = path_string.split("/", 2)
+ to_eval = tokens[1]
+ if (len(tokens) > 2 and tokens[2] != ""):
+ remaining_path = "/" + tokens[2]
+ max_depth = 1
+ else:
+ # Raise error
+ raise PathError("Invalid path: '%s'" % (path_string))
+
+ kwargs = self.__convert_to_kwargs(to_eval)
+
+ # Remove attribute if found since it's not a valid parameter
+ # to get_descendants
+ attribute = None
+ if "attribute" in kwargs:
+ attribute = kwargs["attribute"]
+ del kwargs["attribute"]
+
+ if attribute.startswith("_"):
+ raise AttributeError("Invalid attribute: '%s'" % (attribute))
+
+        # Enforce a max_depth if we set one ourselves.
+ if max_depth is not None:
+ kwargs["max_depth"] = max_depth
+
+ children = self.get_descendants(**kwargs)
+ if remaining_path is not None:
+ # Keep descending, don't include intermediate matches.
+ child_matched = list()
+ for child in children:
+ try:
+ child_list = child.find_path(remaining_path)
+ child_matched.extend(child_list)
+ except ObjectNotFoundError:
+ pass
+ matched.extend(child_matched)
+ else:
+ # As deep as possible, return these children.:
+ matched.extend(children)
+
+ if len(matched) == 0:
+ raise ObjectNotFoundError("No children found matching : '%s'" %
+ (path_string))
+
+ if attribute is not None:
+ attr_values = list()
+ for match in matched:
+ # getattr() will generate AttributeErrors if invalid attribute.
+ attr_values.append(getattr(match, attribute))
+ return attr_values
+ else:
+ return matched
+
+ def str_replace_paths_refs(self, orig_string, value_separator=","):
+ """ Replace the %{...} references to DOC values with quoted strings
+
+ Returns a new string with the values replaced.
+
+ Multiple matches are concatenated using the value of the argument
+ 'value_separator', e.g.
+
+ val1,val2,val3
+
+ The value for each matched object is generated calling 'repr(obj)'
+ since that generates a more accurate representation of values
+ than calling str() - including the automatic quoting of strings.
+
+ If the references are not valid, the exceptions from the
+ DataObjectBase.find_path() will be passed on.
+ """
+ new_string = orig_string
+ for matches in re.finditer(
+ DataObjectBase.__STRING_REPLACEMENT_RE, orig_string):
+ path = matches.group(1)
+ if path is not None:
+ # find_path() throws an exception if no match found.
+ found_list = self.find_path(path)
+ value_str = ""
+ # Combine with SEPARATOR, using repr to get usable text values
+ # since it automatically quotes if it is a string.
+ for value in found_list:
+ if value_str == "":
+ value_str = repr(value)
+ else:
+ value_str += "%s%s" % (value_separator, repr(value))
+
+ logging.debug("Replacing reference to '%s' with '%s'" %
+ (matches.group(0), value_str))
+ new_string = new_string.replace(matches.group(0),
+ value_str, 1)
+
+ return new_string
+
+ @staticmethod
+ def __locate_class_by_name(class_name):
+ '''Locates a class by name, using modules already loaded
+
+ The class_name should in general be fully-qualified, i.e. it should be
+ using something like:
+
+ package.module.Class
+
+ but, we will assume that an un-qualified class name is part of this
+ module to allow for short-hand.
+ '''
+
+        # Do we have a fully-qualified class name - containing dots
+ mod_name = None
+ class_not_qualified = False
+ mods = class_name.split(".")
+ if len(mods) > 1:
+ mod_name = ".".join(mods[:-1])
+ class_name_only = mods[-1:][0]
+ else:
+ # Assume it's relative to own module, for now.
+ mod_name = DataObjectBase.__module__
+ class_name_only = class_name
+ class_not_qualified = True
+
+ try:
+ mod = sys.modules[mod_name]
+ except KeyError:
+ raise PathError("Invalid module name: %s" %
+ (mod_name))
+
+ if hasattr(mod, class_name_only):
+ class_obj = getattr(mod, class_name_only)
+ else:
+ if class_not_qualified:
+ # Don't confuse user with reference to module
+ # they didn't provide
+ raise PathError("Invalid non-qualified class name: %s" %
+ (class_name))
+ else:
+ raise PathError("No such class %s in module %s" %
+ (class_name_only, mod_name))
+
+ return(class_obj)
+
+ @staticmethod
+ def __convert_to_kwargs(value_string):
+ '''Convert a path element to a series of kwargs for get_descendants'''
+ args = dict()
+ match = DataObjectBase.__NAME_RE.match(value_string)
+ if match:
+ args["name"] = unquote(match.group(1))
+ match = DataObjectBase.__TYPE_RE.match(value_string)
+ if match:
+ args["class_type"] = \
+ DataObjectBase.__locate_class_by_name(unquote(match.group(1)))
+
+ # If neither specified assume DataObjectBase for the class_type
+ if "name" not in args and "class_type" not in args:
+ args["class_type"] = DataObjectBase
+
+ match = DataObjectBase.__COUNT_RE.match(value_string)
+ if match:
+ args["max_count"] = int(unquote(match.group(1)))
+ match = DataObjectBase.__DEPTH_RE.match(value_string)
+ if match:
+ args["max_depth"] = int(unquote(match.group(1)))
+ match = DataObjectBase.__ATTR_RE.match(value_string)
+ if match:
+ args["attribute"] = unquote(match.group(1))
+
+ return args
+
+
+class DataObject(DataObjectBase):
+ '''A variant of DataObjectBase which allows insertion and deletion.
+
+ This is the class that most people will sub-class when creating
+ an object for insertion in to the Data Object Cache.
+ '''
+
+ # Methods for adding objects to the cache and deleting them from it.
+ def insert_children(self, new_children, before=None, after=None):
+ '''Inserts new_children into the list of children.
+
+ By default, 'new_children' will be appended to the existing list. The
+ value for 'new_children' may be a single DataObject instance,
+ or a list of DataObject instances.
+
+ If 'before' is specified, and is an existing child, then this method
+ will insert the new_children before it.
+
+ If 'after' is specified, and is an existing child, then this method
+ will insert the new_children after it.
+
+ Exceptions:
+
+ ObjectNotFoundError
+ If either 'before' or 'after' are not existing children then this
+ exception will be raised.
+
+ TypeError
+ This is raised if new_children is not a DataObjectBase instance,
+ or is a list containing objects that are not DataObjectBase
+ instances.
+
+ ValueError
+ This is raised if both 'before' and 'after' are specified, only
+ one should be specified.
+
+ '''
+
+ insert_at = 0 # Where in list to insert new_children
+
+ if before is not None and after is not None:
+ msg = "Both 'before' and 'after' should not be specified."
+ logging.error(msg)
+ raise ValueError(msg)
+ elif before is not None:
+ self._check_object_type(before)
+ try:
+ insert_at = self._children.index(before)
+ except ValueError:
+ msg = "Invalid value for 'before' while inserting children"
+ logging.error(msg)
+ raise ObjectNotFoundError(msg)
+ elif after is not None:
+ self._check_object_type(after)
+ try:
+ insert_at = self._children.index(after) + 1
+ except ValueError:
+ msg = "Invalid value for 'after' while inserting children"
+ logging.error(msg)
+ raise ObjectNotFoundError(msg)
+ else:
+ insert_at = len(self._children)
+
+ # Prefer to use DataObject i/f over an iterable object.
+ if isinstance(new_children, DataObjectBase):
+ # Single instance of DataObject, and put it in a list.
+ new_children = [new_children]
+
+ # Check for iterator support on object, raises exception if not
+ offset = 0
+ for child in new_children:
+ self._check_object_type(child)
+ self._children.insert(insert_at + offset, child)
+ child._parent = self
+ offset += 1
+
+ def __delete_child(self, child):
+ '''THIS IS A PRIVATE CLASS METHOD
+
+    Internal utility method, to remove a specific child, primarily
+ used by delete_children() method.
+
+ Will attempt to remove from self._children list and if it succeeds,
+ then it will set the removed child's parent to None.
+ '''
+ self._check_object_type(child)
+ try:
+ self._children.remove(child)
+ child._parent = None
+ except ValueError:
+ raise ObjectNotFoundError(
+ "Failed to remove non-existant object '%s'" % (str(child)))
+
+ def delete_children(self, children=None, name=None, class_type=None):
+ '''This method deletes children from this DataObject.
+
+ Without any parameters, it will delete all children of this object.
+
+ You can limit the deletion, by specifying either:
+
+ children - This can be either a single DataObject, or a list of
+ DataObjects. When specified, only these specific objects
+ will be deleted.
+
+ If this is provided, then the name and class_type
+ parameters will be ignored.
+
+ or one, or both of the following:
+
+ name - When specified, this will search for children objects
+ with the specified name value, and delete those objects.
+
+ class_type - When specified this will delete any children with the
+ provided class_type.
+
+ The following exceptions are thrown by this method:
+
+ ObjectNotFoundError - This will be thrown if no suitable match for the
+ criteria is found.
+
+ TypeError - This will be returned if any of the parameters
+ have invalid types.
+
+ '''
+
+ if not self.has_children and name is None and class_type is None:
+ # Nothing to be done.
+ return
+
+ # If a list of specific children is provided, it takes precedence
+ # over any criteria. Handle these specific children now and return
+ # when finished.
+ if children is not None:
+ if isinstance(children, DataObjectBase):
+ self.__delete_child(children)
+ else:
+ # Assume iterable
+ for child in children:
+ self.__delete_child(child)
+ # All done now, so return.
+ return
+
+ # Delete based on search criteria, or all children
+ deleted_children = False
+
+ # Need to loop over a copy of the list since doing otherwise
+ # causes some items to be missed, so uses .children property
+ # as opposed to self._children.
+ for child in self.children:
+ delete_child = False
+ if name is None and class_type is None:
+ delete_child = True
+ elif class_type is None and name == child.name:
+ delete_child = True
+ elif name is None and isinstance(child, class_type):
+ delete_child = True
+ elif name == child.name and isinstance(child, class_type):
+ delete_child = True
+
+ if delete_child:
+ self.__delete_child(child)
+ deleted_children = True
+
+ if not deleted_children:
+ raise ObjectNotFoundError(\
+ "No matching objects found: name = '%s' "
+ "and class_type = %s" %
+ (str(name), str(class_type)))
+
+ def delete(self):
+ '''Recursively deletes an object
+
+        Will remove itself from its parent and removes its children
+ from the tree too.
+ '''
+ for child in self._children:
+ child.delete()
+
+ if self._parent is not None:
+ self._parent._children.remove(self)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/data_object/cache.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,390 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+"""Mechanism for providing a central store of in-memory data in the installer.
+"""
+
+import logging
+import pickle
+
+from lxml import etree
+
+from solaris_install.data_object import \
+ DataObjectBase, DataObject
+
+# Registry of all classes that use the cache (ie all sub-classes of DataObject)
+# Uses a dictionary, with keys being priorities, and values being a list of
+# classes at that priority level.
+_CACHE_CLASS_REGISTRY = dict()
+
+
+class DataObjectCacheChild(DataObject):
+ '''Object to represent the sub-trees of the DataObjectCache
+
+    Doesn't generate any XML or import any XML itself.
+ '''
+
+ def __init__(self, name):
+ '''Initialization function for DataObjectCacheChild class.'''
+ super(DataObjectCacheChild, self).__init__(name)
+
+ def delete(self):
+ '''Recursively deletes an object
+
+        Override to avoid deletion of DataObjectCacheChild object itself.
+ '''
+ for child in self._children:
+ child.delete()
+
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''Children of DataObjectCache don't have XML representation.'''
+ return False
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''Children of DataObjectCache don't have XML representation.'''
+ return None
+
+ def to_xml(self):
+ '''Children of DataObjectCache don't have XML representation.'''
+ return None
+
+ def __repr__(self):
+ return "DataObjectCacheChild: %s" % self.name
+
+
+class DataObjectCache(DataObjectBase):
+ '''Primary access class for the Data Object Cache infrastructure.
+
+ Usually accessed by the Engine singleton, but may be used independently
+ of the Engine.
+
+ Provides the following functionality:
+
+ - Has two sub-trees:
+
+ - persistent - will be written to disk if snapshot mechanism is used,
+ and is overwritten if snapshots are loaded.
+      - volatile - exists only in-memory and is not affected by snapshot
+ loading or written when a snapshot is taken.
+
+ - Snapshot and Roll-back
+
+ - It is possible to write out data from the 'persistent' sub-tree to a
+ file or file-like object.
+
+ - It is possible to roll-back the 'persistent' sub-tree to the contents
+ of a provided file or file-like object.
+
+ - XML Manifest Import and Generation
+
+ - Drives the import of an XML manifest into a DataObject based tree
+ - Drives the generation of XML suitable for conversion in to an XML
+ manifest via XSLT.
+
+    Sub-classes DataObjectBase to ensure that it's not possible to
+    add or remove the "persistent" and "volatile" children.
+
+    Consumers of the Data Object Cache should not insert/delete children
+ to/from the DataObjectCache object, but instead should insert/delete
+ them to/from the 'persistent' or 'volatile' sub-trees.
+
+ '''
+
+ VOLATILE_LABEL = "volatile"
+ PERSISTENT_LABEL = "persistent"
+
+ def __init__(self):
+ '''Initialization function for DataObjectCache class.'''
+ super(DataObjectCache, self).__init__("DataObjectCache")
+
+ # Create 'persistent' and 'volatile' sub-trees
+ self._persistent_tree = DataObjectCacheChild(
+ DataObjectCache.PERSISTENT_LABEL)
+ self._volatile_tree = DataObjectCacheChild(
+ DataObjectCache.VOLATILE_LABEL)
+
+ # Add children to tree, and update parent references.
+ self._children = [self._persistent_tree, self._volatile_tree]
+ self._persistent_tree._parent = self
+ self._volatile_tree._parent = self
+
+ @property
+ def persistent(self):
+ '''Returns the persistent tree child_node'''
+ return self._persistent_tree
+
+ @property
+ def volatile(self):
+ '''Returns the volatile tree child_node'''
+ return self._volatile_tree
+
+ #
+ # Utility Methods
+ #
+ def clear(self):
+ '''Delete all objects from the sub-trees.
+
+ This will delete all objects from cache, except for the root,
+ persistent and volatile nodes.
+ '''
+
+ for child in self._children:
+ child.delete_children()
+
+ msg = "DataObjectCache cleared!"
+ logging.info(msg)
+
+ @property
+ def is_empty(self):
+ '''Returns True if the contents of the cache is deemed empty.
+
+ Empty is defined as the sub-nodes, 'persistent' and 'volatile'
+ having no children.
+ '''
+ return not self._persistent_tree.has_children and \
+ not self._volatile_tree.has_children
+
+ def take_snapshot(self, file_obj):
+ '''Takes a snapshot of the 'persistent' sub-tree.
+
+ This method writes the contents of the 'persistent' sub-tree to the
+ destination provided by 'file_obj'.
+
+ 'file_obj' may be one of the following:
+
+ a string - this is used as the path of a file to open for writing.
+
+ an object - this object is required to have a 'write(str)' method
+ that takes a single string as a parameter. It can thus
+ be an open file object, a StringIO object, or any
+ other custom object that meets this interface.
+
+ Exceptions:
+
+ ValueError - This will be thrown if wrong type is passed for
+ 'file_obj'
+
+ IOError - This will be thrown if there is a problem opening the
+ specified file_obj path string.
+ '''
+
+ close_at_end = False
+ if isinstance(file_obj, str):
+ # If it's a string, then open the file_obj.
+ outfile = open(file_obj, 'wb')
+ close_at_end = True
+ # Check if it has a write() method...
+ elif hasattr(file_obj, "write"):
+ outfile = file_obj
+ else:
+ # Object isn't acceptable for output to, needs write() method.
+ raise ValueError("'file_obj' should be either a file path string \
+ or object with write(string) method")
+
+ pickle.dump(self._persistent_tree, outfile)
+
+ if close_at_end:
+ outfile.close()
+
+ def load_from_snapshot(self, file_obj):
+ '''Load a snapshot in to the 'persistent' sub-tree.
+
+        This method loads the contents of the 'persistent' sub-tree from the
+        provided file_obj parameter.
+
+ 'file_obj' may be one of the following:
+
+ a string - this is used as the path of a file to open for reading.
+
+ an object - This is a file_obj-like object for reading a data
+ stream.
+
+ The file_obj-like object must have two methods, a read()
+ method that takes an integer argument, and a readline()
+ method that requires no arguments. Both methods should
+ return a string. This file-like object can be a file
+ object opened for reading, a StringIO object, or any
+ other custom object that meets this interface.
+
+ Exceptions:
+
+ ValueError - This will be thrown if wrong type is passed for
+ 'file_obj'
+
+ IOError - This will be thrown if there is a problem opening the
+ specified file_obj path string.
+
+ '''
+
+ close_at_end = False
+ if isinstance(file_obj, str):
+ # If it's a string, then open the file_obj.
+ infile = open(file_obj, 'rb')
+ close_at_end = True
+        # Check if it has read() and readline() methods...
+ elif (hasattr(file_obj, "read") and hasattr(file_obj, "readline")):
+ infile = file_obj
+ else:
+            # Object isn't acceptable for input, needs read()/readline().
+ raise ValueError("'file_obj' should be either a file path string \
+ or object with read and readline methods")
+
+ new_cache_peristent_tree = pickle.load(infile)
+
+ if close_at_end:
+ infile.close()
+
+ self._persistent_tree.delete_children()
+ self._persistent_tree.insert_children(
+ new_cache_peristent_tree.children)
+
+ @classmethod
+ def register_class(cls, new_class_obj, priority=50):
+ '''Register a class with the DataObjectCache for importing XML.
+
+ Registers sub-classes of DataObject, which are then used when
+ importing XML to find classes that support the handling of XML
+ snippets by calling the 'can_handle()' and 'from_xml()' methods.
+
+ 'new_class_obj' may be either a single class object, or an iterable
+ object containing class objects.
+
+ The priority defines the order in which classes are checked, with
+ lower numbers being checked first - the default is for all registered
+ classes to be at priority 50.
+
+ Exceptions:
+
+ TypeError
+ - thrown if class_obj is not a sub-class of DataObject or
+ an iterable object.
+
+ ValueError
+ - thrown if priority is not in the range 0-100 inclusive.
+ '''
+
+ if priority < 0 or priority > 100:
+ raise ValueError("Invald priority value %d" % (priority))
+
+ try:
+ # Check if it's an iterable object, if so, pass through
+ iter(new_class_obj)
+ class_list = new_class_obj
+ except TypeError:
+ # Assume single instance of DataObjectBase, and put it in a list.
+ class_list = [new_class_obj]
+
+ # Should have iterable at this point, so loop through.
+ for class_ref in class_list:
+ if issubclass(class_ref, DataObjectBase):
+ _CACHE_CLASS_REGISTRY.setdefault(priority, [])\
+ .append(class_ref)
+ else:
+ raise TypeError("Class '%s' is not a sub-class of %s" %
+ (str(class_ref), str(DataObject)))
+
+ @classmethod
+ def get_registered_classes_str(cls):
+ '''Generates a string of all registered classes to standard out.'''
+ string_list = ["============================", "Registered Classes:"]
+ for prio in sorted(_CACHE_CLASS_REGISTRY.keys()):
+ string_list.append("[Priority = %d]" % (prio))
+ for cls in _CACHE_CLASS_REGISTRY[prio]:
+ string_list.append(" %s" % repr(cls))
+ string_list.extend(["============================", ""])
+
+ return "\n".join(string_list)
+
+ @classmethod
+ def find_class_to_handle(cls, node):
+ """Find a class that handles a node in the known_classes list."""
+ for prio in sorted(_CACHE_CLASS_REGISTRY.keys()):
+ for cls in _CACHE_CLASS_REGISTRY[prio]:
+ if cls.can_handle(node):
+ return cls
+
+ return None
+
+ @classmethod
+ def __create_doc_from_xml(cls, parent, node):
+ '''Given an XML tree, generates the contents of the DataObjectCache'''
+ # Use same parent, skip level by default
+ new_parent = parent
+
+ found_class = cls.find_class_to_handle(node)
+ if found_class:
+ obj = found_class.from_xml(node)
+ if obj:
+ parent.insert_children(obj)
+ new_parent = obj
+
+ for child in node:
+ cls.__create_doc_from_xml(new_parent, child)
+
+ def import_from_manifest_xml(self, dom_root_node, volatile=False):
+ '''Imports the provided XML tree into the Data Object cache
+
+ By default, because 'volatile' is False, the XML will be imported
+ into the DataObjectCache.persistent sub-tree.
+
+ Otherwise, if 'volatile' is True, then the XML will be imported into
+ the DataObjectCache.volatile sub-tree.
+
+ Exceptions:
+
+ ParsingError
+ - Because from_xml() is used to import the XML, it is possible
+ that there could be some bad XML which will result in this
+ exception being thrown.
+ '''
+
+ if volatile:
+ import_base = self.volatile
+ else:
+ import_base = self.persistent
+
+ for child in dom_root_node:
+ DataObjectCache.__create_doc_from_xml(import_base, child)
+
+ def generate_xml_manifest(self):
+ '''Generates XML from the DataObjectCache'''
+ return self.get_xml_tree()
+
+ # XML Generation
+ def to_xml(self):
+ '''Return the 'root' node of the DataObjectCache.'''
+ return etree.Element("root")
+
+ # XML Import
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''The DataObjectCache class doesn't import any XML itself.'''
+ return False
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''The DataObjectCache class doesn't import any XML itself.'''
+ return None
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/data_object/data_dict.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,225 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Defines the DataObjectDict class to allow storage of a dictionary in cache.
+'''
+
+from lxml import etree
+
+from solaris_install.data_object import \
+ DataObjectBase, ParsingError
+
+
+class DataObjectDict(DataObjectBase):
+ ''' Storage Object to store a dictionary in the
+ Data Object Cache.
+
+ By default it will not generate any XML, if it is required that the data
+ would generate XML, then you need to pass generate_xml=True to the
+ constructor.
+
+ XML Generated is along the lines of:
+
+ <data_dictionary>
+ <data name="key">value</data>
+ ...
+ </data_dictionary>
+
+ It is possible to change the tag and sub-tag names by sub-classing this
+ object along the lines of:
+
+ class DataObjectDictDifferentTags(DataObjectDict):
+ # Override both TAG_NAME and SUB_TAG_NAME
+ TAG_NAME = "different_tag"
+ SUB_TAG_NAME = "different_sub_tag"
+ pass
+
+ it is necessary to do things this way to ensure that the class methods
+ can_handle() and from_xml() can behave as expected.
+
+    Sub-classes DataObjectBase since we want this to be
+ a leaf-node object.
+
+ '''
+
+ TAG_NAME = "data_dictionary"
+ SUB_TAG_NAME = "data"
+
+ def __init__(self, name, data_dict, generate_xml=False):
+ '''Initialize the object with the provided data_dict.
+
+ This method takes the following parameters:
+
+ name - the name of the object
+
+ data_dict - a python dictionary object containing the data.
+
+ generate_xml - boolean to say whether this will generate XML or not.
+ (default: False)
+
+ Exceptions:
+
+ ValueError - Will be raised if any invalid values are passed as
+ parameters.
+
+ '''
+
+ super(DataObjectDict, self).__init__(name)
+
+ if not isinstance(data_dict, dict):
+ raise ValueError("data_dict parameter is not a python 'dict'.")
+
+ self._data_dict = data_dict
+
+ self._generate_xml = generate_xml
+
+    # Override abstract functions from DataObject class.
+ def to_xml(self):
+ '''Generate XML to represent a dictionary.
+
+ Generates XML in the format:
+
+ <data_dictionary>
+ <data name="NAME">VALUE</data>
+ ...
+ </data_dictionary>
+
+ The tags and sub-tags are defined by the class attributes TAG_NAME
+ and SUB_TAG_NAME - to change, you should sub-class this class and
+ set their values.
+ '''
+ if not self.generate_xml:
+ return None
+
+ element = etree.Element(self.TAG_NAME, name=self.name)
+ for k in sorted(self.data_dict.keys()):
+ sub_element = etree.SubElement(element, self.SUB_TAG_NAME)
+ sub_element.set("name", str(k))
+ sub_element.text = str(self.data_dict[k])
+
+ return element
+
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''Determines if this class can import XML as generated by to_xml().
+
+ The class attributes TAG_NAME and SUB_TAG_NAME are used to determine
+ if this is possible.
+ '''
+ if xml_node.tag == cls.TAG_NAME:
+ for child in xml_node:
+ if child.tag != cls.SUB_TAG_NAME:
+ # Fail if we find anything that isn't the sub-tag.
+ return False
+ return True
+ else:
+ return False
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''Imports XML as generated by to_xml().
+
+ The class attributes TAG_NAME and SUB_TAG_NAME are used when
+ doing the conversion.
+ '''
+ new_obj = None
+
+ if xml_node.tag == cls.TAG_NAME:
+ new_dict = dict()
+ new_obj = DataObjectDict(xml_node.get("name"), new_dict,
+ generate_xml=True)
+
+ # Populate child nodes into dictionary.
+ for child in xml_node:
+ if child.tag != cls.SUB_TAG_NAME:
+ # Fail if we find anything that isn't the sub-tag.
+ raise ParsingError("Invalid tag in data_dict: %s" \
+ % (child.tag))
+ new_dict[child.get("name")] = child.text
+ else:
+ raise ParsingError("Invalid tag in data_dict: %s" %
+ (xml_node.tag))
+
+ return new_obj
+
+ # Attributes accessors and mutators
+ def data_dict():
+ '''Access to the contained data_dictionary.'''
+ def fget(self):
+ '''Return the data dictionary being used by object.
+ '''
+ return self._data_dict
+
+ def fset(self, new_data_dict):
+ '''Sets the dictionary to be new_data_dict.
+
+ Exceptions:
+
+ ValueError - Will be thrown if the value for new_data_dict is
+ not of the correct type.
+ '''
+
+ if not isinstance(new_data_dict, dict):
+ raise ValueError(
+ "new_data_dict parameter is not a python 'dict'.")
+
+ self._data_dict = new_data_dict
+
+ doc = '''Get/Set the data dictionary
+
+ Exceptions:
+
+ ValueError - Will be thrown if a new value for data_dict is
+ not of the correct type.
+ '''
+ return locals()
+
+ data_dict = property(**data_dict())
+
+ def generate_xml():
+ def fget(self):
+ '''Returns whether this object will generate XML.
+ '''
+ return self._generate_xml
+
+ def fset(self, generate_xml):
+ '''Sets the generate_xml flag'''
+
+ self._generate_xml = generate_xml
+ doc = '''True if this object will generate XML'''
+ return locals()
+
+ generate_xml = property(**generate_xml())
+
+ def __getattr__(self, attr):
+ """Provide access to dictionary values as attributes if desired.
+
+ The primary use of this is when using paths in string substitutions
+ and the desire is to refer to a value in the dictionary.
+ """
+ if self.data_dict is not None:
+ try:
+ return self.data_dict[attr]
+ except KeyError:
+ raise AttributeError("Invalid attribute: %s" % (attr))
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/simple_data_object.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,247 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Create some simple DataObject implementations to allow for testing.'''
+
+from lxml import etree
+
+from solaris_install.data_object import DataObject
+
+
+class SimpleDataObject(DataObject):
+ '''Base simple data object, that enables import/export of XML'''
+
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''Handle any tag with the same name as the class'''
+ if xml_node.tag == cls.__name__:
+ return True
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''Import XML tags with the same name as the class'''
+ if xml_node.tag == cls.__name__:
+ # Create a new object, appropriate for the classname, works for
+ # sub-classes too...
+ return globals()[cls.__name__]\
+ (xml_node.attrib.get("name").strip())
+
+ return None
+
+ def to_xml(self):
+ '''Generate XML tags with the same name as the class'''
+ element = etree.Element(self.__class__.__name__, name=self.name)
+ return element
+
+ def __repr__(self):
+ '''Describe the class for debug output'''
+ return "%s: name = %s" % (self.__class__.__name__, self.name)
+
+
+class SimpleDataObjectNoXml(SimpleDataObject):
+ '''Simple DataObject that doesn't generate XML'''
+
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''Doesn't import any XML'''
+ return False
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''Doesn't import any XML'''
+ return None
+
+ def to_xml(self):
+ '''Doesn't generate any XML'''
+ return None
+
+
+# Define an alternative SimpleDataObjects for testing by type
+class SimpleDataObject2(SimpleDataObject):
+ '''Alternative class definition for searching by class type tests'''
+ pass
+
+
+class SimpleDataObject3(SimpleDataObjectNoXml):
+ '''Alternative class definition for searching by class type without XML'''
+ pass
+
+
+class SimpleDataObject4(SimpleDataObject):
+ '''Alternative class definition for searching by class type tests'''
+ pass
+
+
+class SimpleDataObject5(SimpleDataObject):
+ '''Alternative class definition for searching by class type tests'''
+ pass
+
+
+# Simple DataObject that imports and generates children's XML
+class SimpleDataObjectHandlesChildren(SimpleDataObject):
+    '''A simple DataObject that generates and imports XML for its children'''
+
+ TAG_NAME = "so_child"
+
+ def __init__(self, name):
+ super(SimpleDataObjectHandlesChildren, self).__init__(name)
+ self.generates_xml_for_children = True
+
+ def to_xml(self):
+        '''Generate XML for itself and its children'''
+ element = etree.Element(self.__class__.__name__, name=self.name)
+
+ for child in self.children:
+ sub_element = etree.SubElement(element, self.TAG_NAME)
+ sub_element.set("name", child.name)
+
+ return element
+
+ @classmethod
+ def can_handle(cls, xml_node):
+        '''Can import XML for itself and its children'''
+ if xml_node.tag == cls.__name__:
+ return True
+
+ @classmethod
+ def from_xml(cls, xml_node):
+        '''Import XML for itself and its children'''
+ if xml_node.tag != cls.__name__:
+ return None
+
+ new_obj = SimpleDataObjectHandlesChildren(
+ xml_node.attrib.get("name").strip())
+
+ # Now, we need to handle children we generated
+ for node in xml_node:
+ if node.tag == cls.TAG_NAME:
+ new_child = SimpleDataObject(node.get("name").strip())
+ new_obj.insert_children(new_child)
+
+ return new_obj
+
+
+def create_simple_data_obj_tree():
+ '''Create test object tree and return dict to access specific elements'''
+
+ # Create a tree that looks like:
+ #
+ # root
+ # child_1
+ # child_1_1
+ # child_1_2
+ # child_2
+ # child_2_1
+ # child_2_1_1
+ # child_2_1_1_1
+ # child_2_1_1_2
+ # child_3
+ # child_3_1
+    #            child_3_1_1
+ # child_3_1_2
+ # child_3_1_2_same_name
+ # child_4
+ # child_5
+ # child_5_1
+ # child_5_2
+ # child_5_2_1
+ # child_5_2_2
+ # child_5_2_3
+ # child_5_2_3_1
+ # child_5_2_3_2
+ # child_5_2_3_3
+ # child_5_2_3_3_same_name
+ # child_5_same_name
+
+ object_dict = dict()
+
+ # Create root node
+ object_dict["data_obj"] = SimpleDataObject("root")
+ # Add some children, used by most tests.
+ object_dict["child_1"] = SimpleDataObject2("child_1")
+ object_dict["child_2"] = SimpleDataObject("child_2")
+ object_dict["child_3"] = SimpleDataObject("child_3")
+ object_dict["child_4"] = SimpleDataObject2("child_4")
+ object_dict["child_5"] = SimpleDataObject3("child_5")
+ object_dict["child_5_same_name"] = SimpleDataObject("child_5")
+
+ do_list = list()
+ do_list.append(object_dict["child_1"])
+ do_list.append(object_dict["child_2"])
+ do_list.append(object_dict["child_3"])
+ do_list.append(object_dict["child_4"])
+ do_list.append(object_dict["child_5"])
+ do_list.append(object_dict["child_5_same_name"])
+
+ object_dict["data_obj"].insert_children(do_list)
+
+ # Now let's add the children of children, etc. for use by
+ # get_descendants() tests.
+ # child_1 children
+ object_dict["child_1_1"] = SimpleDataObject("child_1_1")
+ object_dict["child_1_2"] = SimpleDataObject("child_1_2")
+ object_dict["child_1"].insert_children(
+ [object_dict["child_1_1"], object_dict["child_1_2"]])
+
+ # child_2 tree
+ object_dict["child_2_1"] = SimpleDataObject2("child_2_1")
+ object_dict["child_2"].insert_children(object_dict["child_2_1"])
+ object_dict["child_2_1_1"] = SimpleDataObject2("child_2_1_1")
+ object_dict["child_2_1"].insert_children(object_dict["child_2_1_1"])
+ object_dict["child_2_1_1_1"] = SimpleDataObject2("child_2_1_1_1")
+ object_dict["child_2_1_1_2"] = SimpleDataObject2("child_2_1_1_2")
+ object_dict["child_2_1_1"].insert_children(
+ [object_dict["child_2_1_1_1"], object_dict["child_2_1_1_2"]])
+
+ # child_3 tree
+ object_dict["child_3_1"] = SimpleDataObject("child_3_1")
+ object_dict["child_3"].insert_children(object_dict["child_3_1"])
+ object_dict["child_3_1_1"] = SimpleDataObject("child_3_1_1")
+ object_dict["child_3_1_2"] = SimpleDataObject("child_3_1_2")
+ object_dict["child_3_1_2_same_name"] = SimpleDataObject("child_3_1_2")
+ object_dict["child_3_1"].insert_children(
+ [object_dict["child_3_1_1"], object_dict["child_3_1_2"],
+ object_dict["child_3_1_2_same_name"]])
+
+ # child_5 tree
+ object_dict["child_5_1"] = SimpleDataObject("child_5_1")
+ object_dict["child_5_2"] = SimpleDataObject("child_5_2")
+ object_dict["child_5"].insert_children(
+ [object_dict["child_5_1"], object_dict["child_5_2"]])
+ object_dict["child_5_2_1"] = SimpleDataObject("child_5_2_1")
+ object_dict["child_5_2_2"] = SimpleDataObject("child_5_2_2")
+ object_dict["child_5_2_3"] = SimpleDataObject4("child_5_2_3")
+ object_dict["child_5_2"].insert_children(
+ [object_dict["child_5_2_1"], object_dict["child_5_2_2"],
+ object_dict["child_5_2_3"]])
+
+ object_dict["child_5_2_3_1"] = SimpleDataObject("child_5_2_3_1")
+ object_dict["child_5_2_3_2"] = SimpleDataObject("child_5_2_3_2")
+ object_dict["child_5_2_3_3"] = SimpleDataObject("child_5_2_3_3")
+ object_dict["child_5_2_3_3_same_name"] = SimpleDataObject("child_5_2_3_3")
+ object_dict["child_5_2_3"].insert_children(
+ [object_dict["child_5_2_3_1"], object_dict["child_5_2_3_2"],
+ object_dict["child_5_2_3_3"], object_dict["child_5_2_3_3_same_name"]])
+
+ return object_dict
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_cache_children.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,104 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to validate the DOC children operate as expected'''
+
+import unittest
+
+from solaris_install.data_object.cache import \
+ DataObjectCache
+from simple_data_object import SimpleDataObject
+
+
+class TestDataObjectCacheChildren(unittest.TestCase):
+    '''Tests to validate the DOC children operate as expected'''
+
+ def setUp(self):
+ '''Create a reference to a DOC'''
+ self.doc = DataObjectCache()
+
+ def tearDown(self):
+        '''Cleanup reference to DOC and its children'''
+ self.doc.clear()
+ self.doc = None
+
+ def test_data_object_cache_children_exist(self):
+ '''Validate that the DOC children always exist'''
+ persistent = self.doc.get_children(name="persistent")
+ volatile = self.doc.get_children(name="volatile")
+
+ self.assertTrue(len(persistent) > 0 and persistent[0] != None)
+ self.assertTrue(len(volatile) > 0 and volatile[0] != None)
+
+ self.assertEqual(persistent[0], self.doc.persistent)
+ self.assertEqual(volatile[0], self.doc.volatile)
+
+ def test_data_object_cache_children_insertion(self):
+ '''Validate that DOC doesn't allow insertion of direct children'''
+ simple = SimpleDataObject("Test Child")
+ try:
+ self.doc.insert_children(simple)
+ self.fail("Managed to insert child when expected exception")
+ except AttributeError:
+ pass
+
+ def test_data_object_cache_children_deletion_directly(self):
+ '''Validate the DOC children cannot be deleted by reference'''
+ try:
+ self.doc.delete_children(self.doc.persistent)
+ self.fail("Managed to delete 'persistent' when expected exception")
+ except AttributeError:
+ pass
+
+ try:
+ self.doc.delete_children(self.doc.volatile)
+ self.fail("Managed to delete 'volatile' when expected exception")
+ except AttributeError:
+ pass
+
+ def test_data_object_cache_children_deletion_all(self):
+ '''Validate the DOC children cannot be deleted by delete all.'''
+ try:
+ self.doc.delete_children()
+ self.fail("Managed to delete children when expected exception")
+ except AttributeError:
+ pass
+
+ def test_data_object_cache_children_delete(self):
+ '''Validate DOC and children cannot be deleted by delete() method'''
+ try:
+ self.doc.delete()
+ self.fail("Managed to delete self when expected exception")
+ except AttributeError:
+ pass
+
+ # Ensure that delete() call doesn't delete persistent and volatile.
+ self.doc.persistent.delete()
+ self.assertNotEqual(self.doc.persistent, None)
+
+ self.doc.volatile.delete()
+ self.assertNotEqual(self.doc.volatile, None)
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_cache_registration.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,233 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to validate DataObjectCache registration mechanism'''
+
+import unittest
+
+from lxml import etree
+
+from solaris_install.data_object import DataObject
+from solaris_install.data_object.cache import DataObjectCache
+import solaris_install.data_object.cache as DOC
+from simple_data_object import SimpleDataObject, SimpleDataObject2, \
+ SimpleDataObject3, SimpleDataObject4
+
+# Define two classes that both handle the same tag, but have
+# different priorities.
+COMMON_TAG = "common_tag"
+
+
+class SimpleDataObjectSameTagNormPrio(DataObject):
+ '''Define a simple data object that uses COMMON_TAG to use default prio'''
+
+ @classmethod
+ def can_handle(cls, xml_node):
+ '''Can we handle XML node?'''
+ if xml_node.tag == COMMON_TAG:
+ return True
+
+ @classmethod
+ def from_xml(cls, xml_node):
+ '''Return an instance of self if XML has correct tag'''
+ if xml_node.tag == COMMON_TAG:
+            return SimpleDataObjectSameTagNormPrio(name=xml_node.attrib.get("name"))
+
+ return None
+
+ def to_xml(self):
+ '''Generate XML using COMMON_TAG value'''
+ element = etree.Element(COMMON_TAG, name=self.name)
+ return element
+
+ def __repr__(self):
+ return "%s: name = %s" % (self.__class__.__name__, self.name)
+
+
+class SimpleDataObjectSameTagHighPrio(SimpleDataObjectSameTagNormPrio):
+ '''Define a similar class, but will be given a higher priority'''
+ pass
+
+
+class TestDataObjectCacheRegistration(unittest.TestCase):
+ '''Tests to validate DataObjectCache registration mechanism'''
+
+ def setUp(self):
+ '''Create DOC reference, and ensure an empty class registry'''
+ self.doc = DataObjectCache()
+
+ # Hack to ensure that registry is empty before we use it,
+ self.orig_registry = DOC._CACHE_CLASS_REGISTRY
+ DOC._CACHE_CLASS_REGISTRY = dict()
+
+ def tearDown(self):
+ '''Cleanup DOC reference, but restore DOC's class registry when done'''
+ self.doc.clear()
+ self.doc = None
+
+ # Hack to ensure that registry is restored after we use it.
+ DOC._CACHE_CLASS_REGISTRY = self.orig_registry
+
+ def test_doc_registration_simple_data_object(self):
+ '''Validate registration and selection of a single class'''
+ try:
+ DataObjectCache.register_class(SimpleDataObject)
+ except (TypeError, ValueError):
+ self.fail("Failed to register SimpleDataObject!")
+
+        # Test that it's actually registered and will correctly return class.
+ simple = SimpleDataObject("TestSimple")
+ xml_elem = simple.to_xml()
+ class_obj = DataObjectCache.find_class_to_handle(xml_elem)
+ self.assertEqual(class_obj, SimpleDataObject, str(class_obj))
+
+ def test_doc_registration_simple_data_object_prio_30(self):
+ '''Validate insertion of a class with prio 30'''
+ try:
+ DataObjectCache.register_class(SimpleDataObject, priority=30)
+ except (TypeError, ValueError):
+ self.fail("Failed to register SimpleDataObject with prio 30!")
+
+ def test_doc_registration_simple_data_object_prio_0(self):
+ '''Validate insertion of a class with prio 0'''
+ try:
+ DataObjectCache.register_class(SimpleDataObject, priority=0)
+ except (TypeError, ValueError):
+ self.fail("Failed to register SimpleDataObject with prio 0!")
+
+ def test_doc_registration_simple_data_object_prio_100(self):
+ '''Validate insertion of a class with prio 100'''
+ try:
+ DataObjectCache.register_class(SimpleDataObject, priority=100)
+ except (TypeError, ValueError):
+ self.fail("Failed to register SimpleDataObject with prio 100!")
+
+ def test_doc_registration_simple_data_object_prio_minus_1(self):
+ '''Validate insertion fails with a class with prio -1'''
+ self.assertRaises(ValueError, DataObjectCache.register_class,
+ SimpleDataObject, priority=-1)
+
+ def test_doc_registration_simple_data_object_prio_101(self):
+ '''Validate insertion fails with a class with prio 101'''
+ self.assertRaises(ValueError, DataObjectCache.register_class,
+ SimpleDataObject, priority=101)
+
+ def test_doc_registration_non_data_object(self):
+ '''Validate insertion fails with a non-DataObject sub-class'''
+ self.assertRaises(TypeError, DataObjectCache.register_class,
+ object)
+
+ def test_doc_registration_get_registered_classes_str(self):
+ '''Validate correct output from get_registered_classes_str()'''
+ # Used as expected return string in method
+ # Compensate for indent
+ indentation = '''\
+ '''
+ expected_registered_classes_str = '''\
+ ============================
+ Registered Classes:
+ [Priority = 30]
+ <class 'simple_data_object.SimpleDataObject'>
+ [Priority = 50]
+ <class 'simple_data_object.SimpleDataObject2'>
+ <class 'simple_data_object.SimpleDataObject3'>
+ [Priority = 100]
+ <class 'simple_data_object.SimpleDataObject4'>
+ ============================
+ '''.replace(indentation, "")
+
+ DataObjectCache.register_class(SimpleDataObject, priority=30)
+ DataObjectCache.register_class(SimpleDataObject2)
+ DataObjectCache.register_class(SimpleDataObject3, priority=50)
+ DataObjectCache.register_class(SimpleDataObject4, priority=100)
+
+ txt = self.doc.get_registered_classes_str()
+
+ self.assertEquals(expected_registered_classes_str, txt,
+ "EXPECTED:\n%s\nGOT:\n%s\n" %
+ (expected_registered_classes_str, txt))
+
+ def test_doc_registration_multiple_classes_same_prio(self):
+ '''Validate registration of multiple classes at same priority'''
+ # Compensate for indent
+ indentation = '''\
+ '''
+ expected_registered_classes_str = '''\
+ ============================
+ Registered Classes:
+ [Priority = 30]
+ <class 'simple_data_object.SimpleDataObject'>
+ <class 'simple_data_object.SimpleDataObject2'>
+ <class 'simple_data_object.SimpleDataObject3'>
+ <class 'simple_data_object.SimpleDataObject4'>
+ ============================
+ '''.replace(indentation, "")
+ DataObjectCache.register_class(SimpleDataObject, priority=30)
+ DataObjectCache.register_class(SimpleDataObject2, priority=30)
+ DataObjectCache.register_class(SimpleDataObject3, priority=30)
+ DataObjectCache.register_class(SimpleDataObject4, priority=30)
+
+ txt = self.doc.get_registered_classes_str()
+
+ self.assertEquals(expected_registered_classes_str, txt,
+ "EXPECTED:\n%s\nGOT:\n%s\n" %
+ (expected_registered_classes_str, txt))
+
+ def test_doc_registration_multiple_handlers_highest_prio_selected(self):
+ '''Validate highest priority class is selected when several handlers'''
+ DataObjectCache.register_class(SimpleDataObjectSameTagHighPrio,
+ priority=30)
+ DataObjectCache.register_class(SimpleDataObjectSameTagNormPrio,
+ priority=50)
+
+ xml_elem = etree.Element(COMMON_TAG, name="some name")
+
+ class_obj = DataObjectCache.find_class_to_handle(xml_elem)
+
+ self.assertEqual(class_obj, SimpleDataObjectSameTagHighPrio)
+
+ def test_doc_registration_no_handler_found(self):
+        '''Validate failure if no handler is found'''
+ DataObjectCache.register_class(SimpleDataObject)
+ DataObjectCache.register_class(SimpleDataObject2)
+ DataObjectCache.register_class(SimpleDataObject3)
+ DataObjectCache.register_class(SimpleDataObject4)
+
+ # First ensure it works as expected.
+ xml_elem = etree.Element("SimpleDataObject", name="handled")
+
+ class_obj = DataObjectCache.find_class_to_handle(xml_elem)
+
+ self.assertEqual(class_obj, SimpleDataObject)
+
+ # Now ensure that it fails when expected.
+ xml_elem_fail = etree.Element("not_handled", name="not_handled_name")
+
+ class_obj = DataObjectCache.find_class_to_handle(xml_elem_fail)
+
+ self.assertEqual(class_obj, None)
+
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_cache_snapshots.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,353 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to validate DOC snapshots support'''
+
+import unittest
+from StringIO import StringIO
+from tempfile import mkdtemp, mktemp
+from os import unlink, rmdir, stat, path
+
+from solaris_install.data_object.cache import DataObjectCache
+from simple_data_object import SimpleDataObject, \
+ SimpleDataObject2, SimpleDataObject3, SimpleDataObject4
+
+
+class TestDataObjectCacheSnapshots(unittest.TestCase):
+ '''Tests to validate DOC snapshots support'''
+
+ def setUp(self):
+ '''Set up correct environment for tests.
+
+ Creates reference to a temp dir and file and StringIO buffer.
+
+ Creates a tree of elements in the DOC to validate before/after.
+ '''
+
+ # Create temporary work directory
+ self.temp_dir = mkdtemp(prefix="doc_test-")
+ self.temp_file = mktemp(prefix="snapshot-", dir=self.temp_dir)
+
+ # Create StringIO memory buffer for I/O test
+ self.buffer = StringIO()
+
+ # Create a tree that looks like:
+ # DOC
+ # volatile
+ # volatile_root
+ # persistent
+ # persistent_root
+ # child_1
+ # child_1_1
+ # child_1_2
+ # child_2
+ # child_2_1
+ # child_2_1_1
+ # child_2_1_1_1
+ # child_2_1_1_2
+ # child_3
+ # child_3_1
+        #                    child_3_1_1
+ # child_3_1_2
+ # child_3_1_2_same_name
+ # child_4
+ # child_5
+ # child_5_1
+ # child_5_2
+ # child_5_2_1
+ # child_5_2_2
+ # child_5_2_3
+ # child_5_2_3_1
+ # child_5_2_3_2
+ # child_5_2_3_3
+ # child_5_2_3_3_same_name
+ # child_5_same_name
+
+ # Create DOC node
+ self.doc = DataObjectCache()
+
+ # Create some persistent content
+ self.persistent_root = SimpleDataObject("persistent_root")
+ # Add some children, used by most tests.
+ self.child_1 = SimpleDataObject2("child_1")
+ self.child_2 = SimpleDataObject("child_2")
+ self.child_3 = SimpleDataObject("child_3")
+ self.child_4 = SimpleDataObject2("child_4")
+ self.child_5 = SimpleDataObject3("child_5")
+ self.child_5_same_name = SimpleDataObject("child_5")
+
+ self.do_list = list()
+ self.do_list.append(self.child_1)
+ self.do_list.append(self.child_2)
+ self.do_list.append(self.child_3)
+ self.do_list.append(self.child_4)
+ self.do_list.append(self.child_5)
+ self.do_list.append(self.child_5_same_name)
+
+ self.persistent_root.insert_children(self.do_list)
+
+ self.doc.persistent.insert_children(self.persistent_root)
+
+ # Now let's add the children of children, etc. for use by
+ # get_descendants() tests.
+ # child_1 children
+ self.child_1_1 = SimpleDataObject("child_1_1")
+ self.child_1_2 = SimpleDataObject("child_1_2")
+ self.child_1.insert_children([self.child_1_1, self.child_1_2])
+
+ # child_2 tree
+ self.child_2_1 = SimpleDataObject2("child_2_1")
+ self.child_2.insert_children(self.child_2_1)
+ self.child_2_1_1 = SimpleDataObject2("child_2_1_1")
+ self.child_2_1.insert_children(self.child_2_1_1)
+ self.child_2_1_1_1 = SimpleDataObject2("child_2_1_1_1")
+ self.child_2_1_1_2 = SimpleDataObject2("child_2_1_1_2")
+ self.child_2_1_1.insert_children(
+ [self.child_2_1_1_1, self.child_2_1_1_2])
+
+ # child_3 tree
+ self.child_3_1 = SimpleDataObject("child_3_1")
+ self.child_3.insert_children(self.child_3_1)
+ self.child_3_1_1 = SimpleDataObject("child_3_1_1")
+ self.child_3_1_2 = SimpleDataObject("child_3_1_2")
+ self.child_3_1_2_same_name = SimpleDataObject("child_3_1_2")
+ self.child_3_1.insert_children([self.child_3_1_1, self.child_3_1_2,
+ self.child_3_1_2_same_name])
+
+ # child_5 tree
+ self.child_5_1 = SimpleDataObject("child_5_1")
+ self.child_5_2 = SimpleDataObject("child_5_2")
+ self.child_5.insert_children([self.child_5_1, self.child_5_2])
+ self.child_5_2_1 = SimpleDataObject("child_5_2_1")
+ self.child_5_2_2 = SimpleDataObject("child_5_2_2")
+ self.child_5_2_3 = SimpleDataObject4("child_5_2_3")
+ self.child_5_2.insert_children(
+ [self.child_5_2_1, self.child_5_2_2, self.child_5_2_3])
+
+ self.child_5_2_3_1 = SimpleDataObject("child_5_2_3_1")
+ self.child_5_2_3_2 = SimpleDataObject("child_5_2_3_2")
+ self.child_5_2_3_3 = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3_3_same_name = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3.insert_children(
+ [self.child_5_2_3_1, self.child_5_2_3_2,
+ self.child_5_2_3_3, self.child_5_2_3_3_same_name])
+
+ # Create some volatile content, not much, just enough to test that it's
+ # not overwritten on loading of snapshot.
+ self.volatile_root = SimpleDataObject2("volatile_root")
+ self.doc.volatile.insert_children(self.volatile_root)
+
+ def tearDown(self):
+ '''Cleanup test environment and references.'''
+ # Remove temp dir, may not always be there.
+ if path.exists(self.temp_file):
+ unlink(self.temp_file)
+
+ if path.exists(self.temp_dir):
+ rmdir(self.temp_dir)
+
+ # Remove buffer
+        self.buffer.close() # Frees memory
+ self.buffer = None
+
+ # Unset variables.
+ self.doc.clear()
+ self.doc = None
+ self.volatile_root = None
+ self.persistent_root = None
+ self.child_1 = None
+ self.child_2 = None
+ self.child_3 = None
+ self.child_4 = None
+ self.child_5 = None
+ self.child_5_same_name = None
+ self.do_list = None
+
+ self.child_1_1 = None
+ self.child_1_2 = None
+
+ self.child_2_1 = None
+ self.child_2_1_1 = None
+ self.child_2_1_1_1 = None
+ self.child_2_1_1_2 = None
+
+ self.child_3_1 = None
+ self.child_3_1_1 = None
+ self.child_3_1_2 = None
+ self.child_3_1_2_same_name = None
+
+ self.child_5_1 = None
+ self.child_5_2 = None
+ self.child_5_2_1 = None
+ self.child_5_2_2 = None
+ self.child_5_2_3 = None
+
+ self.child_5_2_3_1 = None
+ self.child_5_2_3_2 = None
+ self.child_5_2_3_3 = None
+ self.child_5_2_3_3_same_name = None
+
+ def test_data_object_cache_snapshots_write_file_string(self):
+ '''Validate writing of a snapshot to a file'''
+ try:
+ self.doc.take_snapshot(self.temp_file)
+ except Exception, e:
+ self.fail("Got unexpected error writing snapshot: " + str(e))
+
+ try:
+ stat_info = stat(self.temp_file)
+ self.assertFalse(stat_info.st_size < 2048,
+ "Snapshot file size is too small: %d" % (stat_info.st_size))
+ except Exception, e:
+ self.fail("Got unexpected error stat-ing snapshot file: " + str(e))
+
+ def test_data_object_cache_snapshots_write_file_object(self):
+ '''Validate writing of a snapshot to a file-like object'''
+ try:
+ self.doc.take_snapshot(self.buffer)
+ except Exception, e:
+ self.fail("Got unexpected error writing snapshot: " + str(e))
+
+ try:
+ self.assertFalse(self.buffer.len < 2048,
+ "Snapshot buffer size is too small: %d" % (self.buffer.len))
+ except Exception, e:
+ self.fail("Got unexpected error stat-ing snapshot file: " + str(e))
+
+ def test_data_object_cache_snapshots_write_file_invalid_path(self):
+ '''Validate failure to invalid path'''
+ self.assertRaises(IOError, self.doc.take_snapshot,
+ self.temp_dir + "/tmpnon-existant_dir/file")
+
+ def test_data_object_cache_snapshots_write_file_null_object(self):
+ '''Validate failure if passed None'''
+ self.assertRaises(ValueError, self.doc.take_snapshot, None)
+
+ def test_data_object_cache_snapshots_read_file_string(self):
+ '''Validate snapshotted and restored data are the same'''
+ before_snap = str(self.doc.persistent)
+ try:
+ self.doc.take_snapshot(self.temp_file)
+ except Exception, e:
+ self.fail("Got unexpected error writing snapshot: " + str(e))
+
+ # Remove some persistent children to be sure it's empty so won't
+ # compare unless load of snapshot works correctly
+ for child in self.doc.persistent.children:
+ for child_child in child.children:
+ child_child.delete_children()
+
+ after_delete = str(self.doc.persistent)
+
+ self.assertNotEquals(before_snap, after_delete,
+ "Before and After delete strings are same:\
+ \nBEFORE\n%s\n\nAFTER:\n%s\n" % (before_snap, after_delete))
+
+ try:
+ self.doc.load_from_snapshot(self.temp_file)
+ except Exception, e:
+ self.fail("Got unexpected error reading snapshot: " + str(e))
+
+ after_snap = str(self.doc.persistent)
+
+ self.assertEquals(before_snap, after_snap,
+ "Before and After strings differ:\nBEFORE\n%s\n\nAFTER:\n%s\n" %
+ (before_snap, after_snap))
+
+ def test_data_object_cache_snapshots_read_file_object(self):
+ '''Validate snapshot and restore work after modifications in memory'''
+ before_snap = str(self.doc.persistent)
+
+ try:
+ self.doc.take_snapshot(self.buffer)
+ # Rewind to start so that read will be in right place.
+ self.buffer.seek(0)
+ except Exception, e:
+ self.fail("Got unexpected error writing snapshot: " + str(e))
+
+ self.assertTrue(self.buffer.len > 2048,
+ "Buffer size is wrong: %d" % (self.buffer.len))
+
+ # Remove some persistent children to be sure it's empty so won't
+ # compare unless load of snapshot works correctly
+ for child in self.doc.persistent.children:
+ for child_child in child.children:
+ child_child.delete_children()
+
+ after_delete = str(self.doc.persistent)
+
+ self.assertNotEquals(before_snap, after_delete,
+ "Before and After delete strings are same:\
+ \nBEFORE\n%s\n\nAFTER:\n%s\n" % (before_snap, after_delete))
+
+ try:
+ self.doc.load_from_snapshot(self.buffer)
+ except Exception, e:
+ self.fail("Got unexpected error reading snapshot: " + str(e))
+
+ after_snap = str(self.doc.persistent)
+
+ self.assertEquals(before_snap, after_snap,
+ "Before and After strings differ:\nBEFORE\n%s\n\nAFTER:\n%s\n" %
+ (before_snap, after_snap))
+
+ def test_data_object_cache_snapshots_read_file_invalid_path(self):
+        '''Validate failure if non-existent file path passed'''
+ self.assertRaises(IOError, self.doc.load_from_snapshot,
+ self.temp_dir + "/tmpnon-existant_dir/file")
+
+ def test_data_object_cache_snapshots_read_file_null_object(self):
+        '''Validate failure if None value passed to load_from_snapshot()'''
+ self.assertRaises(ValueError, self.doc.load_from_snapshot, None)
+
+ def test_data_object_cache_snapshots_ensure_volatile_skipped(self):
+ '''Validate that 'volatile' doesn't change on restore'''
+ try:
+ self.doc.take_snapshot(self.buffer)
+ # Rewind to start so that read will be in right place.
+ self.buffer.seek(0)
+ except Exception, e:
+ self.fail("Got unexpected error writing snapshot: " + str(e))
+
+ # Add a child to the volatile tree
+ new_child = SimpleDataObject3("new_volatile_child")
+ self.doc.volatile.insert_children(new_child)
+ before_restore = str(self.doc.volatile)
+
+ try:
+ self.doc.load_from_snapshot(self.buffer)
+ except Exception, e:
+ self.fail("Got unexpected error reading snapshot: " + str(e))
+
+ after_restore = str(self.doc.volatile)
+
+ self.assertEquals(before_restore, after_restore,
+ "Before and After strings differ:\nBEFORE\n%s\n\nAFTER:\n%s\n" %
+ (before_restore, after_restore))
+
+ self.assertEquals(self.doc.volatile.get_children(name=new_child.name),
+ [new_child], "Failed to locate 'new_child' in DOC!")
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_cache_utility.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,100 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests for DOC utility functionality'''
+
+import unittest
+
+from solaris_install.data_object.cache import DataObjectCache
+from simple_data_object import SimpleDataObject
+
+
+class TestDataObjectCacheUtility(unittest.TestCase):
+ '''Tests for DOC utility functionality'''
+
+ def setUp(self):
+ '''Create small set of objects and references to them'''
+ self.doc = DataObjectCache()
+ self.persistent_child_1 = SimpleDataObject("persistent_child_1")
+ self.persistent_child_2 = SimpleDataObject("persistent_child_2")
+ self.persistent_child_3 = SimpleDataObject("persistent_child_3")
+ self.doc.persistent.insert_children([self.persistent_child_1,
+ self.persistent_child_2, self.persistent_child_3])
+
+ self.volatile_child_1 = SimpleDataObject("volatile_child_1")
+ self.volatile_child_2 = SimpleDataObject("volatile_child_2")
+ self.volatile_child_3 = SimpleDataObject("volatile_child_3")
+ self.doc.volatile.insert_children([self.volatile_child_1,
+ self.volatile_child_2, self.volatile_child_3])
+
+ def tearDown(self):
+ '''Clean up contents of DOC and reference'''
+ self.doc.clear()
+ self.doc = None
+
+ self.persistent_child_1 = None
+ self.persistent_child_2 = None
+ self.persistent_child_3 = None
+
+ self.volatile_child_1 = None
+ self.volatile_child_2 = None
+ self.volatile_child_3 = None
+
+
+ def test_data_object_cache_utility_clear(self):
+ '''Validate the doc.clear() clears children of sub-trees only'''
+ self.assertTrue(self.doc.has_children,
+ "DataObjectCache should always have children\n%s" %\
+ (str(self.doc)))
+ self.assertTrue(self.doc.persistent.has_children,
+ "Persistent sub-tree should have children\n%s" %\
+ (str(self.doc)))
+ self.assertTrue(self.doc.volatile.has_children,
+ "Volatile sub-tree should have children\n%s" %\
+ (str(self.doc)))
+
+ self.doc.clear()
+
+ self.assertFalse(self.doc.persistent.has_children,
+ "Persistent sub-tree should have no children:\n%s" %\
+ (str(self.doc)))
+ self.assertFalse(self.doc.volatile.has_children,
+ "Volatile sub-tree should have no children\n%s" %\
+ (str(self.doc)))
+ self.assertTrue(self.doc.has_children,
+ "DataObjectCache should always have children\n%s" %\
+ (str(self.doc)))
+
+ def test_data_object_cache_utility_is_empty(self):
+ '''Validate that doc.is_empty property is valid'''
+ self.assertFalse(self.doc.is_empty,
+ "DOC doesn't contain children, when it should: \n%s" %
+ (str(self.doc)))
+ self.doc.clear()
+ self.assertTrue(self.doc.is_empty,
+ "DOC contains children when it should be empty: \n%s" %
+ (str(self.doc)))
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_cache_xml_support.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,321 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to test DOC XML specific methods'''
+
+import unittest
+
+from solaris_install.data_object.cache import DataObjectCache
+import solaris_install.data_object.cache as DOC
+from simple_data_object import SimpleDataObject, \
+ SimpleDataObject2, SimpleDataObject3, SimpleDataObjectHandlesChildren
+
+
+class TestDataObjectCacheXmlSupport(unittest.TestCase):
+ '''Tests to test DOC XML specific methods'''
+
+ def setUp(self):
+ '''Create DOC, and empty registry of classes, some children and refs'''
+
+ # Hack to ensure that registry is empty before we use it,
+ self.orig_registry = DOC._CACHE_CLASS_REGISTRY
+ DOC._CACHE_CLASS_REGISTRY = dict()
+
+ DataObjectCache.register_class([SimpleDataObject, SimpleDataObject2,
+ SimpleDataObject3, SimpleDataObjectHandlesChildren])
+
+ # Create a tree that looks like:
+ #
+ # root
+ # child_1
+ # child_1_1
+ # child_1_2
+ # child_2
+ # child_2_1
+ # child_2_1_1
+ # child_2_1_1_1
+ # child_2_1_1_2
+ # child_3
+ # child_3_1
+ # child_3_1_2
+ # child_3_1_2
+ # child_4
+ # child_5
+ # child_5_1
+ # child_5_2
+ # child_5_2_1
+ # child_5_2_2
+ # child_5_2_3
+ # child_5_2_3_1
+ # child_5_2_3_2
+ # child_5_2_3_3
+ # child_5_same_name
+
+ # Create root node
+ self.doc = DataObjectCache()
+
+ # Create some persistent content
+ self.persistent_root = SimpleDataObject("persistent_root")
+
+ # Add some children, used by most tests.
+ self.child_1 = SimpleDataObject2("child_1")
+ self.child_2 = SimpleDataObject("child_2")
+ self.child_3 = SimpleDataObject("child_3")
+ self.child_4 = SimpleDataObject2("child_4")
+ self.child_5 = SimpleDataObject3("child_5")
+
+ self.do_list = list()
+ self.do_list.append(self.child_1)
+ self.do_list.append(self.child_2)
+ self.do_list.append(self.child_3)
+ self.do_list.append(self.child_4)
+ self.do_list.append(self.child_5)
+
+ self.persistent_root.insert_children(self.do_list)
+
+ self.doc.persistent.insert_children(self.persistent_root)
+
+ # Now let's add the children of children, etc. for use by
+ # get_descendants() tests.
+ # child_1 children
+ self.child_1_1 = SimpleDataObject("child_1_1")
+ self.child_1_2 = SimpleDataObject("child_1_2")
+ self.child_1.insert_children([self.child_1_1, self.child_1_2])
+
+ # child_2 tree
+ self.child_2_1 = SimpleDataObject2("child_2_1")
+ self.child_2.insert_children(self.child_2_1)
+ self.child_2_1_1 = SimpleDataObject2("child_2_1_1")
+ self.child_2_1.insert_children(self.child_2_1_1)
+ self.child_2_1_1_1 = SimpleDataObject2("child_2_1_1_1")
+ self.child_2_1_1_2 = SimpleDataObject2("child_2_1_1_2")
+ self.child_2_1_1.insert_children([self.child_2_1_1_1,
+ self.child_2_1_1_2])
+
+ # child_3 tree
+ self.child_3_1 = SimpleDataObject("child_3_1")
+ self.child_3.insert_children(self.child_3_1)
+ self.child_3_1_1 = SimpleDataObject("child_3_1_1")
+ self.child_3_1_2 = SimpleDataObject("child_3_1_2")
+ self.child_3_1_2_same_name = SimpleDataObject("child_3_1_2")
+ self.child_3_1.insert_children([self.child_3_1_1, self.child_3_1_2,
+ self.child_3_1_2_same_name])
+
+ # child_5 tree
+ self.child_5_1 = SimpleDataObject("child_5_1")
+ self.child_5_2 = SimpleDataObject("child_5_2")
+ self.child_5.insert_children([self.child_5_1, self.child_5_2])
+ self.child_5_2_1 = SimpleDataObject("child_5_2_1")
+ self.child_5_2_2 = SimpleDataObject("child_5_2_2")
+ self.child_5_2_3 = SimpleDataObjectHandlesChildren("child_5_2_3")
+ self.child_5_2.insert_children([self.child_5_2_1, self.child_5_2_2,
+ self.child_5_2_3])
+
+ self.child_5_2_3_1 = SimpleDataObject("child_5_2_3_1")
+ self.child_5_2_3_2 = SimpleDataObject("child_5_2_3_2")
+ self.child_5_2_3_3 = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3_3_same_name = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3.insert_children([self.child_5_2_3_1,
+ self.child_5_2_3_2, self.child_5_2_3_3,
+ self.child_5_2_3_3_same_name])
+
+ # Create some volatile content, not much, just enough to test that it's
+ # not overwritten on loading of snapshot.
+ self.volatile_root = SimpleDataObject2("volatile_root")
+ self.doc.volatile.insert_children(self.volatile_root)
+
+ def tearDown(self):
+ '''Restore class registry and clear DOC and other references'''
+
+ # Hack to ensure that registry is restored after we use it.
+ DOC._CACHE_CLASS_REGISTRY = self.orig_registry
+
+ self.doc.clear()
+ self.doc = None
+ self.persistent_root = None
+ self.volatile_root = None
+ self.child_1 = None
+ self.child_2 = None
+ self.child_3 = None
+ self.child_4 = None
+ self.child_5 = None
+ self.do_list = None
+
+ self.child_1_1 = None
+ self.child_1_2 = None
+
+ self.child_2_1 = None
+ self.child_2_1_1 = None
+ self.child_2_1_1_1 = None
+ self.child_2_1_1_2 = None
+
+ self.child_3_1 = None
+ self.child_3_1_1 = None
+ self.child_3_1_2 = None
+ self.child_3_1_2_same_name = None
+
+ self.child_5_1 = None
+ self.child_5_2 = None
+ self.child_5_2_1 = None
+ self.child_5_2_2 = None
+ self.child_5_2_3 = None
+
+ self.child_5_2_3_1 = None
+ self.child_5_2_3_2 = None
+ self.child_5_2_3_3 = None
+ self.child_5_2_3_3_same_name = None
+
+ def test_data_object_cache_xml_support_skip_sub_tree_elements(self):
+ '''Validate no XML generated by volatile and persistent'''
+
+ # doc.volatile and doc.persistent shouldn't generate their own
+        # XML, so ensure that this is the case.
+ xml_tree = self.doc.generate_xml_manifest()
+
+ child_names = []
+ for xml_child in xml_tree:
+ child_names.append(xml_child.get("name"))
+
+ self.assertEqual(child_names, \
+ [self.persistent_root.name, self.volatile_root.name])
+
+ def test_data_object_cache_xml_support_generates_expected_xml(self):
+ '''Validate that expected XML is generated by generate_xml_manifest'''
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <root>
+ ..<SimpleDataObject name="persistent_root">
+ ....<SimpleDataObject2 name="child_1">
+ ......<SimpleDataObject name="child_1_1"/>
+ ......<SimpleDataObject name="child_1_2"/>
+ ....</SimpleDataObject2>
+ ....<SimpleDataObject name="child_2">
+ ......<SimpleDataObject2 name="child_2_1">
+ ........<SimpleDataObject2 name="child_2_1_1">
+ ..........<SimpleDataObject2 name="child_2_1_1_1"/>
+ ..........<SimpleDataObject2 name="child_2_1_1_2"/>
+ ........</SimpleDataObject2>
+ ......</SimpleDataObject2>
+ ....</SimpleDataObject>
+ ....<SimpleDataObject name="child_3">
+ ......<SimpleDataObject name="child_3_1">
+ ........<SimpleDataObject name="child_3_1_1"/>
+ ........<SimpleDataObject name="child_3_1_2"/>
+ ........<SimpleDataObject name="child_3_1_2"/>
+ ......</SimpleDataObject>
+ ....</SimpleDataObject>
+ ....<SimpleDataObject2 name="child_4"/>
+ ....<SimpleDataObject name="child_5_1"/>
+ ....<SimpleDataObject name="child_5_2">
+ ......<SimpleDataObject name="child_5_2_1"/>
+ ......<SimpleDataObject name="child_5_2_2"/>
+ ......<SimpleDataObjectHandlesChildren name="child_5_2_3">
+ ........<so_child name="child_5_2_3_1"/>
+ ........<so_child name="child_5_2_3_2"/>
+ ........<so_child name="child_5_2_3_3"/>
+ ........<so_child name="child_5_2_3_3"/>
+ ......</SimpleDataObjectHandlesChildren>
+ ....</SimpleDataObject>
+ ..</SimpleDataObject>
+ ..<SimpleDataObject2 name="volatile_root"/>
+ </root>
+ '''.replace(indentation, "").replace(".", " ")
+
+ xml_str = self.doc.get_xml_tree_str()
+
+ self.assertEqual(xml_str, expected_xml,
+ "Resulting XML doesn't match expected (len=%d != %d):\
+ \nGOT:\n%s\nEXPECTED:\n%s\n" %
+ (len(xml_str), len(expected_xml), xml_str, expected_xml))
+
+ def test_data_object_cache_xml_support_children_from_xml_volatile(self):
+ '''Validate import_from_manifest_xml volatile flag'''
+ # Get original XML tree
+ orig_xml_tree = self.doc.generate_xml_manifest()
+ orig_xml_str = self.doc.get_xml_tree_str()
+
+ # Empty the DOC
+ self.doc.clear()
+
+ self.assertTrue(self.doc.is_empty)
+
+        # Now, try to re-create DOC from original XML
+ self.doc.import_from_manifest_xml(orig_xml_tree, volatile=True)
+
+ self.assertTrue(self.doc.volatile.has_children)
+ self.assertFalse(self.doc.persistent.has_children)
+
+ imported_xml_str = self.doc.get_xml_tree_str()
+
+ self.assertEqual(imported_xml_str, orig_xml_str,
+ "Resulting XML doesn't match expected (len=%d != %d):\
+ \nGOT:\n%s\nEXPECTED:\n%s\n" %
+ (len(imported_xml_str), len(orig_xml_str),
+ imported_xml_str, orig_xml_str))
+
+ def test_data_object_cache_xml_support_children_from_xml_persistent(self):
+ '''Validate default XML import into persistent tree'''
+ # Get original XML tree
+ orig_xml_tree = self.doc.generate_xml_manifest()
+ orig_xml_str = self.doc.get_xml_tree_str()
+
+ # Empty the DOC
+ self.doc.clear()
+
+ self.assertTrue(self.doc.is_empty)
+
+        # Now, try to re-create DOC from original XML
+ self.doc.import_from_manifest_xml(orig_xml_tree)
+
+ # Ensure it's in the correct tree
+ self.assertFalse(self.doc.volatile.has_children)
+ self.assertTrue(self.doc.persistent.has_children)
+
+ imported_xml_str = self.doc.get_xml_tree_str()
+
+ self.assertEqual(imported_xml_str, orig_xml_str,
+ "Resulting XML doesn't match expected (len=%d != %d):\
+ \nGOT:\n%s\nEXPECTED:\n%s\n" %
+ (len(imported_xml_str), len(orig_xml_str),
+ imported_xml_str, orig_xml_str))
+
+ def test_data_object_cache_xml_methods(self):
+ '''Validate correct values returned from XML methods'''
+ self.assertNotEqual(self.doc.to_xml(), None)
+ self.assertEqual(self.doc.persistent.to_xml(), None)
+ self.assertEqual(self.doc.volatile.to_xml(), None)
+
+ self.assertFalse(self.doc.can_handle(None))
+ self.assertFalse(self.doc.from_xml(None))
+
+ self.assertFalse(self.doc.persistent.can_handle(None))
+ self.assertFalse(self.doc.persistent.from_xml(None))
+
+ self.assertFalse(self.doc.volatile.can_handle(None))
+ self.assertFalse(self.doc.volatile.from_xml(None))
+
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_deletion.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,251 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests for various DataObject deletion methods'''
+
+import unittest
+
+from solaris_install.data_object import ObjectNotFoundError
+from simple_data_object import SimpleDataObject, \
+ SimpleDataObject2, SimpleDataObject3
+
+
+class TestDataObjectDeletion(unittest.TestCase):
+ '''Tests for various DataObject deletion methods'''
+
+ def setUp(self):
+ '''Create simple tree and references to children'''
+ # Create root node
+ self.data_obj = SimpleDataObject("root")
+ # Add some children
+ self.child_1 = SimpleDataObject2("child_1")
+ self.child_2 = SimpleDataObject("child_2")
+ self.child_3 = SimpleDataObject("child_3")
+ self.child_4 = SimpleDataObject2("child_4")
+ self.child_5 = SimpleDataObject("child_5")
+
+ # Add a child to child_2
+ self.child_2_1 = SimpleDataObject("child_2_1")
+ self.child_2.insert_children(self.child_2_1)
+
+ self.do_list = list()
+ self.do_list.append(self.child_1)
+ self.do_list.append(self.child_2)
+ self.do_list.append(self.child_3)
+ self.do_list.append(self.child_4)
+ self.do_list.append(self.child_5)
+
+ self.data_obj.insert_children(self.do_list)
+
+ def tearDown(self):
+ '''Clean up stored references'''
+ self.data_obj = None
+ self.child_1 = None
+ self.child_2 = None
+ self.child_3 = None
+ self.child_4 = None
+ self.child_5 = None
+
+ self.child_2_1 = None
+
+ self.do_list = None
+
+ def test_data_object_delete_self(self):
+ '''Validate that self.delete() deletes self from parent'''
+
+ obj = self.data_obj.get_children(self.child_4.name)
+
+ self.assertEqual(obj, [self.child_4],
+ "Failed to locate child_4 as child of data_obj.")
+
+ self.child_4.delete() # Delete self from parent.
+
+ self.assertRaises(ObjectNotFoundError, self.data_obj.get_children,
+ self.child_4.name)
+
+ def test_data_object_delete_self_and_children(self):
+ '''Validate self.delete() deletes self plus children'''
+
+ obj = self.data_obj.get_children(self.child_2.name)
+
+ self.assertEqual(obj, [self.child_2],
+ "Failed to locate child_2 as child of data_obj.")
+
+ self.child_2.delete() # Delete self from parent.
+
+ self.assertRaises(ObjectNotFoundError, self.data_obj.get_children,
+ self.child_2.name)
+
+ # Ensure that child_2 now has no children
+ self.assertFalse(self.child_2.has_children,
+ "child_2 shouldn't have children anymore.")
+
+ def test_data_object_delete_all(self):
+ '''Validate delete_children() deletes all children nodes'''
+
+ self.data_obj.delete_children()
+
+ self.assertFalse(self.data_obj.has_children, str(self.data_obj))
+
+ def test_data_object_delete_specific_single(self):
+ '''Validate deletion of a specific child by reference'''
+
+ self.data_obj.delete_children(self.child_3)
+
+ for child in self.data_obj.children:
+ self.assertNotEqual(child.name, self.child_3.name,
+ "Found deleted object 'child_3': %s" + str(self.data_obj))
+
+ def test_data_object_delete_specific_list(self):
+ '''Validate deletion of a list of references to specific children'''
+
+ self.data_obj.delete_children([self.child_3, self.child_5])
+
+ for child in self.data_obj.children:
+ self.assertNotEqual(child, self.child_3,
+ "Found deleted object 'child_3': %s" + str(self.data_obj))
+ self.assertNotEqual(child, self.child_5,
+ "Found deleted object 'child_5': %s" + str(self.data_obj))
+
+ def test_data_object_delete_by_name_no_children(self):
+        '''Validate failure when asked to delete children if there are none'''
+ self.assertRaises(ObjectNotFoundError, self.child_2_1.delete_children,
+ name="ignored")
+
+ def test_data_object_delete_by_class_type_no_children(self):
+        '''Validate failure if asked to delete non-existent class type'''
+ self.assertRaises(ObjectNotFoundError, self.child_2_1.delete_children,
+ class_type=SimpleDataObject)
+
+ def test_data_object_delete_by_name(self):
+        '''Validate deletion of an existing child by name'''
+ self.data_obj.delete_children(name=self.child_4.name)
+
+ for child in self.data_obj.children:
+ self.assertNotEqual(child, self.child_4,
+ "Found deleted object 'child_4': %s" + str(self.data_obj))
+
+ def test_data_object_delete_by_type(self):
+ '''Validate correct deletion of specific class types'''
+ # Should remove child_1 and child_4 which are of type SimpleDataObject2
+ self.data_obj.delete_children(class_type=SimpleDataObject2)
+
+ for child in self.data_obj.children:
+ self.assertNotEqual(child, self.child_1,
+ "Found deleted object 'child_1': %s" + str(self.data_obj))
+ self.assertNotEqual(child, self.child_4,
+ "Found deleted object 'child_4': %s" + str(self.data_obj))
+
+ def test_data_object_delete_by_name_and_type(self):
+ '''Validate correct deletion of an obj by name and type'''
+ # Should remove child_4 which has name and type SimpleDataObject2
+ self.data_obj.delete_children(name=self.child_4.name,
+ class_type=SimpleDataObject2)
+
+ found_child_1 = False
+ for child in self.data_obj.children:
+ if child == self.child_1:
+ found_child_1 = True
+ self.assertNotEqual(child, self.child_4,
+ "Found deleted object 'child_4': %s" + str(self.data_obj))
+
+ self.assertTrue(found_child_1,
+ "child_1 should still be present: %s" % (str(self.data_obj)))
+
+ def test_data_object_delete_by_children_not_exist_single(self):
+        '''Validate failure if asked to delete non-existent child reference'''
+ not_a_child = SimpleDataObject("Not A Child 1")
+
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ not_a_child)
+
+ def test_data_object_delete_by_children_not_exist_list(self):
+        '''Validate failure when deleting a list of non-existent children'''
+ not_a_child_list = [self.child_5, SimpleDataObject("Not A Child 1")]
+
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ not_a_child_list)
+
+ def test_data_object_delete_by_children_not_exist_tuple(self):
+        '''Validate failure when deleting a tuple with a non-existent ref'''
+ not_a_child_list = (self.child_5, SimpleDataObject("Not A Child 1"))
+
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ not_a_child_list)
+
+ def test_data_object_delete_by_name_not_exist(self):
+        '''Validate failure when deleting non-existent child by name'''
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ name="non_existant_name")
+
+ def test_data_object_delete_by_type_not_exist(self):
+        '''Validate failure when deleting non-existent child by type'''
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ class_type=SimpleDataObject3)
+
+ def test_data_object_delete_by_name_exist_and_type_not_exist(self):
+        '''Validate failure when deleting child name and non-existent type'''
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ name=self.child_4.name, class_type=SimpleDataObject3)
+
+ def test_data_object_delete_by_name_not_exist_and_type_exist(self):
+        '''Validate failure when deleting non-existent name and existing type'''
+ self.assertRaises(ObjectNotFoundError, self.data_obj.delete_children,
+ name="non existant name", class_type=SimpleDataObject2)
+
+ def test_data_object_delete_by_object_with_name_and_type_ignored(self):
+ '''Validate name and type ignored if specific child ref provided'''
+ # Should remove child_3 only, and ignore name and type
+ self.data_obj.delete_children(children=self.child_3,
+ name=self.child_4.name,
+ class_type=SimpleDataObject2)
+
+ found_child_1 = False
+ found_child_2 = False
+ found_child_4 = False
+ found_child_5 = False
+ for child in self.data_obj.children:
+ if child == self.child_1:
+ found_child_1 = True
+ if child == self.child_2:
+ found_child_2 = True
+ if child == self.child_4:
+ found_child_4 = True
+ if child == self.child_5:
+ found_child_5 = True
+ self.assertNotEqual(child, self.child_3,
+ "Found deleted object 'child_3': %s" + str(self.data_obj))
+
+ self.assertTrue(found_child_1,
+ "child_1 should still be present: %s" % (str(self.data_obj)))
+ self.assertTrue(found_child_2,
+ "child_2 should still be present: %s" % (str(self.data_obj)))
+ self.assertTrue(found_child_4,
+ "child_4 should still be present: %s" % (str(self.data_obj)))
+ self.assertTrue(found_child_5,
+ "child_5 should still be present: %s" % (str(self.data_obj)))
+
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_dict.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,508 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to validate DataObjectDict functionality'''
+
+import unittest
+
+from lxml import etree
+
+from solaris_install.data_object.data_dict import \
+ DataObjectDict, ParsingError
+
+from simple_data_object import SimpleDataObject
+
+
+class DataObjectDictDiffTag(DataObjectDict):
+ '''Define class where we override TAG_NAME'''
+ TAG_NAME = "different_tag"
+
+
+class DataObjectDictDiffSubTag(DataObjectDict):
+ '''Define class where we override SUB_TAG_NAME'''
+ SUB_TAG_NAME = "different_sub_tag"
+
+
+class DataObjectDictDiffBothTag(DataObjectDict):
+ '''Define class where we override both TAG_NAME and SUB_TAG_NAME'''
+ TAG_NAME = "different_both_tag"
+ SUB_TAG_NAME = "different_both_sub_tag"
+
+
+class DataObjectDictBadTag(DataObjectDict):
+ '''Define class where we override TAG_NAME with a bad tag value'''
+ TAG_NAME = "bad tag"
+
+
+class DataObjectDictBadSubTag(DataObjectDict):
+ '''Define class where we override SUB_TAG_NAME with a bad tag value'''
+ SUB_TAG_NAME = "bad sub tag"
+
+
+class TestDataObjectDict(unittest.TestCase):
+ '''Tests to validate DataObjectDict functionality'''
+
+ def setUp(self):
+ '''Create simple dictionary, and several objects using it'''
+ tmp_dict = dict()
+ tmp_dict["name1"] = "value1"
+ tmp_dict["name2"] = "value2"
+ tmp_dict["name3"] = "value3"
+ tmp_dict["name4"] = "value4"
+ tmp_dict["name5"] = "value5"
+
+ self.data_dict_xml = DataObjectDict("test_dictionary_xml", tmp_dict,
+ generate_xml=True)
+
+ self.data_dict_no_xml = DataObjectDict("test_dictionary_no_xml",
+ tmp_dict)
+
+ self.data_dict_diff_tag = DataObjectDictDiffTag(
+ "test_dictionary_diff_tag", tmp_dict, generate_xml=True)
+
+ self.data_dict_diff_sub_tag = DataObjectDictDiffSubTag(
+ "test_dictionary_diff_sub_tag", tmp_dict, generate_xml=True)
+
+ self.data_dict_diff_both_tags = DataObjectDictDiffBothTag(
+ "test_dictionary_diff_both_tags", tmp_dict,
+ generate_xml=True)
+
+ # Slightly different dictionary, add a key of 'name'.
+ tmp_dict = dict()
+ tmp_dict["name"] = "value"
+ tmp_dict["name1"] = "value1"
+ tmp_dict["name2"] = "value2"
+ tmp_dict["name3"] = "value3"
+ tmp_dict["name4"] = "value4"
+ tmp_dict["name5"] = "value5"
+
+ self.data_dict_attr = DataObjectDict("test_dictionary_attr", tmp_dict)
+
+ def tearDown(self):
+ '''Free up references to objects'''
+ self.data_dict_xml = None
+ self.data_dict_no_xml = None
+ self.data_dict_diff_tag = None
+ self.data_dict_diff_sub_tag = None
+ self.data_dict_diff_both_tags = None
+
+ def test_data_object_dict_no_xml(self):
+ '''Validate that XML isn't generated if generate_xml=False'''
+ self.assertTrue(self.data_dict_no_xml.get_xml_tree() == None,
+ self.data_dict_no_xml.get_xml_tree_str())
+
+ def test_data_object_dict_xml_default(self):
+ '''Validate that XML is generated with default settings'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ xml_str = self.data_dict_xml.get_xml_tree_str()
+ self.assertEqual(xml_str, expected_xml, "EXPECTED:\n%s\nGOT:\n%s" %\
+ (expected_xml, xml_str))
+
+ def test_data_object_dict_value_using_attr(self):
+        '''Validate that it's possible to refer to a value as an attribute'''
+ self.assertEqual(self.data_dict_attr.name1,
+ self.data_dict_attr.data_dict["name1"])
+
+ # Ensure that name returns _name, not value in dictionary.
+ self.assertEqual(self.data_dict_attr.name,
+ self.data_dict_attr._name)
+
+ def test_data_object_dict_value_fail_using_attr(self):
+        '''Validate that it's not possible to refer to an invalid attribute'''
+ try:
+ self.data_dict_attr.name10
+ self.fail("Didn't raise exception referring to invalid attribute")
+ except AttributeError:
+ pass
+
+ def test_data_object_dict_xml_diff_tag(self):
+ '''Validate that XML is generated using a different TAG_NAME'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <different_tag name="test_dictionary_diff_tag">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </different_tag>
+ '''.replace(indentation, "")
+
+ xml_str = self.data_dict_diff_tag.get_xml_tree_str()
+ self.assertEqual(xml_str, expected_xml, "EXPECTED:\n%s\nGOT:\n%s" %\
+ (expected_xml, xml_str))
+
+ def test_data_object_dict_xml_diff_sub_tag(self):
+ '''Validate that XML is generated using a different SUB_TAG_NAME'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <data_dictionary name="test_dictionary_diff_sub_tag">
+ <different_sub_tag name="name1">value1</different_sub_tag>
+ <different_sub_tag name="name2">value2</different_sub_tag>
+ <different_sub_tag name="name3">value3</different_sub_tag>
+ <different_sub_tag name="name4">value4</different_sub_tag>
+ <different_sub_tag name="name5">value5</different_sub_tag>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ xml_str = self.data_dict_diff_sub_tag.get_xml_tree_str()
+ self.assertEqual(xml_str, expected_xml, "EXPECTED:\n%s\nGOT:\n%s" %\
+ (expected_xml, xml_str))
+
+ def test_data_object_dict_xml_diff_both_tag(self):
+ '''Validate that XML uses different TAG_NAME and SUB_TAG_NAME'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <different_both_tag name="test_dictionary_diff_both_tags">
+ <different_both_sub_tag name="name1">value1</different_both_sub_tag>
+ <different_both_sub_tag name="name2">value2</different_both_sub_tag>
+ <different_both_sub_tag name="name3">value3</different_both_sub_tag>
+ <different_both_sub_tag name="name4">value4</different_both_sub_tag>
+ <different_both_sub_tag name="name5">value5</different_both_sub_tag>
+ </different_both_tag>
+ '''.replace(indentation, "")
+
+ xml_str = self.data_dict_diff_both_tags.get_xml_tree_str()
+ self.assertEqual(xml_str, expected_xml, "EXPECTED:\n%s\nGOT:\n%s" %\
+ (expected_xml, xml_str))
+
+ def test_data_object_dict_fail_not_dict(self):
+ '''Validate failure if non-dict type passed for dictionary'''
+ try:
+ DataObjectDict("not_dict", data_dict=["elem1"])
+ self.fail("Unexpected success creating obj with a list")
+ except ValueError:
+ pass
+
+ def test_data_object_dict_set_dict_prop(self):
+ '''Validate correct setting of data_dict property on creation'''
+ obj = DataObjectDict("not_dict", data_dict=dict())
+ obj.data_dict = {'key1': 'value1', 'key2': 'value2'}
+ self.assertEqual(obj.data_dict['key1'], 'value1')
+ self.assertEqual(obj.data_dict['key2'], 'value2')
+
+ def test_data_object_dict_fail_not_dict_prop(self):
+ '''Validate failure if non-dict passed as data_dict on creation'''
+ try:
+ obj = DataObjectDict("not_dict", data_dict=dict())
+ obj.data_dict = list()
+ self.fail("Unexpected success setting data_dict to a list")
+ except ValueError:
+ pass
+
+ def test_data_object_dict_fail_invalid_tag(self):
+ '''Validate that XML generation fails using an invalid TAG_NAME'''
+ try:
+ data_dict = {'key1': 'value1', 'key2': 'value2'}
+ data_obj = DataObjectDictBadTag("invalid_tag", data_dict,
+ generate_xml=True)
+ data_obj.to_xml()
+ self.fail("Unexpected success creating obj with a bad tag name")
+ except ValueError:
+ pass
+
+ def test_data_object_dict_fail_invalid_sub_tag(self):
+ '''Validate that XML generation fails using an invalid SUB_TAG_NAME'''
+ try:
+ data_dict = {'key1': 'value1', 'key2': 'value2'}
+ data_obj = DataObjectDictBadSubTag("invalid_tag", data_dict,
+ generate_xml=True)
+ data_obj.to_xml()
+ self.fail(
+ "Unexpected success creating obj with a bad sub-tag name")
+ except ValueError:
+ pass
+
+ def test_data_object_dict_fail_can_handle_invalid_tag(self):
+ '''Validate that can_handle fails using an invalid tag name'''
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <bad_data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </bad_data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ self.assertFalse(DataObjectDict.can_handle(xml_tree),
+ "can_handle returned True when given a bad tag: %s" % (TEST_XML))
+
+ def test_data_object_dict_fail_can_handle_invalid_sub_tag(self):
+ '''Validate that can_handle fails using an invalid sub tag'''
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <baddata name="name4">value4</baddata>
+ <data name="name5">value5</data>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ self.assertFalse(DataObjectDict.can_handle(xml_tree),
+ "can_handle returned True when given a bad sub_tag: %s" %
+ (TEST_XML))
+
+ def test_data_object_dict_fail_from_xml_invalid_tag(self):
+ '''Validate that from_xml() fails using an invalid XML tag'''
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <bad_data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </bad_data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+        # can_handle tested separately, just ensure from_xml will fail too.
+ self.assertRaises(ParsingError, DataObjectDict.from_xml, xml_tree)
+
+ def test_data_object_dict_fail_from_xml_invalid_sub_tag(self):
+ '''Validate that from_xml() fails using an invalid XML sub tag'''
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <baddata name="name4">value4</baddata>
+ <data name="name5">value5</data>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+        # can_handle tested separately, just ensure from_xml will fail too.
+ self.assertRaises(ParsingError, DataObjectDict.from_xml, xml_tree)
+
+ def test_data_object_dict_set_generate_xml_prop(self):
+ '''Validate that set/get of generate_xml flag works'''
+ self.assertFalse(self.data_dict_no_xml.generate_xml)
+ self.assertTrue(self.data_dict_no_xml.to_xml() == None)
+
+ self.data_dict_no_xml.generate_xml = True
+ self.assertTrue(self.data_dict_no_xml.generate_xml)
+ self.assertTrue(self.data_dict_no_xml.to_xml() != None)
+
+ self.data_dict_no_xml.generate_xml = False
+ self.assertFalse(self.data_dict_no_xml.generate_xml)
+ self.assertTrue(self.data_dict_no_xml.to_xml() == None)
+
+ def test_data_object_dict_import_xml_default(self):
+ '''Validate that from_xml() correctly imports XML'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <data_dictionary name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ if DataObjectDict.can_handle(xml_tree):
+ new_obj = DataObjectDict.from_xml(xml_tree)
+ self.assertTrue(new_obj != None,
+ "Failed to create DataObjectDict from XML")
+ self.assertEquals(type(new_obj.data_dict), dict,
+ "new object's data_dict is not a data_dict.")
+ self.assertEquals(new_obj.data_dict["name1"], "value1",
+ "new object's name1 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name2"], "value2",
+ "new object's name2 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name3"], "value3",
+ "new object's name3 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name4"], "value4",
+ "new object's name4 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name5"], "value5",
+ "new object's name5 doesn't have correct value")
+ else:
+ self.fail("can_handle returned False, expected True!")
+
+ def test_data_object_dict_import_xml_diff_tag(self):
+ '''Validate from_xml() imports correctly with diff tag'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <different_tag name="test_dictionary_xml">
+ <data name="name1">value1</data>
+ <data name="name2">value2</data>
+ <data name="name3">value3</data>
+ <data name="name4">value4</data>
+ <data name="name5">value5</data>
+ </different_tag>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ if DataObjectDictDiffTag.can_handle(xml_tree):
+ new_obj = DataObjectDictDiffTag.from_xml(xml_tree)
+ self.assertTrue(new_obj != None,
+ "Failed to create DataObjectDict from XML")
+ self.assertEquals(type(new_obj.data_dict), dict,
+ "new object's data_dict is not a data_dict.")
+ self.assertEquals(new_obj.data_dict["name1"], "value1",
+ "new object's name1 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name2"], "value2",
+ "new object's name2 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name3"], "value3",
+ "new object's name3 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name4"], "value4",
+ "new object's name4 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name5"], "value5",
+ "new object's name5 doesn't have correct value")
+ else:
+ self.fail("can_handle returned False, expected True!")
+
+ def test_data_object_dict_import_xml_diff_sub_tag(self):
+ '''Validate from_xml() imports correctly with diff sub tag'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <data_dictionary name="test_dictionary_xml">
+ <different_sub_tag name="name1">value1</different_sub_tag>
+ <different_sub_tag name="name2">value2</different_sub_tag>
+ <different_sub_tag name="name3">value3</different_sub_tag>
+ <different_sub_tag name="name4">value4</different_sub_tag>
+ <different_sub_tag name="name5">value5</different_sub_tag>
+ </data_dictionary>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ if DataObjectDictDiffSubTag.can_handle(xml_tree):
+ new_obj = DataObjectDictDiffSubTag.from_xml(xml_tree)
+ self.assertTrue(new_obj != None,
+ "Failed to create DataObjectDict from XML")
+ self.assertEquals(type(new_obj.data_dict), dict,
+ "new object's data_dict is not a data_dict.")
+ self.assertEquals(new_obj.data_dict["name1"], "value1",
+ "new object's name1 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name2"], "value2",
+ "new object's name2 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name3"], "value3",
+ "new object's name3 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name4"], "value4",
+ "new object's name4 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name5"], "value5",
+ "new object's name5 doesn't have correct value")
+ else:
+ self.fail("can_handle returned False, expected True!")
+
+ def test_data_object_dict_import_xml_diff_both_tag(self):
+ '''Validate from_xml() imports correctly with diff tag and sub-tag'''
+ # Set expected xml, and compensate for indent
+ indentation = '''\
+ '''
+ TEST_XML = '''\
+ <different_both_tag name="test_dictionary_diff_both_tags">
+ <different_both_sub_tag name="name1">value1</different_both_sub_tag>
+ <different_both_sub_tag name="name2">value2</different_both_sub_tag>
+ <different_both_sub_tag name="name3">value3</different_both_sub_tag>
+ <different_both_sub_tag name="name4">value4</different_both_sub_tag>
+ <different_both_sub_tag name="name5">value5</different_both_sub_tag>
+ </different_both_tag>
+ '''.replace(indentation, "")
+
+ # Parse the XML into an XML tree.
+ xml_tree = etree.fromstring(TEST_XML)
+
+ if DataObjectDictDiffBothTag.can_handle(xml_tree):
+ new_obj = DataObjectDictDiffBothTag.from_xml(xml_tree)
+ self.assertTrue(new_obj != None,
+ "Failed to create DataObjectDict from XML")
+ self.assertEquals(type(new_obj.data_dict), dict,
+ "new object's data_dict is not a data_dict.")
+ self.assertEquals(new_obj.data_dict["name1"], "value1",
+ "new object's name1 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name2"], "value2",
+ "new object's name2 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name3"], "value3",
+ "new object's name3 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name4"], "value4",
+ "new object's name4 doesn't have correct value")
+ self.assertEquals(new_obj.data_dict["name5"], "value5",
+ "new object's name5 doesn't have correct value")
+ else:
+ self.fail("can_handle returned False, expected True!")
+
+ def test_data_object_dict_can_insert_to_doc(self):
+ '''Validate DataObjectDict can be inserted as child of DataObject'''
+ data_obj = SimpleDataObject("test_obj")
+ data_dict = {'key1': 'value1', 'key2': 'value2'}
+ data_dict_obj = DataObjectDict("TestChild", data_dict)
+ data_obj.insert_children(data_dict_obj)
+
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_fetching.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,431 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests for DataObject fetching methods'''
+
+import unittest
+
+from solaris_install.data_object import ObjectNotFoundError
+from simple_data_object import create_simple_data_obj_tree, SimpleDataObject, \
+ SimpleDataObject2, SimpleDataObject3, SimpleDataObject4, SimpleDataObject5
+
+
+class TestDataObjectFetching(unittest.TestCase):
+ '''Tests for DataObject fetching methods'''
+
+ def setUp(self):
+ '''Create tree of data objects to test on'''
+ self.data_objs = create_simple_data_obj_tree()
+
+ def tearDown(self):
+ '''Clean up references to objects'''
+ self.data_objs = None
+ del self.data_objs
+
+ #
+ # Test 'get_children()' and property 'children'
+ #
+ def test_dobj_get_all_children_using_method(self):
+ '''Validate get_children() returns all children'''
+ children = self.data_objs["data_obj"].get_children()
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure both are the same length.
+ self.assertEquals(len(children), len(internal_children))
+
+ # Ensure that while the lists themselves are different, the children
+ # contained in the lists are the same - comparison of lists will
+ # return true if they both contain the same items.
+ self.assertEquals(children, internal_children)
+
+ # Ensure that the list returned from get_children() is a copy
+ children.remove(self.data_objs["child_3"])
+ self.assertNotEquals(children, internal_children)
+
+ def test_dobj_get_all_children_using_property(self):
+ '''Validate .children property returns all children'''
+
+ children = self.data_objs["data_obj"].children
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure both are the same length.
+ self.assertEquals(len(children), len(internal_children))
+
+ # Ensure that while the lists themselves are different, the children
+ # contained in the lists are the same - comparison of lists will
+ # return true if they both contain the same items.
+ self.assertEquals(children, internal_children)
+
+ # Ensure that the list returned from get_children() is a copy
+ children.remove(self.data_objs["child_3"])
+ self.assertNotEquals(children, internal_children)
+
+ def test_dobj_get_all_children_using_method_max_count_2(self):
+ '''Validate get_children(max_count=2) returns only 2 children'''
+ children = self.data_objs["data_obj"].get_children(max_count=2)
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure both are the same length.
+ self.assertEquals(len(children), 2)
+
+ # Ensure that while the lists themselves are different, the children
+ # contained in the lists are the same - comparison of lists will
+ # return true if they both contain the same items.
+ self.assertEquals(children, internal_children[:2])
+
+ def test_dobj_get_children_by_name_unique(self):
+ '''Validate get_children with unique name returns correct child'''
+ found_obj_list = self.data_objs["data_obj"].get_children(
+ name=self.data_objs["child_4"].name)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_4"])
+
+ def test_dobj_get_children_by_name_multiple(self):
+ '''Validate get_children with multiple name matches returns list'''
+ found_obj_list = self.data_objs["data_obj"].get_children(
+ name=self.data_objs["child_5"].name)
+
+ self.assertTrue(len(found_obj_list) == 2)
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_5"], self.data_objs["child_5_same_name"]])
+
+ def test_dobj_get_children_by_type_unique(self):
+ '''Validate get_children with unique type match returns 1 child'''
+ # Should return child_5 which is of type SimpleDataObject3
+ found_obj_list = self.data_objs["data_obj"].get_children(
+ class_type=SimpleDataObject3)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5"])
+
+ def test_dobj_get_children_by_type_multiple(self):
+ '''Validate get_children with multiple type matches returns list'''
+ # Should return child_1 and child_4 which are of type SimpleDataObject2
+ found_obj_list = self.data_objs["data_obj"].get_children(
+ class_type=SimpleDataObject2)
+
+ self.assertTrue(len(found_obj_list) == 2)
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_4"]])
+
+ def test_dobj_get_children_by_name_and_type(self):
+ '''Validate get_children matches with name and type'''
+ # Should return child_4 which has name and type SimpleDataObject2
+ found_obj_list = self.data_objs["data_obj"].get_children(
+ name=self.data_objs["child_4"].name, class_type=SimpleDataObject2)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_4"])
+
+ def test_dobj_get_children_by_name_not_exist(self):
+        '''Validate get_children failure with non-existent name'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_children, name="non_existant_name")
+
+ def test_dobj_get_children_by_type_not_exist(self):
+        '''Validate get_children failure with non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_children,
+ class_type=SimpleDataObject4)
+
+ def test_dobj_get_children_by_name_exist_and_type_not_exist(self):
+        '''Validate get_children failure with valid name & non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_children,
+ name=self.data_objs["child_4"].name, class_type=SimpleDataObject4)
+
+ def test_dobj_get_children_by_name_not_exist_and_type_exist(self):
+        '''Validate get_children failure with non-existent name & valid type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_children,
+ name="non existant name", class_type=SimpleDataObject2)
+
+ #
+ # Test get_file_child()
+ #
+ def test_dobj_get_first_child(self):
+ '''Validate get_first_child() returns first child'''
+
+ child = self.data_objs["data_obj"].get_first_child()
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure that it's really the first child in internal list.
+ self.assertEquals(child, internal_children[0])
+
+ def test_dobj_get_first_child_no_children(self):
+ '''Validate get_first_child() fails with no children'''
+
+ child = self.data_objs["child_4"].get_first_child()
+
+ # Ensure object has no children
+ self.assertFalse(self.data_objs["child_4"].has_children)
+
+ self.assertEquals(child, None,
+ "Got child returned when parent had no children!")
+
+ def test_dobj_get_first_child_by_name_unique(self):
+ '''Validate get_first_child find first match of unique name'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name=self.data_objs["child_4"].name)
+
+ self.assertEqual(found_obj, self.data_objs["child_4"])
+
+ def test_dobj_get_first_child_by_name_not_unique(self):
+ '''Validate get_first_child find first match of non-unique name'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name=self.data_objs["child_5"].name)
+
+ self.assertEqual(found_obj, self.data_objs["child_5"])
+
+ def test_dobj_get_first_child_by_type_unique(self):
+ '''Validate get_first_child find first match of unique type'''
+ # Should return child_5 which is of type SimpleDataObject3
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ class_type=SimpleDataObject3)
+
+ self.assertEqual(found_obj, self.data_objs["child_5"])
+
+ def test_dobj_get_first_child_by_type_not_unique(self):
+ '''Validate get_first_child find first match of non-unique type'''
+ # Should return child_1 with type SimpleDataObject2, as is child_4
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ class_type=SimpleDataObject2)
+
+ self.assertEqual(found_obj, self.data_objs["child_1"])
+
+ def test_dobj_get_first_child_by_name_and_type(self):
+ '''Validate get_first_child find first match of name & type'''
+ # Should return child_4 which has name and type SimpleDataObject2
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name=self.data_objs["child_4"].name, class_type=SimpleDataObject2)
+
+ self.assertEqual(found_obj, self.data_objs["child_4"])
+
+ def test_dobj_get_first_child_by_name_not_exist(self):
+        '''Validate get_first_child fails for non-existent name'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name="non_existant_name")
+ self.assertEquals(found_obj, None)
+
+ def test_dobj_get_first_child_by_type_not_exist(self):
+        '''Validate get_first_child fails for non-existent type'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ class_type=SimpleDataObject4)
+ self.assertEquals(found_obj, None)
+
+ def test_dobj_get_first_child_by_name_exist_and_type_not_exist(self):
+        '''Validate get_first_child fails for valid name & non-existent type'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name=self.data_objs["child_4"].name, class_type=SimpleDataObject4)
+ self.assertEquals(found_obj, None)
+
+ def test_dobj_get_first_child_by_name_not_exist_and_type_exist(self):
+        '''Validate get_first_child fails for non-existent name & valid type'''
+ found_obj = self.data_objs["data_obj"].get_first_child(
+ name="non existant name", class_type=SimpleDataObject2)
+ self.assertEquals(found_obj, None)
+
+ #
+ # Test 'get_descendants()'
+ #
+ def test_dobj_get_descendants_by_name_unique(self):
+ '''Validate get_descendants finds unique name'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ name=self.data_objs["child_5_2_3_1"].name)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5_2_3_1"])
+
+ def test_dobj_get_descendants_by_name_multiple(self):
+ '''Validate get_descendants finds all objs with non-unique name'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ name=self.data_objs["child_3_1_2"].name)
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d" % (len(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_3_1_2"],
+ self.data_objs["child_3_1_2_same_name"]])
+
+ def test_dobj_get_descendants_by_type_unique(self):
+ '''Validate get_descendants finds unique type'''
+ # Should return child_5_2_3 which is of type SimpleDataObject4
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject4)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5_2_3"])
+
+ def test_dobj_get_descendants_by_type_multiple(self):
+ '''Validate get_descendants finds all objs with non-unique type'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject2)
+
+ self.assertTrue(len(found_obj_list) == 6,
+ "Expected len 6, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"],
+ self.data_objs["child_2_1_1_1"],
+ self.data_objs["child_2_1_1_2"], self.data_objs["child_4"]])
+
+ def test_dobj_get_descendants_using_no_params(self):
+ '''Validate get_descendants fails with no params'''
+ self.assertRaises(ValueError,
+ self.data_objs["data_obj"].get_descendants)
+
+ def test_dobj_get_descendants_by_name_and_type(self):
+ '''Validate get_descendants finds all objs with name & type'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ name=self.data_objs["child_2_1_1_2"].name,
+ class_type=SimpleDataObject2)
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_2_1_1_2"])
+
+ def test_dobj_get_descendants_by_name_not_exist(self):
+        '''Validate get_descendants fails with non-existent name'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_descendants,
+ name="non_existant_name")
+
+ def test_dobj_get_descendants_by_type_not_exist(self):
+        '''Validate get_descendants fails with non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_descendants,
+ class_type=SimpleDataObject5)
+
+ def test_dobj_get_descendants_by_name_exist_and_type_not_exist(self):
+        '''Validate get_descendants fails with valid name & non-existent type
+ '''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_descendants,
+ name=self.data_objs["child_5_2_2"].name,
+ class_type=SimpleDataObject4)
+
+ def test_dobj_get_descendants_by_name_not_exist_and_type_exist(self):
+        '''Validate get_descendants fails with non-existent name & valid type
+ '''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].get_descendants,
+ name="non existant name", class_type=SimpleDataObject2)
+
+ def test_dobj_get_descendants_by_type_and_max_depth_minus_1(self):
+ '''Validate get_descendants fails with max_depth=-1'''
+ self.assertRaises(ValueError,
+ self.data_objs["data_obj"].get_descendants,
+ class_type=SimpleDataObject2, max_depth=-1)
+
+ def test_dobj_get_descendants_by_type_and_max_depth_1(self):
+ '''Validate get_descendants limits by type to max_depth = 1'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject2, max_depth=1)
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_4"]])
+
+ def test_dobj_get_descendants_by_type_and_max_depth_2(self):
+ '''Validate get_descendants limits by type to max_depth = 2'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject2, max_depth=2)
+
+ self.assertTrue(len(found_obj_list) == 3,
+ "Expected len 3, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_4"]])
+
+ def test_dobj_get_descendants_by_type_and_max_depth_3(self):
+ '''Validate get_descendants limits by type to max_depth = 3'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject2, max_depth=3)
+
+ self.assertTrue(len(found_obj_list) == 4,
+ "Expected len 4, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"], self.data_objs["child_4"]])
+
+ def test_dobj_get_descendants_by_type_and_max_depth_4(self):
+ '''Validate get_descendants limits by type to max_depth = 4'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject2, max_depth=4)
+
+ self.assertTrue(len(found_obj_list) == 6,
+ "Expected len 6, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"],
+ self.data_objs["child_2_1_1_1"],
+ self.data_objs["child_2_1_1_2"], self.data_objs["child_4"]])
+
+ def test_dobj_get_descendants_using_method_max_count_invalid(self):
+ '''Validate get_descendants fails with max_count = 0'''
+ self.assertRaises(ValueError,
+ self.data_objs["data_obj"].get_descendants,
+ class_type=SimpleDataObject, max_count=0)
+
+ def test_dobj_get_descendants_by_type_and_max_count_1(self):
+ '''Validate get_descendants limits by type to max_count = 1'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject, max_count=1)
+
+ self.assertTrue(len(found_obj_list) == 1,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list, [self.data_objs["child_1"]])
+
+ def test_dobj_get_descendants_by_type_and_max_count_2(self):
+ '''Validate get_descendants limits by type to max_count = 2'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject, max_count=2)
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_1_1"]])
+
+ def test_dobj_get_descendants_by_type_and_max_count_4(self):
+ '''Validate get_descendants limits by type to max_count = 4'''
+ found_obj_list = self.data_objs["data_obj"].get_descendants(
+ class_type=SimpleDataObject, max_count=4)
+
+ self.assertTrue(len(found_obj_list) == 4,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_1_1"],
+ self.data_objs["child_1_2"], self.data_objs["child_2"]])
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_insertion.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,363 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests for DataObject insertion methods'''
+
+import unittest
+
+from solaris_install.data_object import ObjectNotFoundError
+from simple_data_object import SimpleDataObject
+
+
+class TestDataObjectInsertion(unittest.TestCase):
+ '''Tests for DataObject insertion methods'''
+
+ def setUp(self):
+ '''Create simple data object reference'''
+ self.data_obj = SimpleDataObject("root")
+
+ def tearDown(self):
+ '''Clean up data_obj reference'''
+ self.data_obj = None
+
+ def test_insert_children_default_single(self):
+ '''Validate insertion of single child'''
+ new_do = SimpleDataObject("child_1")
+
+ self.data_obj.insert_children(new_do)
+ self.assertEqual(self.data_obj.children[0], new_do)
+
+ def test_insert_children_default_fail_both_before_and_after(self):
+ '''Validate failure on insert with invalid before and after'''
+ new_do = SimpleDataObject("child_1")
+
+ self.assertRaises(ValueError, self.data_obj.insert_children,
+ new_do, new_do, new_do)
+
+ def test_insert_children_default_fail_single(self):
+ '''Validate failure on insert with non-DataObjectBase sub-class'''
+ new_obj = object()
+
+ self.assertRaises(TypeError, self.data_obj.insert_children, new_obj)
+
+ def test_insert_children_default_fail_single_null_param(self):
+ '''Validate failure on insert of None value'''
+ self.assertRaises(TypeError, self.data_obj.insert_children, None)
+
+ def test_insert_children_default_list(self):
+ '''Validate insert of list of children'''
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+ i = 0
+ for child in self.data_obj.children:
+ self.assertEqual(child, new_do_list[i])
+ i += 1
+ self.assertEqual(i, len(new_do_list))
+
+ def test_insert_children_default_fail_list(self):
+ '''Validate failure of insert of list of children with bad element'''
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(object())
+
+ self.assertRaises(TypeError, self.data_obj.insert_children,
+ new_do_list)
+
+ def test_insert_children_default_fail_tuple(self):
+ '''Validate failure of insert of tuple of children with bad element'''
+ # Create tuple
+ new_do_list = (
+ SimpleDataObject("child_1"),
+ SimpleDataObject("child_2"),
+ SimpleDataObject("child_3"),
+ SimpleDataObject("child_4"), object())
+
+ self.assertRaises(TypeError, self.data_obj.insert_children,
+ new_do_list)
+
+ def test_insert_children_default_fail_invalid_type(self):
+ '''Validate failure of insert of child with non-DataObjectBase'''
+ self.assertRaises(TypeError, self.data_obj.insert_children,
+ object())
+
+ def test_insert_children_before_single(self):
+ '''Validate insertion of children list with before value'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ child_3 = SimpleDataObject("child_3")
+ new_do_list.append(child_3)
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something before child_3
+ new_do = SimpleDataObject("before_child_3")
+ self.data_obj.insert_children(new_do, before=child_3)
+
+ i = 0
+ for child in self.data_obj.children:
+ if i == 2:
+ self.assertEqual(child, new_do, str(self.data_obj))
+ break
+ i += 1
+
+ def test_insert_children_before_doesnt_exist(self):
+        '''Validate failure on insertion with non-existent 'before' obj'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ not_in_list = SimpleDataObject("child_not_in_list")
+
+ new_do = SimpleDataObject("before_child_3")
+        # Now for the real test, to insert something before non-existent child
+ self.assertRaises(ObjectNotFoundError, self.data_obj.insert_children,
+ new_do, before=not_in_list)
+
+ def test_insert_children_after_doesnt_exist(self):
+        '''Validate failure on insertion with non-existent 'after' obj'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ not_in_list = SimpleDataObject("child_not_in_list")
+
+ new_do = SimpleDataObject("after_child_3")
+        # Now for the real test, to insert something after non-existent child
+ self.assertRaises(ObjectNotFoundError, self.data_obj.insert_children,
+ new_do, after=not_in_list)
+
+ def test_insert_children_before_first_single(self):
+ '''Validate insertion of child with before == first child'''
+ # Populate existing children first.
+ new_do_list = list()
+ child_1 = SimpleDataObject("child_1")
+ new_do_list.append(child_1)
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something before child_1
+ new_do = SimpleDataObject("before_child_1")
+ self.data_obj.insert_children(new_do, before=child_1)
+
+ self.assertEqual(self.data_obj.children[0],
+ new_do, str(self.data_obj))
+
+ def test_insert_children_after_single(self):
+ '''Validate insertion of children with after value'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ child_3 = SimpleDataObject("child_3")
+ new_do_list.append(child_3)
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+        # Now for the real test, to insert something after child_3
+ new_do = SimpleDataObject("after_child_5")
+ self.data_obj.insert_children(new_do, after=child_3)
+
+ i = 0
+ for child in self.data_obj.children:
+ if i == 3:
+ self.assertEqual(child, new_do, str(self.data_obj))
+ break
+ i += 1
+
+ def test_insert_children_after_last_single(self):
+ '''Validate insertion of children with after == last child'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+ child_5 = SimpleDataObject("child_5")
+ new_do_list.append(child_5)
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something after child_5
+ new_do = SimpleDataObject("after_child_5")
+ self.data_obj.insert_children(new_do, after=child_5)
+
+ children = self.data_obj.children
+ self.assertEqual(children[len(children) - 1],
+ new_do, str(self.data_obj))
+
+ def test_insert_children_before_list(self):
+ '''Validate insertion of children with before value'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ child_3 = SimpleDataObject("child_3")
+ new_do_list.append(child_3)
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something before child_3
+ to_insert = list()
+ to_insert.append(SimpleDataObject("before_child_3 - A"))
+ to_insert.append(SimpleDataObject("before_child_3 - B"))
+ to_insert.append(SimpleDataObject("before_child_3 - C"))
+
+ self.data_obj.insert_children(to_insert, before=child_3)
+
+ i = 0
+ j = 0
+ for child in self.data_obj.children:
+ if i >= 2:
+ self.assertEqual(child, to_insert[j],
+ "child = %s ; compared_to = %s" % (child, to_insert[j]))
+ j += 1
+ if j >= len(to_insert):
+ break
+ i += 1
+
+ def test_insert_children_before_first_list(self):
+ '''Validate insertion of children with before == first child'''
+ # Populate existing children first.
+ new_do_list = list()
+ child_1 = SimpleDataObject("child_1")
+ new_do_list.append(child_1)
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something before child_1
+ to_insert = list()
+ to_insert.append(SimpleDataObject("before_child_1 - A"))
+ to_insert.append(SimpleDataObject("before_child_1 - B"))
+ to_insert.append(SimpleDataObject("before_child_1 - C"))
+
+ self.data_obj.insert_children(to_insert, before=child_1)
+
+ i = 0
+ for child in self.data_obj.children:
+ self.assertEqual(child, to_insert[i],
+ "child = %s ; compared_to = %s" % (child, to_insert[i]))
+ i += 1
+ if not (i < len(to_insert)):
+ break
+
+ def test_insert_children_after_list(self):
+ '''Validate insertion of children list with after value'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_1"))
+ new_do_list.append(SimpleDataObject("child_2"))
+ child_3 = SimpleDataObject("child_3")
+ new_do_list.append(child_3)
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something after child_3
+ to_insert = list()
+ to_insert.append(SimpleDataObject("after_child_3 - A"))
+ to_insert.append(SimpleDataObject("after_child_3 - B"))
+ to_insert.append(SimpleDataObject("after_child_3 - C"))
+
+ self.data_obj.insert_children(to_insert, after=child_3)
+
+ i = 0
+ j = 0
+ for child in self.data_obj.children:
+ if i >= 3:
+ self.assertEqual(child, to_insert[j],
+ "child = %s ; compared_to = %s" % (child, to_insert[j]))
+ j += 1
+ if j >= len(to_insert):
+ break
+ i += 1
+
+ def test_insert_children_after_last_list(self):
+ '''Validate insertion of children list with after == last child'''
+ # Populate existing children first.
+ new_do_list = list()
+ new_do_list.append(SimpleDataObject("child_2"))
+ new_do_list.append(SimpleDataObject("child_3"))
+ new_do_list.append(SimpleDataObject("child_4"))
+ new_do_list.append(SimpleDataObject("child_5"))
+ child_5 = SimpleDataObject("child_5")
+ new_do_list.append(child_5)
+
+ self.data_obj.insert_children(new_do_list)
+
+ #Now for the real test, to insert something after child_5
+ to_insert = list()
+ to_insert.append(SimpleDataObject("after_child_3 - A"))
+ to_insert.append(SimpleDataObject("after_child_3 - B"))
+ to_insert.append(SimpleDataObject("after_child_3 - C"))
+
+ self.data_obj.insert_children(to_insert, after=child_5)
+
+ children = self.data_obj.children
+ num_children = len(children)
+ offset = num_children - len(to_insert)
+ for i in range(len(to_insert)):
+ self.assertEqual(children[offset + i], to_insert[i],
+ "child = %s ; compared_to = %s" %
+ (children[offset + i], to_insert[i]))
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_paths.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,599 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to validate DataObject paths functionality'''
+
+import unittest
+
+from solaris_install.data_object import ObjectNotFoundError, PathError, \
+ DataObjectBase
+import simple_data_object
+
+
+class TestDataObjectPaths(unittest.TestCase):
+ '''Tests to validate DataObject paths functionality'''
+
+ def setUp(self):
+ '''Create ref to simple tree of data objects'''
+ self.data_objs = simple_data_object.create_simple_data_obj_tree()
+
+ def tearDown(self):
+ '''Clean up references'''
+ self.data_objs = None
+ del self.data_objs
+
+ def test_dobj_path_find_path_by_name(self):
+ '''Validate simple paths, using name only, finds correct child'''
+
+ # Find direct child.
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/%s" % (self.data_objs["child_3"].name))
+
+ self.assertEqual(found_obj_list, [self.data_objs["child_3"]])
+
+ # Search for deep child, at top - should fail.
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "/%s" % (self.data_objs["child_2_1_1_1"].name))
+
+ # Search for same child again, but using '//' syntax, to search down.
+ found_obj_list = self.data_objs["data_obj"].find_path("//%s" %
+ (self.data_objs["child_2_1_1_1"].name))
+
+ self.assertEqual(found_obj_list, [self.data_objs["child_2_1_1_1"]])
+
+ def test_dobj_path_get_all_children_using_type(self):
+ '''Validate path to object by type'''
+ children = self.data_objs["data_obj"].find_path(
+ "/[@solaris_install.data_object.DataObject]")
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure both are the same length.
+ self.assertEquals(len(children), len(internal_children))
+
+ # Ensure that while the lists themselves are different, the children
+ # contained in the lists are the same - comparison of lists will
+ # return true if they both contain the same items.
+ self.assertEquals(children, internal_children)
+
+ # Ensure that the list returned from get_children() is a copy
+ children.remove(self.data_objs["child_3"])
+ self.assertNotEquals(children, internal_children)
+
+ def test_dobj_path_get_all_children_using_method_max_count_2(self):
+ '''Validate path to objects with max_count == 2'''
+ children = self.data_objs["data_obj"].find_path(
+ "/[@solaris_install.data_object.DataObject#2]")
+ internal_children = self.data_objs["data_obj"]._children
+
+ # Ensure both are the same length.
+ self.assertEquals(len(children), 2)
+
+ # Ensure that while the lists themselves are different, the children
+ # contained in the lists are the same - comparison of lists will
+ # return true if they both contain the same items.
+ self.assertEquals(children, internal_children[:2])
+
+ def test_dobj_path_get_children_by_name_unique(self):
+ '''Validate path to uniquely named object returns 1 obj'''
+ found_obj_list = self.data_objs["data_obj"].find_path("/%s" %
+ (self.data_objs["child_4"].name))
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_4"])
+
+ def test_dobj_path_get_children_by_name_multiple(self):
+ '''Validate path to non-unique named objects returns all matches'''
+ found_obj_list = self.data_objs["data_obj"].find_path("/%s" %
+ (self.data_objs["child_5"].name))
+
+ self.assertTrue(len(found_obj_list) == 2)
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_5"],
+ self.data_objs["child_5_same_name"]])
+
+ def test_dobj_path_get_children_by_type_unique(self):
+ '''Validate path with unique type returns 1 object'''
+ # Should return child_5 which is of type SimpleDataObject3
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/[@simple_data_object.SimpleDataObject3]")
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5"])
+
+ def test_dobj_path_get_children_by_type_multiple(self):
+ '''Validate path with non-unique type returns correct children'''
+ # Should return child_1 and child_4 which are of type SimpleDataObject2
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/[@simple_data_object.SimpleDataObject2]")
+
+ self.assertTrue(len(found_obj_list) == 2)
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_4"]])
+
+ def test_dobj_path_get_children_by_name_and_type(self):
+ '''Validate path with name and type returns single match'''
+ # Should return child_4 which has name and type SimpleDataObject2
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/%s[@simple_data_object.SimpleDataObject2]" %
+ (self.data_objs["child_4"].name))
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_4"])
+
+ def test_dobj_path_get_children_by_name_not_exist(self):
+ '''Validate failure on path with non-existent name'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "/non_existant_name")
+
+ def test_dobj_path_get_children_by_type_not_exist(self):
+ '''Validate failure on path with non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "/[@simple_data_object.SimpleDataObject4]")
+
+ def test_dobj_path_get_children_by_name_exist_and_type_not_exist(self):
+ '''Validate failure on path with valid name and non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "/%s[@simple_data_object.SimpleDataObject4]" %
+ (self.data_objs["child_4"].name))
+
+ def test_dobj_path_get_children_by_name_not_exist_and_type_exist(self):
+ '''Validate failure on path with non-existent name and valid type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "/non existant name[@simple_data_object.SimpleDataObject2]")
+
+ #
+ # Test get_file_child()
+ #
+ def test_dobj_path_get_first_child(self):
+ '''Validate path limit to first match'''
+ child_list = self.data_objs["data_obj"].find_path(
+ "/[@solaris_install.data_object.DataObject#1]")
+ internal_children = [self.data_objs["data_obj"]._children[0]]
+
+ # Ensure that it's really the first child in internal list.
+ self.assertEquals(child_list, internal_children)
+
+ def test_dobj_path_get_first_child_no_children(self):
+ '''Validate failure on path with no children & limit to first match'''
+
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["child_4"].find_path,
+ "/[@solaris_install.data_object.DataObject#1]")
+
+ def test_dobj_path_get_first_child_by_name_unique(self):
+ '''Validate path with name and limit to first match'''
+ found_obj = self.data_objs["data_obj"].find_path("/%s[#1]" %
+ (self.data_objs["child_4"].name))
+
+ self.assertEqual(found_obj, [self.data_objs["child_4"]])
+
+ def test_dobj_path_get_first_child_by_name_not_unique(self):
+ '''Validate path with non-unique name and limit to first match'''
+ found_obj = self.data_objs["data_obj"].find_path("/%s[#1]" %
+ (self.data_objs["child_5"].name))
+
+ self.assertEqual(found_obj, [self.data_objs["child_5"]])
+
+ def test_dobj_path_get_first_child_by_type_unique(self):
+ '''Validate path with unique type and limit to first match'''
+ # Should return child_5 which is of type SimpleDataObject3
+ found_obj = self.data_objs["data_obj"].find_path(
+ "/[@simple_data_object.SimpleDataObject3#1]")
+
+ self.assertEqual(found_obj, [self.data_objs["child_5"]])
+
+ def test_dobj_path_get_first_child_by_type_not_unique(self):
+ '''Validate path with non-unique type and limit to first match'''
+ # Should return child_1 with type SimpleDataObject2, as is child_4
+ found_obj = self.data_objs["data_obj"].find_path(
+ "/[@simple_data_object.SimpleDataObject2#1]")
+
+ self.assertEqual(found_obj, [self.data_objs["child_1"]])
+
+ def test_dobj_path_get_first_child_by_name_and_type(self):
+ '''Validate path with name and type and limit to first match'''
+ # Should return child_4 which has name and type SimpleDataObject2
+ found_obj = self.data_objs["data_obj"].find_path(
+ "/%s[@simple_data_object.SimpleDataObject2#1]" %
+ (self.data_objs["child_4"].name))
+
+ self.assertEqual(found_obj, [self.data_objs["child_4"]])
+
+ #
+ # Test 'get_descendants()'
+ #
+ def test_dobj_path_get_descendants_by_name_unique(self):
+ '''Validate path with deep search by unique name'''
+ found_obj_list = self.data_objs["data_obj"].find_path("//%s" %
+ (self.data_objs["child_5_2_3_1"].name))
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5_2_3_1"])
+
+ def test_dobj_path_get_descendants_by_name_multiple(self):
+ '''Validate path with deep search by non-unique name'''
+ found_obj_list = self.data_objs["data_obj"].find_path("//%s" %
+ (self.data_objs["child_3_1_2"].name))
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d" % (len(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_3_1_2"],
+ self.data_objs["child_3_1_2_same_name"]])
+
+ def test_dobj_path_get_descendants_by_type_unique(self):
+ '''Validate path with deep search by unique type'''
+ # Should return child_5_2_3 which is of type SimpleDataObject4
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject4]")
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_5_2_3"])
+
+ def test_dobj_path_get_descendants_by_type_multiple(self):
+ '''Validate path with deep search by non-unique type'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2]")
+
+ self.assertTrue(len(found_obj_list) == 6,
+ "Expected len 6, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"], self.data_objs["child_2_1_1_1"],
+ self.data_objs["child_2_1_1_2"], self.data_objs["child_4"]])
+
+ def test_dobj_path_get_descendants_name_by_type_multiple(self):
+ '''Validate path with deep search by non-unique type and name attr'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2].name")
+
+ self.assertTrue(len(found_obj_list) == 6,
+ "Expected len 6, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ ["child_1", "child_2_1", "child_2_1_1", "child_2_1_1_1",
+ "child_2_1_1_2", "child_4"])
+
+ def test_dobj_path_get_descendants_by_name_and_type(self):
+ '''Validate path with deep search by name and type'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//%s[@simple_data_object.SimpleDataObject2]" %
+ (self.data_objs["child_2_1_1_2"].name))
+
+ self.assertTrue(len(found_obj_list) == 1)
+ self.assertEqual(found_obj_list[0], self.data_objs["child_2_1_1_2"])
+
+ def test_dobj_path_get_descendants_by_name_not_exist(self):
+ '''Validate failure on path with deep search by non-existent name'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "//non_existant_name")
+
+ def test_dobj_path_get_descendants_by_type_not_exist(self):
+ '''Validate fail on path with deep search by non-existent type'''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "//[@simple_data_object.SimpleDataObject5]")
+
+ def test_dobj_path_get_descendants_by_name_exist_and_type_not_exist(self):
+ '''Validate fail on path with deep search by valid name & invalid type
+ '''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "//%s[@simple_data_object.SimpleDataObject4]" %
+ (self.data_objs["child_5_2_2"].name))
+
+ def test_dobj_path_get_descendants_by_name_not_exist_and_type_exist(self):
+ '''Validate failure on path deep search non-existent name & valid type
+ '''
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path,
+ "//nonexistantname[@simple_data_object.SimpleDataObject2]")
+
+ def test_dobj_path_get_descendants_by_type_and_max_depth_minus_1(self):
+ '''Validate failure on path deep search with invalid (-1) max_depth'''
+ self.assertRaises(ValueError, self.data_objs["data_obj"].find_path,
+ "//[@simple_data_object.SimpleDataObject2?-1]")
+
+ def test_dobj_path_get_descendants_by_type_and_max_depth_1(self):
+ '''Validate path with deep search by type and max_depth == 1'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2?1]")
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_4"]])
+
+ def test_dobj_path_get_descendants_by_type_and_max_depth_2(self):
+ '''Validate path with deep search by type and max_depth == 2'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2?2]")
+
+ self.assertTrue(len(found_obj_list) == 3,
+ "Expected len 3, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_4"]])
+
+ def test_dobj_path_get_descendants_by_type_and_max_depth_3(self):
+ '''Validate path with deep search by type and max_depth == 3'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2?3]")
+
+ self.assertTrue(len(found_obj_list) == 4,
+ "Expected len 4, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"], self.data_objs["child_4"]])
+
+ def test_dobj_path_get_descendants_by_type_and_max_depth_4(self):
+ '''Validate path with deep search by type and max_depth == 4'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject2?4]")
+
+ self.assertTrue(len(found_obj_list) == 6,
+ "Expected len 6, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_1"], self.data_objs["child_2_1"],
+ self.data_objs["child_2_1_1"],
+ self.data_objs["child_2_1_1_1"],
+ self.data_objs["child_2_1_1_2"], self.data_objs["child_4"]])
+
+ def test_dobj_path_get_descendants_using_method_max_count_invalid(self):
+ '''Validate fail on path with deep search with invalid max_count (0)
+ '''
+ self.assertRaises(ValueError, self.data_objs["data_obj"].find_path,
+ "//[@simple_data_object.SimpleDataObject#0]")
+
+ def test_dobj_path_get_descendants_by_type_and_max_count_1(self):
+ '''Validate path with deep search by type and max_count == 1'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject#1]")
+
+ self.assertTrue(len(found_obj_list) == 1,
+ "Expected len 1, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list, [self.data_objs["child_1"]])
+
+ def test_dobj_path_get_descendants_by_type_and_max_count_2(self):
+ '''Validate path with deep search by type and max_count == 2'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject#2]")
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list, [self.data_objs["child_1"],
+ self.data_objs["child_1_1"]])
+
+ def test_dobj_path_get_descendants_by_type_and_max_count_4(self):
+ '''Validate path with deep search by type and max_count == 4'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "//[@simple_data_object.SimpleDataObject#4]")
+
+ self.assertTrue(len(found_obj_list) == 4,
+ "Expected len 4, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list, [self.data_objs["child_1"],
+ self.data_objs["child_1_1"], self.data_objs["child_1_2"],
+ self.data_objs["child_2"]])
+
+ def test_dobj_path_get_deep_path_objs(self):
+ '''Validate path with long path and max_count'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/child_2/child_2_1/child_2_1_1/"
+ "[@simple_data_object.SimpleDataObject#4]")
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ [self.data_objs["child_2_1_1_1"], self.data_objs["child_2_1_1_2"]])
+
+ def test_dobj_path_get_deep_path_name(self):
+ '''Validate path with long path and max_count getting name attr'''
+ found_obj_list = self.data_objs["data_obj"].find_path(
+ "/child_2/child_2_1/child_2_1_1/"
+ "[@simple_data_object.SimpleDataObject#4].name")
+
+ self.assertTrue(len(found_obj_list) == 2,
+ "Expected len 2, got %d : %s" %
+ (len(found_obj_list), str(found_obj_list)))
+ self.assertEqual(found_obj_list,
+ ["child_2_1_1_1", "child_2_1_1_2"])
+
+ def test_dobj_path_find_path_prop(self):
+ '''Validate object_path property can retrieve same object'''
+ test_obj = self.data_objs["child_2_1_1_1"]
+ self.assertEquals("/child_2/child_2_1/child_2_1_1/child_2_1_1_1",
+ test_obj.object_path)
+
+ # Ensure that fetching something by its own path returns itself.
+ self.assertEquals([test_obj],
+ self.data_objs["data_obj"].find_path(test_obj.object_path))
+
+ def test_dobj_path_find_path_special_chars(self):
+ '''Validate correct handling of path with special chars'''
+ # Construct a special tree for this test
+ root = simple_data_object.SimpleDataObject("root")
+ child_1 = simple_data_object.SimpleDataObject("using/some/slashes")
+ child_2 = simple_data_object.SimpleDataObject("using /%$ ?'@#&")
+ child_1_1 = simple_data_object.SimpleDataObject(
+ "using / some / slashes&others!? $%^!*^&")
+ child_2_1 = simple_data_object.SimpleDataObject("using just spaces")
+
+ root.insert_children([child_1, child_2])
+ child_1.insert_children(child_1_1)
+ child_2.insert_children(child_2_1)
+
+ # Ensure that fetching something by its own path returns itself.
+ self.assertEquals([child_1_1],
+ root.find_path(child_1_1.object_path))
+
+ self.assertEquals([child_2_1],
+ root.find_path(child_2_1.object_path))
+
+ def test_dobj_path_find_path_end_slash_ignored(self):
+ '''Validate path with trailing slash'''
+ end_slash = "/child_1/"
+
+ self.assertEquals([self.data_objs["child_1"]],
+ self.data_objs["data_obj"].find_path(end_slash))
+
+ def test_dobj_path_find_path_fail_bad_paths(self):
+ '''Validate fail on path without '/' at start'''
+ rel_path = "child_1/child_1_1"
+
+ self.assertRaises(PathError,
+ self.data_objs["data_obj"].find_path, rel_path)
+
+ def test_dobj_path_find_path_1_valid_attribute(self):
+ '''Validate path with unique name getting attribute'''
+ attr = "/child_1/child_1_1.name"
+
+ attr_val_list = self.data_objs["data_obj"].find_path(attr)
+
+ self.assertEquals(attr_val_list,
+ [self.data_objs["child_1_1"].name])
+
+ def test_dobj_path_find_path_many_valid_attribute(self):
+ '''Validate path with non-unique name getting attribute'''
+ attr = "//child_5_2_3/[@DataObject].name"
+
+ attr_val_list = self.data_objs["data_obj"].find_path(attr)
+
+ self.assertEquals(attr_val_list,
+ [self.data_objs["child_5_2_3_1"].name,
+ self.data_objs["child_5_2_3_2"].name,
+ self.data_objs["child_5_2_3_3"].name,
+ self.data_objs["child_5_2_3_3"].name])
+
+ def test_dobj_path_find_path_no_name_or_class_type(self):
+ '''Validate access to path with no name to access attribute'''
+ attr = "/child_1/.name"
+
+ self.assertEquals(self.data_objs["data_obj"].find_path(attr),
+ [self.data_objs["child_1_1"].name,
+ self.data_objs["child_1_2"].name])
+
+ def test_dobj_path_find_path_fail_bad_attribute(self):
+ '''Validate fail on path attempting to access protected attributes'''
+ attr = "/child_1/child_1_1._name"
+
+ self.assertRaises(AttributeError,
+ self.data_objs["data_obj"].find_path, attr)
+
+ def test_dobj_path_find_path_ignore_bad_expr(self):
+ '''Validate path with bad expr ignores the bad elements'''
+ attr = "//child_5_2_3/[a bad expr, here].name"
+
+ attr_val_list = self.data_objs["data_obj"].find_path(attr)
+
+ self.assertEquals(attr_val_list,
+ [self.data_objs["child_5_2_3_1"].name,
+ self.data_objs["child_5_2_3_2"].name,
+ self.data_objs["child_5_2_3_3"].name,
+ self.data_objs["child_5_2_3_3"].name])
+
+ def test_dobj_path_find_path_fail_no_matches(self):
+ '''Validate path with non-existent name fails'''
+ attr = "/child_1/no_such_child"
+
+ self.assertRaises(ObjectNotFoundError,
+ self.data_objs["data_obj"].find_path, attr)
+
+ def test_dobj_path_find_path_fail_invalid_mod_name(self):
+ '''Validate path with non-existent object module name fails'''
+ attr = "/child_1/[@no_such_mod.MyObject]"
+
+ self.assertRaises(PathError,
+ self.data_objs["data_obj"].find_path, attr)
+
+ def test_dobj_path_find_path_fail_invalid_class_name(self):
+ '''Validate path with non-existent Class and no module name fails'''
+ attr = "/child_1/[@MyObject]"
+
+ self.assertRaises(PathError,
+ self.data_objs["data_obj"].find_path, attr)
+
+ def test_dobj_path_find_path_fail_valid_mod_not_class(self):
+ '''Validate path with non-existent Class and in a valid module fails'''
+ attr = "/child_1/[@solaris_install.data_object.NoSuchClass]"
+
+ self.assertRaises(PathError,
+ self.data_objs["data_obj"].find_path, attr)
+
+ def test_dobj_path_find_path_just_slashes(self):
+ '''Validate path with just '/' or '//' '''
+ self.assertEqual(self.data_objs["data_obj"].find_path("/"),
+ self.data_objs["data_obj"].children)
+
+ self.assertEqual(self.data_objs["data_obj"].find_path("//"),
+ self.data_objs["data_obj"].get_descendants(
+ class_type=DataObjectBase))
+
+ def test_dobj_path_simple_str_subst(self):
+ """Validate string replace with some simple substitutions
+ """
+ self.assertEquals(self.data_objs["data_obj"].str_replace_paths_refs(
+ "value=%{//child_5_2_3_1.name}"),
+ "value='child_5_2_3_1'")
+
+ self.assertEquals(self.data_objs["data_obj"].str_replace_paths_refs(
+ "allvalues=%{//[@simple_data_object.SimpleDataObject2].name}"),
+ "allvalues='child_1','child_2_1','child_2_1_1','child_2_1_1_1',"
+ "'child_2_1_1_2','child_4'")
+
+ self.assertEquals(self.data_objs["data_obj"].str_replace_paths_refs(
+ "allvalues=%{/child_2//.name}"),
+ "allvalues="
+ "'child_2_1','child_2_1_1','child_2_1_1_1','child_2_1_1_2'")
+
+ self.assertEquals(self.data_objs["data_obj"].str_replace_paths_refs(
+ "allvalues=%{/child_2//.name}", value_separator=" "),
+ "allvalues="
+ "'child_2_1' 'child_2_1_1' 'child_2_1_1_1' 'child_2_1_1_2'")
+
+ def test_dobj_path_multiple_str_subst(self):
+ """Validate string replace with some multiple substitutions
+ """
+ self.assertEquals(self.data_objs["data_obj"].str_replace_paths_refs(
+ "value1=%{//child_3_1_1.name}"
+ " value2=%{//child_5_2_1.name}"),
+ "value1='child_3_1_1' value2='child_5_2_1'")
+
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_utility.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,169 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Test for DataObject utility methods'''
+
+import unittest
+import copy
+
+from simple_data_object import SimpleDataObject, \
+ SimpleDataObject2, SimpleDataObject3, SimpleDataObject4, \
+ create_simple_data_obj_tree
+
+
+class TestDataObjectUtility(unittest.TestCase):
+ '''Test for DataObject utility methods'''
+
+ def setUp(self):
+ '''Create tree structure and local reference'''
+ self.data_obj = create_simple_data_obj_tree()
+
+ def tearDown(self):
+ '''Clean up references'''
+ self.data_obj = None
+
+ def test_data_object_utility_get_name(self):
+ '''Validate name property is read-only and returns correct info'''
+ self.assertEqual(self.data_obj["child_1_2"]._name,
+ self.data_obj["child_1_2"].name)
+
+ try:
+ self.data_obj["child_1"].name = "NewName"
+ self.fail("Succeeded in setting name, when expected failure.")
+ except AttributeError:
+ pass
+
+ def test_data_object_utility_get_parent(self):
+ '''Validate parent property is read-only and returns correct info'''
+ self.assertEqual(self.data_obj["child_1_2"]._parent,
+ self.data_obj["child_1_2"].parent)
+
+ try:
+ self.data_obj["child_1_1"].parent = self.data_obj["child_2"]
+ self.fail("Succeeded in setting parent, when expected failure.")
+ except AttributeError:
+ pass
+
+ def test_data_object_utility_get_xml_str(self):
+ '''Validate get_xml_str() method'''
+ s = self.data_obj["data_obj"].get_xml_tree_str()
+ # Ensure not None
+ self.assertTrue(s != None, "get_xml_str() returned None!")
+ # Ensure it contains a string we expect
+ self.assertTrue(s.find(self.data_obj["child_5_2_3_3"].name) != -1)
+ # Ensure it's the length we expect - could fail easily, may
+ # need re-baseline?
+ self.assertTrue(len(s) == 1262,
+ "get_xml_str() returned invalid string len: len = %d\n%s" %
+ (len(s), s))
+
+ def test_data_object_utility_has_children(self):
+ '''Validate has_children property'''
+ # Test multiple children
+ self.assertTrue(self.data_obj["data_obj"].has_children,
+ "'%s' should have children\n%s" %\
+ (self.data_obj["data_obj"].name, str(self.data_obj)))
+ # Test no children
+ self.assertFalse(self.data_obj["child_4"].has_children,
+ "'%s' should not have children\n%s" %\
+ (self.data_obj["child_4"].name, str(self.data_obj["child_4"])))
+ # Test one child.
+ self.assertTrue(self.data_obj["child_2_1"].has_children,
+ "'%s' should have children\n%s" %\
+ (self.data_obj["child_2_1"].name, str(self.data_obj["child_2_1"])))
+
+ def test_data_object_utility_copy(self):
+ '''Validate copy mechanism'''
+ orig_name = self.data_obj["child_5_2"].name
+ orig_parent = self.data_obj["child_5_2"].parent
+ orig_children = self.data_obj["child_5_2"].children
+
+ my_copy = copy.copy(self.data_obj["child_5_2"])
+
+ # Ensure original is unchanged.
+ self.assertEqual(self.data_obj["child_5_2"].name, orig_name)
+ self.assertEqual(self.data_obj["child_5_2"].parent, orig_parent)
+ self.assertEqual(self.data_obj["child_5_2"].children, orig_children)
+
+ # Ensure that copy has expected differences.
+ self.assertNotEqual(my_copy, self.data_obj["child_5_2"])
+
+ self.assertEqual(my_copy.name, self.data_obj["child_5_2"].name)
+
+ self.assertTrue(my_copy.parent == None,
+ "Copy shouldn't have a parent")
+
+ self.assertFalse(my_copy.has_children,
+ "Copy shouldn't have any children")
+
+ self.assertNotEqual(my_copy.children,
+ self.data_obj["child_5_2"].children)
+
+ def test_data_object_utility_deepcopy(self):
+ '''Validate deepcopy mechanism'''
+ orig_name = self.data_obj["child_5_2"].name
+ orig_parent = self.data_obj["child_5_2"].parent
+ orig_children = self.data_obj["child_5_2"].children
+
+ my_copy = copy.deepcopy(self.data_obj["child_5_2"])
+
+ # Ensure original is unchanged.
+ self.assertEqual(self.data_obj["child_5_2"].name, orig_name)
+ self.assertEqual(self.data_obj["child_5_2"].parent, orig_parent)
+ self.assertEqual(self.data_obj["child_5_2"].children, orig_children)
+
+ # Ensure that copy has expected differences
+ self.assertNotEqual(my_copy, self.data_obj["child_5_2"])
+
+ self.assertEqual(my_copy.name, self.data_obj["child_5_2"].name)
+
+ self.assertTrue(my_copy.parent == None,
+ "Copy shouldn't have a parent")
+
+ self.assertTrue(my_copy.has_children,
+ "Copy should have children")
+
+ # Children aren't exactly same objects.
+ self.assertNotEqual(my_copy.children,
+ self.data_obj["child_5_2"].children)
+
+ # Children should be same contents though...
+ orig_children = self.data_obj["child_5_2"].children
+ copy_children = my_copy.children
+
+ self.assertEqual(len(orig_children), len(copy_children),
+ "Copy should have the same number of children! %d != %d" %\
+ (len(orig_children), len(copy_children)))
+
+ for i in range(len(orig_children)):
+ # Check that original child has correct parent
+ self.assertEqual(orig_children[i].parent,
+ self.data_obj["child_5_2"])
+ # Check that new copy has the correct parent
+ self.assertEqual(copy_children[i].parent, my_copy)
+ # Check that the names are still the same
+ self.assertEqual(orig_children[i].name, copy_children[i].name)
+
+if __name__ == '__main__':
+ unittest.main()
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/usr/src/lib/install_doc/test/test_data_object_xml_support.py Wed Oct 20 18:48:33 2010 +0100
@@ -0,0 +1,254 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved.
+#
+'''Tests to ensure that DataObject XML support is working as expected'''
+
+import unittest
+
+from solaris_install.data_object import DataObject
+from simple_data_object import SimpleDataObject, SimpleDataObject2, \
+ SimpleDataObject3, SimpleDataObjectHandlesChildren
+
+
+class TestDataObjectXmlSupport(unittest.TestCase):
+ '''Tests to ensure that DataObject XML support is working as expected'''
+
+ def setUp(self):
+ '''Create an XML tree for testing and references to them.
+
+ The tree will look like the following:
+
+ root
+ child_1
+ child_1_1
+ child_1_2
+ child_2
+ child_2_1
+ child_2_1_1
+ child_2_1_1_1
+ child_2_1_1_2
+ child_3
+ child_3_1
+ child_3_1_2
+ child_3_1_2
+ child_3_1_2_same_name
+ child_4
+ child_5
+ child_5_1
+ child_5_2
+ child_5_2_1
+ child_5_2_2
+ child_5_2_3
+ child_5_2_3_1
+ child_5_2_3_2
+ child_5_2_3_3
+ child_5_2_3_3_same_name
+ child_5_same_name
+ '''
+
+ # Create root node
+ self.data_obj = SimpleDataObject("root")
+ # Add some children, used by most tests.
+ self.child_1 = SimpleDataObject2("child_1")
+ self.child_2 = SimpleDataObject("child_2")
+ self.child_3 = SimpleDataObject("child_3")
+ self.child_4 = SimpleDataObject2("child_4")
+ self.child_5 = SimpleDataObject3("child_5")
+
+ self.do_list = list()
+ self.do_list.append(self.child_1)
+ self.do_list.append(self.child_2)
+ self.do_list.append(self.child_3)
+ self.do_list.append(self.child_4)
+ self.do_list.append(self.child_5)
+
+ self.data_obj.insert_children(self.do_list)
+
+ # Now let's add the children of children, etc. for use by
+ # get_descendants() tests.
+ # child_1 children
+ self.child_1_1 = SimpleDataObject("child_1_1")
+ self.child_1_2 = SimpleDataObject("child_1_2")
+ self.child_1.insert_children([self.child_1_1, self.child_1_2])
+
+ # child_2 tree
+ self.child_2_1 = SimpleDataObject2("child_2_1")
+ self.child_2.insert_children(self.child_2_1)
+ self.child_2_1_1 = SimpleDataObject2("child_2_1_1")
+ self.child_2_1.insert_children(self.child_2_1_1)
+ self.child_2_1_1_1 = SimpleDataObject2("child_2_1_1_1")
+ self.child_2_1_1_2 = SimpleDataObject2("child_2_1_1_2")
+ self.child_2_1_1.insert_children(
+ [self.child_2_1_1_1, self.child_2_1_1_2])
+
+ # child_3 tree
+ self.child_3_1 = SimpleDataObject("child_3_1")
+ self.child_3.insert_children(self.child_3_1)
+ self.child_3_1_1 = SimpleDataObject("child_3_1_1")
+ self.child_3_1_2 = SimpleDataObject("child_3_1_2")
+ self.child_3_1_2_same_name = SimpleDataObject("child_3_1_2")
+ self.child_3_1.insert_children([self.child_3_1_1, self.child_3_1_2,
+ self.child_3_1_2_same_name])
+
+ # child_5 tree
+ self.child_5_1 = SimpleDataObject("child_5_1")
+ self.child_5_2 = SimpleDataObject("child_5_2")
+ self.child_5.insert_children([self.child_5_1, self.child_5_2])
+ self.child_5_2_1 = SimpleDataObject("child_5_2_1")
+ self.child_5_2_2 = SimpleDataObject("child_5_2_2")
+ self.child_5_2_3 = SimpleDataObjectHandlesChildren("child_5_2_3")
+ self.child_5_2.insert_children(
+ [self.child_5_2_1, self.child_5_2_2, self.child_5_2_3])
+
+ self.child_5_2_3_1 = SimpleDataObject("child_5_2_3_1")
+ self.child_5_2_3_2 = SimpleDataObject("child_5_2_3_2")
+ self.child_5_2_3_3 = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3_3_same_name = SimpleDataObject("child_5_2_3_3")
+ self.child_5_2_3.insert_children(
+ [self.child_5_2_3_1, self.child_5_2_3_2,
+ self.child_5_2_3_3, self.child_5_2_3_3_same_name])
+
+ def tearDown(self):
+ '''Clean up all references to objects'''
+ self.data_obj = None
+ self.child_1 = None
+ self.child_2 = None
+ self.child_3 = None
+ self.child_4 = None
+ self.child_5 = None
+ self.do_list = None
+
+ self.child_1_1 = None
+ self.child_1_2 = None
+
+ self.child_2_1 = None
+ self.child_2_1_1 = None
+ self.child_2_1_1_1 = None
+ self.child_2_1_1_2 = None
+
+ self.child_3_1 = None
+ self.child_3_1_1 = None
+ self.child_3_1_2 = None
+ self.child_3_1_2_same_name = None
+
+ self.child_5_1 = None
+ self.child_5_2 = None
+ self.child_5_2_1 = None
+ self.child_5_2_2 = None
+ self.child_5_2_3 = None
+
+ self.child_5_2_3_1 = None
+ self.child_5_2_3_2 = None
+ self.child_5_2_3_3 = None
+ self.child_5_2_3_3_same_name = None
+
+ def test_data_object_xml_support_skips_levels(self):
+ '''Validate XML generation will skip levels if child doesn't gen XML'''
+ # SimpleDataObject3 objects don't generate XML, so should be skipped
+ # and all of child_5's children should be direct sub-elements of the
+ # node called 'root'
+
+ xml_tree = self.data_obj.get_xml_tree()
+
+ child_names = []
+ for xml_child in xml_tree:
+ child_names.append(xml_child.get("name"))
+
+ self.assertEqual(child_names,
+ [self.child_1.name, self.child_2.name, self.child_3.name,
+ self.child_4.name, self.child_5_1.name, self.child_5_2.name])
+
+ def test_data_object_xml_support_parent_generates_xml(self):
+ '''Validate ability to generate XML for children'''
+ # child_5_2_3 generates the xml for its children in the form:
+ #     <so_child name="<NAME>"/>
+
+ xml_tree = self.child_5_2_3.get_xml_tree()
+ names = []
+ for xml_child in xml_tree:
+ self.assertEqual(xml_child.tag,
+ SimpleDataObjectHandlesChildren.TAG_NAME,
+ "sib-element had unexpected tag : %s" % (xml_child.tag))
+ names.append(xml_child.get("name"))
+
+ self.assertEqual(names, \
+ [self.child_5_2_3_1.name, self.child_5_2_3_2.name,
+ self.child_5_2_3_3.name, self.child_5_2_3_3_same_name.name])
+
+ def test_data_object_xml_support_get_tree_string(self):
+ '''Validate ability to generate XML using get_xml_tree_str()'''
+ # Define expected string, compensate for indent. Using '.' in expected
+ # string to remove conflict with indent replacement.
+ indentation = '''\
+ '''
+ expected_xml = '''\
+ <SimpleDataObject name="root">
+ ..<SimpleDataObject2 name="child_1">
+ ....<SimpleDataObject name="child_1_1"/>
+ ....<SimpleDataObject name="child_1_2"/>
+ ..</SimpleDataObject2>
+ ..<SimpleDataObject name="child_2">
+ ....<SimpleDataObject2 name="child_2_1">
+ ......<SimpleDataObject2 name="child_2_1_1">
+ ........<SimpleDataObject2 name="child_2_1_1_1"/>
+ ........<SimpleDataObject2 name="child_2_1_1_2"/>
+ ......</SimpleDataObject2>
+ ....</SimpleDataObject2>
+ ..</SimpleDataObject>
+ ..<SimpleDataObject name="child_3">
+ ....<SimpleDataObject name="child_3_1">
+ ......<SimpleDataObject name="child_3_1_1"/>
+ ......<SimpleDataObject name="child_3_1_2"/>
+ ......<SimpleDataObject name="child_3_1_2"/>
+ ....</SimpleDataObject>
+ ..</SimpleDataObject>
+ ..<SimpleDataObject2 name="child_4"/>
+ ..<SimpleDataObject name="child_5_1"/>
+ ..<SimpleDataObject name="child_5_2">
+ ....<SimpleDataObject name="child_5_2_1"/>
+ ....<SimpleDataObject name="child_5_2_2"/>
+ ....<SimpleDataObjectHandlesChildren name="child_5_2_3">
+ ......<so_child name="child_5_2_3_1"/>
+ ......<so_child name="child_5_2_3_2"/>
+ ......<so_child name="child_5_2_3_3"/>
+ ......<so_child name="child_5_2_3_3"/>
+ ....</SimpleDataObjectHandlesChildren>
+ ..</SimpleDataObject>
+ </SimpleDataObject>
+ '''.replace(indentation, "").replace(".", " ")
+ xml_str = self.data_obj.get_xml_tree_str()
+
+ self.assertEqual(xml_str, expected_xml,
+ "Resulting XML doesn't match expected (len=%d != %d):\
+ \nGOT:\n'%s'\nEXPECTED:\n'%s'\n" %
+ (len(xml_str), len(expected_xml), xml_str, expected_xml))
+
+ def test_data_object_xml_methods(self):
+ '''Validate XML methods react correctly to None parameter'''
+ self.assertFalse(DataObject.can_handle(None))
+
+ self.assertFalse(DataObject.from_xml(None))
+
+if __name__ == '__main__':
+ unittest.main()
--- a/usr/src/pkg/manifests/system-library-install.mf Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/pkg/manifests/system-library-install.mf Wed Oct 20 18:48:33 2010 +0100
@@ -34,6 +34,7 @@
dir path=usr/lib/python2.6/vendor-packages
dir path=usr/lib/python2.6/vendor-packages/osol_install
dir path=usr/lib/python2.6/vendor-packages/solaris_install
+dir path=usr/lib/python2.6/vendor-packages/solaris_install/data_object
dir path=usr/snadm
dir path=usr/snadm/lib
file path=usr/lib/libaiscf.so.1
@@ -45,8 +46,16 @@
file path=usr/lib/python2.6/vendor-packages/osol_install/errsvc.pyc
file path=usr/lib/python2.6/vendor-packages/osol_install/liberrsvc.py
file path=usr/lib/python2.6/vendor-packages/osol_install/liberrsvc.pyc
+file path=usr/lib/python2.6/vendor-packages/solaris_install/__init__.py
+file path=usr/lib/python2.6/vendor-packages/solaris_install/__init__.pyc
file path=usr/lib/python2.6/vendor-packages/solaris_install/logger.py
file path=usr/lib/python2.6/vendor-packages/solaris_install/logger.pyc
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/__init__.py
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/__init__.pyc
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/cache.py
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/cache.pyc
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/data_dict.py
+file path=usr/lib/python2.6/vendor-packages/solaris_install/data_object/data_dict.pyc
file path=usr/snadm/lib/libspmicommon.so.1
license cr_Sun license=cr_Sun
link path=usr/lib/libaiscf.so target=libaiscf.so.1
--- a/usr/src/tools/tests/tests.nose Wed Oct 20 10:31:42 2010 -0600
+++ b/usr/src/tools/tests/tests.nose Wed Oct 20 18:48:33 2010 +0100
@@ -30,4 +30,4 @@
# the files in that directory should begine with "test_". Files
# containing in-line doc-tests should be added explicitly.
-tests=lib/liberrsvc_pymod/test/,cmd/ai-webserver/test/,cmd/text-install/osol_install/text_install/test/,cmd/installadm/installadm_common.py,lib/install_utils/test/,lib/libict_pymod/test/,lib/install_logging_pymod/test
+tests=lib/liberrsvc_pymod/test/,cmd/ai-webserver/test/,cmd/text-install/osol_install/text_install/test/,cmd/installadm/installadm_common.py,lib/install_utils/test/,lib/libict_pymod/test/,lib/install_logging_pymod/test,lib/install_doc/test