4425 pkg install should deal w/ complex dependency changes in one install
authorBart Smaalders <Bart.Smaalders@Sun.COM>
Wed, 18 Nov 2009 15:53:48 -0800
changeset 1505 cc598d70bbbe
parent 1504 265a1d6b86bd
child 1506 9c8b3a3184be
4425 pkg install should deal w/ complex dependency changes in one install 12018 Implement Facets 12046 Implement publisher search order proposal 12050 Implement Exclude type dependency 762 Dead code in cfgfiles.py? 2606 expanded dependency version specification capability desired 3096 install and uninstall of multiple matches doesn't seem to work 5015 newest package version not installed by depend action with type=require 6018 image-update fails to update optional dependency 7394 package operation failure misleading when mixing build versions 8535 pkgsend in testutils should not fork 8988 nested incorporations fail with IndexError 9030 image-update fails when package constrained by two incorporations 9242 gcc-dev, ss-dev, etc. should be constrained by entire 9294 uninstall should not remove symlinks still used by another package 10922 Image.repair() doesn't set self.imageplan, leading to traceback on "pkg fix" 11681 fmri object should include publisher in hash 11697 pkg dumps stack traceback when --be-name argument contains '+' character 12120 -n operations (install, image-update, etc) w/o -v can skip retrieving manifests 12121 filters have been obsoleted by variants & facets and should be removed. 12455 pkg needs additional exit status codes 12551 imageplan should use manifest prefetch facility introduced in 1472:c50eb141435a
.hgignore
doc/client_api_versions.txt
doc/facets.txt
doc/publisher_search_order.txt
doc/sat.txt
src/client.py
src/gui/modules/installupdate.py
src/gui/modules/misc_non_gui.py
src/man/pkg.1.txt
src/modules/actions/depend.py
src/modules/cfgfiles.py
src/modules/client/actuator.py
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/constraint.py
src/modules/client/debugvalues.py
src/modules/client/filter.py
src/modules/client/history.py
src/modules/client/image.py
src/modules/client/imageconfig.py
src/modules/client/imageplan.py
src/modules/client/imagestate.py
src/modules/client/pkg_solver.py
src/modules/client/pkgplan.py
src/modules/client/publisher.py
src/modules/client/transport/transport.py
src/modules/depotcontroller.py
src/modules/facet.py
src/modules/fmri.py
src/modules/manifest.py
src/modules/misc.py
src/modules/solver/LICENSE
src/modules/solver/Makefile.standalone
src/modules/solver/README
src/modules/solver/py_solver.c
src/modules/solver/solver.c
src/modules/solver/solver.h
src/modules/solver/vec.h
src/modules/variant.py
src/pkgdefs/SUNWipkg/copyright
src/pkgdefs/SUNWipkg/prototype
src/pkgdep.py
src/setup.py
src/tests/api/t_filter.py
src/tests/api/t_manifest.py
src/tests/baseline.txt
src/tests/cli/t_api.py
src/tests/cli/t_api_info.py
src/tests/cli/t_api_search.py
src/tests/cli/t_change_facet.py
src/tests/cli/t_change_variant.py
src/tests/cli/t_colliding_links.py
src/tests/cli/t_fix.py
src/tests/cli/t_pkg_R_option.py
src/tests/cli/t_pkg_api_install.py
src/tests/cli/t_pkg_depotd.py
src/tests/cli/t_pkg_history.py
src/tests/cli/t_pkg_image_update.py
src/tests/cli/t_pkg_info.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_intent.py
src/tests/cli/t_pkg_list.py
src/tests/cli/t_pkg_publisher.py
src/tests/cli/t_pkgdep_resolve.py
src/tests/cli/t_pkgsend.py
src/tests/cli/t_solver.py
src/tests/cli/t_variants.py
src/tests/cli/testutils.py
src/tests/pkg5unittest.py
--- a/.hgignore	Tue Nov 17 17:06:35 2009 -0600
+++ b/.hgignore	Wed Nov 18 15:53:48 2009 -0800
@@ -12,6 +12,7 @@
 ^nbproject
 ^proto
 ^packages
+^nbproject
 # Specific build products
 ^src/extern/.*
 ^src/brand/fmri_compare$
--- a/doc/client_api_versions.txt	Tue Nov 17 17:06:35 2009 -0600
+++ b/doc/client_api_versions.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -1,3 +1,13 @@
+Version 24:
+Incompatible with clients using version 0-23.
+    The pkg.client.api module has changed as follows:
+        * plan_install no longer takes a filters argument
+	* plan_change_variant changed to plan_change_varcets,
+	  w/ argument list changes.
+	* Added get_pub_search_order
+	* Added set_pub_search_after
+	* Added set_pub_search_before
+
 Version 23:
 Incompatible with clients using version 0-22.
     The pkg.client.api module has changed as follows:
--- a/doc/facets.txt	Tue Nov 17 17:06:35 2009 -0600
+++ b/doc/facets.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -16,7 +16,7 @@
 collide.  Other examples include debug vs non-debug binaries, and
 global vs nonglobal zones.
 
-In IPS, options that may be selected or not selected, such as various
+In pkg(5), options that may be selected or not selected, such as various
 locales, documentation, etc., are referred to as facets.  Options which
 are mutually exclusive are called variants.  Both variants and facets
 appear as tags on IPS actions, and result in the action being
@@ -24,12 +24,12 @@
 
 Name                  values
 --------------------------------------------
-facet.locale          zero or more of en_US, de, ... 
-facet.doc.man	      true
-facet.doc	      true
-facet.devel           true
+facet.locale.*        true, false
+facet.doc.man	      true, false
+facet.doc	      true, false
+facet.devel.*         true, false
 variant.arch	      sparc, i386, zos
-variant.debug	      true
+variant.debug.*	      true, false
 
 An action that is tagged w/ a facet or variant that is not selected
 will be automatically excluded; actions w/o facets or variants are
@@ -37,68 +37,85 @@
 tags; an example would be an architecture-specific header file that's
 used by developers:
 
-file 8e7247b269fd116345abbf1aa9000a3d81ed871b chash=1fe53e8e2d0ad25bae13e1fd622c50397a2389ce group=bin mode=0644 owner=root path=usr/include/sys/regset.h variant.arch=x86 facet.devel=true pkg.csize=4002 pkg.size=12457
+file 8e7247b269fd116345abbf1aa9000a3d81ed871b chash=1fe53e8e2d0ad25bae13e1fd622c50397a2389ce group=bin mode=0644 owner=root path=usr/include/sys/regset.h variant.arch=x86 facet.devel.headers=true pkg.csize=4002 pkg.size=12457
 
 This implies that facets and variants are evaluated ANDed together;
-within a single facet the matched values are OR'd together.  For
-example, a particular action may have multiple facet.locale tags; if
-only one of those tags matches then the action is selected.  However, if
-the facet.devel tag is also present on the action but is not set in
-the image, the action is deselected.  
+if any of the variant tags do not match, the action is not installed.
+On the other hand, the facet tags are OR'd together; if any of the
+facets match the action is installed.
 
 Facets and variants are tags, and as such can initially be 
 set on any action, including dependencies, etc.  This can make
-testing problematic, however.
- 
-Facet and variants can be set to a particular value at the image or
-package level. This allows just some documentation to be installed, or
-just one debug driver. It also means that package developers cannot
-rely on the variant to be the same for all packages; it is an error to
-deliver packages that intersect when the variants are inconsistent. It is
-likely also an error for an action to depend on actions in other
-packages that if those actions are part of different facets than the
-dependee.  For example, a command should not require other
-packages be installed with the developer facet enabled if the command
-doesn't also require this.  This is likely to be difficult to enforce;
-setting facets on packages differently that affect dependencies will
-be tricky.
+testing problematic, however.  To simplify matters, variants and
+facets are set at the image level.  Package developers desiring
+fine grained control of their componentry are advised to use
+unique facet tags.
 
 In order to simplify grouping of facets, wildcarding is supported
-when setting image or package facets, but not variants.  For example,
+when setting facets, but not variants.  For example,
 facet.doc.* matches facet.doc.man, facet.doc.info and facet.doc.html.
 For ease of installation and backwards compatibility, facets that
-are unspecified in the image are automatically included.
+are unspecified in the image are automatically included; for the
+same reasons, any variant matching the name variant.debug.* is
+automatically set to false.  When multiple image facet patterns
+match, the longest match is selected. For example, the image
+may have:
+
+
+facet.locale.*  false
+facet.locale.en_US(utf8) true
+
+Actions marked w/ facet.locale.de would match facet.locale.*
+and thus not be installed, but actions matching facet.locale.en_US(utf8)
+would match both patterns; since facet.locale.en_US(utf8) is
+longer than facet.locale.* that logic would prevail.  Note that
+exact matches are always preferred.
+
+A more useful example would be installing the french locale as
+spoken in France.  This consists of files tagged
 
-Changing either variants or facets for an installed image or package
+facet.locale.fr, which tags files which should be installed for all
+French locales, 
+
+and
+
+facet.locale.fr_FR, which is for France in particular, 
+
+but not
+
+facet.locale.fr_CA, which is for Canada.
+
+Setting the following facets ensures this selection:
+
+facet.locale.*     false   # install only locales we specify
+facet.locale.fr    true    
+facet.locale.fr_FR true
+
+Changing either variants or facets for an installed image
 effectively causes re-evaluation of the actions in the installed 
 packages, and may or may not be done live depending on the impact
 of the change.
 
+Because of the need to select the appropriate variant types prior to
+installation or parsing manifests, only variant.debug.* variants can
+be used with pkg(5) without making explicit changes to the source
+code. Developers are encouraged to design their components not to
+intersect in the filesystem so that facets may be used rather than
+variants.
+
 Proposed facets and variants in initial implementation:
 
-Name			Values
+Name			 default      comments
 -----------------------------------------------------
-facet.locale		en_AU.UTF-8, en_US.UTF-8, ...
-facet.doc.info		true
-facet.doc.man		true
-facet.doc.html		true
-facet.devel		true
-facet.platform.sun4u	true
-facet.platform.sun4v	true
-variant.arch		sparc, i386
-variant.zone		global, nonglobal
-variant.debug		true, false
+facet.*			 true   implements default "all facets are included"
+facet.locale.*		 true	should be set to false if individual locales are selected
+facet.doc.info		 true
+facet.doc.man		 true
+facet.doc.html		 true
+facet.devel		 true
+facet.platform.sun4u	 true 
+facet.platform.sun4v	 true
+variant.arch		 one of sparc, i386 as set by platform code
+variant.opensolaris.zone either global or nonglobal as set by image type
+variant.debug.<all>	 false
 
-Since previous installations have implicitly included all facets, on
-upgrade the version of IPS containing facet support will likely
-default to automatically including all facets, and automatically
-selecting the variant that is the running machine.  This will preserve
-current installation features on upgrade.
-
-OpenSolaris has shipped several versions of packages for i386, and will
-ship sparc packages in the near future.  If a user with a sparc machine
-finds a repository with x86 packages that don't have the appropriate 
-variant tags, they could inadvertently install non-functional software.
-This will be rectified as packages are republished with the newer 
-tools.
-
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/publisher_search_order.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,98 @@
+
+In order to select packages from a variety of sources, pkg(5) supports
+the configuration of multiple publishers.  We've started with the
+concept of a preferred publisher, but some ambiguity has arisen as to
+what should happen when publishers are deleted, added, etc.  In
+addition, the single level of selection afforded by the preferred
+designation appears to be somewhat limiting; at the same time, the
+rules for selecting amongst multiple publishers of the same package
+need clarification rather than additional complexity.
+
+We propose the idea of publisher search order, starting with the
+preferred publisher.  Adding a publisher adds a new publisher at the
+end of the search order; if a publisher is made preferred it is moved
+to the front of the search order.  Deleting or disabling a publisher
+causes it to be removed from the search order. Re-enabling a publisher
+causes it to be added at the end of the search order.
+
+When a package is nominated for installation without an explicit
+publisher being specified, the first publisher found in the search
+order to provide the package is selected. Once installed, subsequent
+updates to that package by default always occur from that publisher,
+even if a publisher earlier in the search order starts publishing a
+package with the same name.  This behavior of a publisher is
+characterized as "sticky", and is the default.  It can be disabled on
+a per-publisher basis, and such disablement is useful mostly for
+developers seeking to replace a portion of their packages w/
+development versions. If a publisher is made "non-sticky", its
+packages are searched for as on initial installation on every update -
+no preference is afforded by the previous installation.  Deleted/disabled
+publishers are made non-sticky.
+
+Each selection of the publisher for a package is made independently
+according to the algorithms above; there is no implicit inheritance
+of publisher across dependencies of any type.
+
+The above suggests the following additions to the set-publisher
+subcommand of pkg:
+
+set-publisher [--search-before=publisher] [--search-after=publisher] publisher
+set-publisher [--sticky] [--non-sticky] publisher
+
+
+--search-before=publisherB publisherA causes publisher A to be moved
+from its current place in the search order to be just ahead of publisher B.
+
+--search-after=publisherB publisherA causes publisher A to be moved
+from its current place in the search order to be just behind publisher B.
+
+Specifying --non-sticky causes this publisher not to automatically
+be selected for all updates to packages already installed from this
+publisher; instead, publishers searched earlier are automatically preferred.
+--sticky causes the original behavior to be restored for subsequent
+updates.
+
+
+Use cases
+---------
+
+Normal user getting packages from pkg.opensolaris.org and datahavens.org:
+
+   1) Installs system as per usual, points preferred to usual
+      best available publisher - say, pkg.opensolaris.org
+   2) Adds new publisher datahavens.org to acquire mplayer.
+      Without specifying search order, new publisher is appended
+      to the current order.
+
+Project Developer:
+
+   1) Installs system as per usual, points preferred to usual
+      best available publisher - say, pkg.opensolaris.org
+   2) Adds new publisher "MyOwnRepo" pointing at his
+      latest and greatest bits. Note that his repo is lowest 
+      in search order, but since his package names are unique no issues 
+      arise.
+
+Contrib User that prefers supported bits:
+
+   1) Installs system as per usual, points preferred to usual
+      best available publisher - say, pkg.opensolaris.org
+   2) Adds contrib repo after p.o.o, and makes it non-sticky.
+   3) Adds packages from contrib as desired.
+   4) When and if packages move to p.o.o, they're automatically
+      updated from p.o.o on the next image update or
+      pkg install ....
+
+OpenSolaris developer:
+
+   1) Installs system as per usual, points preferred to usual
+      best available publisher - say, pkg.opensolaris.org/dev
+   2) Adds new preferred publisher "MyOwnRepo" pointing at his
+      latest and greatest bits; making it preferred places it 
+      first in the search order.
+   3) Pkg.opensolaris.org is made non-sticky to cause pkgs from MyOwnRepo
+      to replace those from pkg.opensolaris.org on next update
+   4) If additional fresh bits are required, additional development
+      repos can be added and placed ahead of pkg.opensolaris.org
+      in the search order; in this way multiple consolidations 
+      can be kept at the bleeding edge.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/doc/sat.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,238 @@
+
+It's become clear that our approach to evaluating
+packaging dependencies and constraints needs to become
+a lot more sophisticated as we've been trying to make
+packaging metadata more accurately reflect the way
+we build and test packages.
+
+A significant part of the difficulty is dealing with
+externally produced packages; if a variety of versions
+are available we may need to iteratively test multiple
+versions, evaluating their dependencies to find one that
+is compatible w/ the constraints that may be active on
+the current image.
+
+One method of doing this sort of automated decision making
+is to cast the problem as a series of boolean expressions,
+and then apply a SAT solver to find a solution.  These notes
+describe the results of my experiments w/ the minisat solver 
+Stephen posted some time ago....
+
+Notes:
+--------
+
+1) The presence of a particular package version is a 
+   single boolean variable; True if it's present,
+   False if not.
+
+   The problem set handed to the SAT solver is a series
+   of clauses; each clause are boolean variables (or
+   their negation) or'd together.  All clauses must be
+   true for the solution to exist.
+
+   The clauses need to encode the fact that only one version
+   of a package may be installed at a time, and also encode
+   all different package dependencies and constraints.
+
+2) Each package has some number of versions, inherently ordered.
+   Only one version of a package may be installed at a time
+
+   pkg a -> a.1, a.2, a.3, a.4
+   pkg b -> b.1, b.2, b.3, b.4
+
+   Thus for "a":
+
+   !a.1 | !a.2
+   !a.1 | !a.3
+   !a.1 | !a.4
+   !a.2 | !a.3
+   !a.2 | !a.4
+   !a.3 | !a.4
+
+   where !a represents the negation of a.
+
+   This means that for N versions, we have N(N-1)/2 clauses;
+   pruning older non-accessible versions will be required to 
+   bound memory consumption.
+
+3) Each version of a package may have dependencies on other
+   packages, either w/ or w/o a version.  The version specification
+   will likely not be fully specified (eg multiple versions
+   may satisfy this requirement).
+
+4) dependencies may be of the following types:
+
+   required: fmri specifies minimum acceptable version
+
+   if a.1 requires b.2, b.3 or b.4:
+      !a.1 | b.2 | b.3 | b.4
+   
+   optional: if present, fmri must be at this level or greater
+   if a.1 optionally requires b.3:
+      !a.1 | !b.1
+      !a.1 | !b.2
+
+   incorporate: if present, pkg must match fmri
+
+   if a.1 incorporates b.3:
+      !a.1 | !b.1
+      !a.1 | !b.2
+      !a.1 | !b.4
+
+   exclude: if present, pkg must be less than the version in fmri:
+
+   if a.1 excludes b.3,
+
+      !a.1 | !b.3
+      !a.1 | !b.4
+
+   All of these are linear in the number of package versions
+   either meeting or failing to meet the dependency.
+
+5) To express state, the presence of a package is encoded as a 
+   clause.  We compute the matching fmris and then construct
+   a clause that matches one of those fmris.  Specifying a single
+   version requires that version to be present in the solution;
+   we can also specify current version or newer, or any version of
+   a package.
+
+6) The SAT solver will find a particular solution to our packaging
+   problem, but there is no way of "preferring" newer packages, and
+   preventing the introduction of extraneous unneeded packages.
+   As a result, external optimization in the form of repeated 
+   solution attempts w/ additional constraints is necessary.  
+   The following algorithm has been implemented:
+
+   The packaging problem to be solved is expressed as a series of
+   boolean constraints, and a solution obtained.  Then, for each
+   fmri appearing in the solution vector, all older versions are
+   excluded; in other words, if a.3 is part of the solution, then
+   subsequent solutions will not contain a.1 or a.2.  Then a single
+   vector is added that is the negation of the just found vector,
+   and another solution is found.  For example:
+
+   if solution is a.2, b.3, z.10, we add
+  
+   # first negations of older versions
+   !a.1
+   !b.1
+   !b.2
+   !z.1
+   !z.2
+   ...
+   !z.9
+   # now negate just found solution
+   !a.2 | !b.3 | !z.10
+
+   The latter vector requires that the new solution not contain
+   a.2 and b.3 and z.10; since we've excluded older versions we
+   will either get a vector that eliminates one of the packages
+   as unneeded (if dependencies allow) or one that has newer 
+   versions of one of the needed pkgs.
+
+   We repeat the above process until a solution cannot be found; 
+   the last found solution must therefore be the optimal one.
+
+   The above technique may fail to find the overall optimal 
+   solution if newer packages have incorporation dependencies
+   on earlier versions of their dependencies.  This is expected
+   to be rare.  Pruning the solution space to eliminate older
+   packages is necessary due to rapid solution space growth if
+   there are multiple versions that satisfy dependencies.
+
+
+7) In order to prevent rapid growth of clause count as the
+   number of versions of packages increases, trimming the
+   solution space is essential.  I'm currently using the
+   following techniques:
+
+   1) install a new package on existing system
+
+   identify any existing installed constraints,
+   and trim pkg catalog to eliminate versions
+   outside those constraints.  
+
+   trim pkg catalog to exclude all pkg older than
+   those already installed
+
+   input to solver is trimmed catalog, and 
+   vectors selecting any version of already installed
+   pkgs that meet constraints, plus a vector selected
+   any version of desired pkg.
+
+   2) upgrade to latest version of all available pkgs
+
+   identify any existing installed constraints,
+   and trim pkg catalog to eliminate versions
+   OLDER than those constraints.  
+  
+   trim pkg catalog to exclude all pkg older than
+   those already installed
+
+   input to solver is trimmed catalog, and 
+   vectors selecting any version of already installed
+   pkgs 
+
+   3) upgrade to specified version 
+
+   identify any existing installed constraints,
+   and trim pkg catalog to eliminate versions
+   OLDER than those constraints.  
+  
+   trim pkg catalog to exclude all pkg older than
+   those already installed
+
+   input to solver is trimmed catalog, and 
+   vectors selecting any version of already installed
+   pkgs, plus vector(s) selecting desired constraint(s).
+
+8) One of the most difficult aspects of using a SAT solver
+   is providing a reasonable error message when no solution
+   can be found.
+
+
+   Some techniques that I'm experimenting with include:
+
+   Explicitly checking for obvious non-starters (pkg
+   version earlier than already installed, pkg version that
+   violates constraints on system) prior to passing to SAT
+   solver.  This is needed to permit trimming in any case.
+
+   Using the pruned catalog to quickly evaluate the effect
+   of constraints.   
+   
+   
+Implementation details
+-------------------------
+
+combine catalog object w/ list of installed pkgs and proposed
+changes:
+
+class pkg_solver(object):
+      def __init__(self, catalog, existing_fmris):
+
+      def solve_install(existing_freezes, proposed_fmris):
+      	  """tries to find solution that adds specified fmris
+	  to existing set; any existing packages containing
+	  incorporate dependencies which are at most only depended on
+	  by name (no version) are frozen."""
+
+      def solve_reinstall(existing_freezes, proposed_fmris):
+          """tries to find solution that replaces existing version
+	  with specified version; this one allows stuff to go
+	  backwards if specified on command line"""
+      
+      def solve_uninstall(existing_freezes, proposed_fmris):
+          """tries to remove specified package"""
+
+      def solve_update_all(existing_freezes):
+          """find most recent version of all packages"""
+
+      solve* routines return a list of tuples (old_version, new_version)
+      for each fmri that is changing; new installs have None as old_version,
+      removals have None as new_version.  A returned empty list indicates
+      that no action is needed.
+
+      A failure to find a solution throws an exception,
+      pkg_solver.No_Solution_Found.
+
--- a/src/client.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/client.py	Wed Nov 18 15:53:48 2009 -0800
@@ -79,13 +79,22 @@
     RESULT_FAILED_OUTOFMEMORY)
 from pkg.misc import EmptyI, msg, PipeError
 
-CLIENT_API_VERSION = 23
+CLIENT_API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 JUST_UNKNOWN = 0
 JUST_LEFT = -1
 JUST_RIGHT = 1
 
+#pkg exit codes
+EXIT_OK      = 0
+EXIT_OOPS    = 1
+EXIT_BADOPT  = 2
+EXIT_PARTIAL = 3
+EXIT_NOP     = 4
+EXIT_NOTLIVE = 5
+
+
 logger = global_settings.logger
 
 valid_special_attrs = ["action.name", "action.key", "action.raw"]
@@ -147,11 +156,14 @@
             [-t action_type ... ] [pkg_fmri_pattern ...]
         pkg image-create [-fFPUz] [--force] [--full|--partial|--user] [--zone]
             [-k ssl_key] [-c ssl_cert] [--no-refresh]
-            [--variant <variant_spec>=<instance>]
+            [--variant <variant_spec>=<instance>] 
+            [--facet <facet_spec>=[True,False]]
             (-p|--publisher) name=uri dir
         pkg change-variant [-nvq] [--be-name name] <variant_spec>=<instance>
             [<variant_spec>=<instance> ...]
-
+        pkg change-facet -nvq [--be-name name] <facet_spec>=[True|False|None] ...
+        pkg variant -H [<variant_spec>]
+        pkg facet -H [<facet_spec>]
         pkg set-property propname propvalue
         pkg unset-property propname ...
         pkg property [-H] [propname ...]
@@ -161,9 +173,11 @@
             [-G origin_to_remove | --remove-origin=origin_to_remove]
             [-m mirror_to_add | --add-mirror=mirror_to_add]
             [-M mirror_to_remove | --remove-mirror=mirror_to_remove]
-            [--enable] [--disable] [--no-refresh] [--reset-uuid] publisher
+            [--enable] [--disable] [--no-refresh] [--reset-uuid] 
+            [--non-sticky] [--sticky] [--search-after=publisher]
+            [--search-before=publisher] publisher
         pkg unset-publisher publisher ...
-        pkg publisher [-HPa] [publisher ...]
+        pkg publisher [-HPn] [publisher ...]
         pkg history [-Hl]
         pkg purge-history
         pkg rebuild-index
@@ -243,13 +257,13 @@
                 fmt_str = "%-45s %-15s %-10s %s"
 
         if not check_fmri_args(pargs):
-                return 1
+                return EXIT_OOPS
 
         img.history.operation_name = "list"
 
         api_inst = __api_alloc(img, quiet=True)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         info_needed = frozenset([api.PackageInfo.SUMMARY])
         ppub = img.get_preferred_publisher()
@@ -331,7 +345,7 @@
                                         pis = ret[api.ImageInterface.INFO_FOUND]
                                 except api_errors.ApiException, e:
                                         error(e)
-                                        return 1
+                                        return EXIT_OOPS
 
                                 msg(fmt_str % (pf, pis[0].summary))
 
@@ -345,7 +359,7 @@
                                 logger.error(_("no packages installed"))
                                 img.history.operation_result = \
                                     history.RESULT_NOTHING_TO_DO
-                                return 1
+                                return EXIT_OOPS
 
                         if upgradable_only:
                                 if pargs:
@@ -356,14 +370,14 @@
                                             "have available updates"))
                                 img.history.operation_result = \
                                     history.RESULT_NOTHING_TO_DO
-                                return 1
+                                return EXIT_OOPS
 
                         img.history.operation_result = \
                             history.RESULT_NOTHING_TO_DO
-                        return 1
+                        return EXIT_OOPS
 
                 img.history.operation_result = history.RESULT_SUCCEEDED
-                return 0
+                return EXIT_OK
 
         except api_errors.InventoryException, e:
                 if e.illegal:
@@ -371,7 +385,7 @@
                                 error(i)
                         img.history.operation_result = \
                             history.RESULT_FAILED_BAD_REQUEST
-                        return 1
+                        return EXIT_OOPS
 
                 if all_known:
                         state = _("known")
@@ -382,7 +396,7 @@
                             "'%(pattern)s' %(state)s") %
                             { "pattern": pat, "state": state })
                 img.history.operation_result = history.RESULT_NOTHING_TO_DO
-                return 1
+                return EXIT_OOPS
 
 def get_tracker(quiet=False):
         if quiet:
@@ -434,12 +448,13 @@
                             "Please retry this operation on an alternate boot "
                             "environment."))
                         success = False
+                        return EXIT_NOTLIVE
 
                 if not success:
                         progresstracker.verify_done()
-                        return 1
+                        return EXIT_OOPS
         progresstracker.verify_done()
-        return 0
+        return EXIT_OK
 
 def verify_image(img, args):
         opts, pargs = getopt.getopt(args, "vfqH")
@@ -465,14 +480,14 @@
         progresstracker = get_tracker(quiet)
 
         if not check_fmri_args(pargs):
-                return 1
+                return EXIT_OOPS
 
         fmris, notfound, illegals = img.installed_fmris_from_args(pargs)
 
         if illegals:
                 for i in illegals:
                         logger.error(str(i))
-                return 1
+                return EXIT_OOPS
 
         any_errors = False
 
@@ -528,8 +543,8 @@
                 any_errors = True
 
         if any_errors:
-                return 1
-        return 0
+                return EXIT_OOPS
+        return EXIT_OK
 
 def __api_prepare(operation, api_inst, verbose=False):
         # Exceptions which happen here are printed in the above level, with
@@ -561,22 +576,22 @@
                 api_inst.execute_plan()
         except RuntimeError, e:
                 error(_("%s failed: %s") % (operation, e))
-                return False
+                return EXIT_OOPS
         except api_errors.ImageUpdateOnLiveImageException:
                 error(_("%s cannot be done on live image") % operation)
-                return False
+                return EXIT_NOTLIVE
         except api_errors.RebootNeededOnLiveImageException:
                 error(_("Requested \"%s\" operation would affect files that cannot be "
                         "modified in live image.\n"
                         "Please retry this operation on an alternate boot environment.") %
                       operation)
-                return False
+                return EXIT_NOTLIVE
         except api_errors.CorruptedIndexException, e:
                 error(INCONSISTENT_INDEX_ERROR_MESSAGE)
-                return False
+                return EXIT_OOPS
         except api_errors.ProblematicPermissionsIndexException, e:
                 error(str(e) + PROBLEMATIC_PERMISSIONS_ERROR_MESSAGE)
-                return False
+                return EXIT_OOPS
         except api_errors.ReadOnlyFileSystemException, e:
                 error(e)
                 raise
@@ -584,20 +599,20 @@
                 # Prepend a newline because otherwise the exception will
                 # be printed on the same line as the spinner.
                 error("\n" + str(e))
-                return False
+                return EXIT_OOPS
         except api_errors.MainDictParsingException, e:
                 error(str(e))
-                return False
+                return EXIT_OOPS
         except KeyboardInterrupt:
                 raise
         except api_errors.BEException, e:
                 error(e)
-                return False
+                return EXIT_OOPS
         except api_errors.WrapSuccessfulIndexingException:
                 raise
         except api_errors.ActionExecutionError, e:
                 if not raise_ActionExecutionError:
-                        return False
+                        return EXIT_OOPS
                 error(_("An unexpected error happened during " \
                     "%s: %s") % (operation, e))
                 raise
@@ -605,7 +620,7 @@
                 error(_("An unexpected error happened during " \
                     "%s: %s") % (operation, e))
                 raise
-        return True
+        return EXIT_OK
 
 def __api_alloc(img, quiet=False):
         progresstracker = get_tracker(quiet)
@@ -649,9 +664,11 @@
                 if noexecute:
                         return True
                 return False
-        if e_type == api_errors.BEException:
+
+        if issubclass(e_type, api_errors.BEException):
                 error(_(e))
                 return False
+
         if e_type in (api_errors.CertificateError,
             api_errors.PlanCreationException,
             api_errors.PermissionsException):
@@ -715,28 +732,113 @@
                 return 1
 
         try:
-                stuff_to_do = api_inst.plan_change_variant(variants,
+                stuff_to_do = api_inst.plan_change_varcets(variants, facets=None,
                     noexecute=noexecute, verbose=verbose, be_name=be_name)
         except:
                 if not __api_plan_exception(op, noexecute=noexecute):
+                        return EXIT_OOPS
+
+        if not stuff_to_do:
+                msg(_("No updates necessary for this image."))
+                return EXIT_NOP
+
+        if noexecute:
+                return EXIT_OK
+
+
+        # Exceptions which happen here are printed in the above level, with
+        # or without some extra decoration done here.
+        if not __api_prepare("change-variant", api_inst, verbose=verbose):
+                return EXIT_OOPS
+
+        ret_code = __api_execute_plan("change-variant", api_inst)
+
+        if bool(os.environ.get("PKG_MIRROR_STATS", False)):
+                print_mirror_stats(api_inst)
+
+        return ret_code
+
+def change_facet(img, args):
+        """Attempt to change the facets as specified, updating
+        image as necessary"""
+
+        op = "change-facet"
+        opts, pargs = getopt.getopt(args, "nvq", ["be-name="])
+
+        quiet = noexecute = verbose = False
+        be_name = None
+        for opt, arg in opts:
+                if opt == "-n":
+                        noexecute = True
+                elif opt == "-v":
+                        verbose = True
+                elif opt == "-q":
+                        quiet = True
+                elif opt == "--be-name":
+                        be_name = arg
+
+        if verbose and quiet:
+                usage(_("%s: -v and -q may not be combined") % op)
+
+        if not pargs:
+                usage(_("%s: no facets specified") % op)
+
+        facets = img.get_facets()
+        allowed_values = {
+            "TRUE" : True, 
+            "FALSE": False, 
+            "NONE" : None
+        }
+
+        for arg in pargs:
+
+                # '=' is not allowed in facet names or values
+                if (len(arg.split('=')) != 2):
+                        usage(_("%s: facets must be of the form "
+                            "'facet....=[True|False|None]'") % op)
+
+                # get the facet name and value
+                name, value = arg.split('=')                
+                if not name.startswith("facet."):
+                        name = "facet." + name
+                
+                if value.upper() not in allowed_values:
+                        usage(_("%s: facets must be of the form "
+                            "'facet....=[True|False|None]'.") % op)
+
+                v = allowed_values[value.upper()]
+
+                if v is None and name in facets:
+                        del facets[name]
+                else:
+                        facets[name] = v
+
+        api_inst = __api_alloc(img, quiet)
+        if api_inst == None:
+                return EXIT_OOPS
+
+        try:
+                stuff_to_do = api_inst.plan_change_varcets(variants=None, 
+                    facets=facets, noexecute=noexecute, verbose=verbose,
+                    be_name=be_name)
+        except:
+                if not __api_plan_exception(op, noexecute=noexecute):
                         return 1
 
         if not stuff_to_do:
-                msg(_("No updates necessary for this image."))
-                return 0
+                msg(_("Facet change has no effect on image"))
+                return EXIT_NOP
 
         if noexecute:
-                return 0
+                return EXIT_OK
 
 
         # Exceptions which happen here are printed in the above level, with
         # or without some extra decoration done here.
-        if not __api_prepare("change-variant", api_inst, verbose=verbose):
-                return 1
-
-        ret_code = 0
-        if not __api_execute_plan("change-variant", api_inst):
-                ret_code = 1
+        if not __api_prepare(op, api_inst, verbose=verbose):
+                return EXIT_OOPS
+
+        ret_code = __api_execute_plan(op, api_inst)
 
         if bool(os.environ.get("PKG_MIRROR_STATS", False)):
                 print_mirror_stats(api_inst)
@@ -748,7 +850,6 @@
         version."""
 
         # XXX Publisher-catalog issues.
-        # XXX Are filters appropriate for an image update?
         # XXX Leaf package refinements.
 
         op = "image-update"
@@ -783,7 +884,7 @@
 
         api_inst = __api_alloc(img, quiet)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
                 stuff_to_do, opensolaris_image = \
@@ -792,21 +893,19 @@
                         update_index=update_index, be_name=be_name)
         except:
                 if not __api_plan_exception(op, noexecute=noexecute):
-                        return 1
+                        return EXIT_OOPS
 
         if not stuff_to_do:
                 msg(_("No updates available for this image."))
-                return 0
+                return EXIT_NOP
 
         if noexecute:
-                return 0
+                return EXIT_OK
 
         if not __api_prepare(op, api_inst, verbose=verbose):
                 return 1
 
-        ret_code = 0
-        if not __api_execute_plan(op, api_inst):
-                ret_code = 1
+        ret_code = __api_execute_plan(op, api_inst)
 
         if ret_code == 0 and opensolaris_image:
                 msg("\n" + "-" * 75)
@@ -834,18 +933,16 @@
 
         # XXX Publisher-catalog issues.
         op = "install"
-        opts, pargs = getopt.getopt(args, "nvf:q", ["no-refresh", "no-index"])
+        opts, pargs = getopt.getopt(args, "nvq", ["no-refresh", "no-index"])
 
         quiet = noexecute = verbose = False
         refresh_catalogs = update_index = True
-        filters = []
+
         for opt, arg in opts:
                 if opt == "-n":
                         noexecute = True
                 elif opt == "-v":
                         verbose = True
-                elif opt == "-f":
-                        filters += [ arg ]
                 elif opt == "-q":
                         quiet = True
                 elif opt == "--no-refresh":
@@ -860,40 +957,34 @@
                 usage(_("-v and -q may not be combined"), cmd="install")
 
         if not check_fmri_args(pargs):
-                return 1
-
-        # XXX not sure where this should live
-        pkg_list = [ pat.replace("*", ".*").replace("?", ".")
-            for pat in pargs ]
+                return EXIT_OOPS
 
         api_inst = __api_alloc(img, quiet)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
-                stuff_to_do = api_inst.plan_install(pkg_list, filters,
+                stuff_to_do = api_inst.plan_install(pargs,
                     refresh_catalogs, noexecute, verbose=verbose,
                     update_index=update_index)
         except:
                 if not __api_plan_exception(op, noexecute=noexecute):
-                        return 1
+                        return EXIT_OOPS
 
         if not stuff_to_do:
                 msg(_("No updates necessary for this image."))
-                return 0
+                return EXIT_NOP
 
         if noexecute:
-                return 0
+                return EXIT_OK
 
         # Exceptions which happen here are printed in the above level, with
         # or without some extra decoration done here.
         if not __api_prepare(op, api_inst, verbose=verbose):
-                return 1
-
-        ret_code = 0
-        if not __api_execute_plan(op, api_inst,
-            raise_ActionExecutionError=False):
-                ret_code = 1
+                return EXIT_OOPS
+
+        ret_code = __api_execute_plan(op, api_inst,
+            raise_ActionExecutionError=False)
 
         if bool(os.environ.get("PKG_MIRROR_STATS", False)):
                 print_mirror_stats(api_inst)
@@ -928,46 +1019,38 @@
                 usage(_("-v and -q may not be combined"), cmd="uninstall")
 
         if not check_fmri_args(pargs):
-                return 1
-
-        # XXX not sure where this should live
-        pkg_list = [ pat.replace("*", ".*").replace("?", ".")
-            for pat in pargs ]
+                return EXIT_OOPS
 
         api_inst = __api_alloc(img, quiet)
-        if api_inst == None:
-                return 1
+        if api_inst is None:
+                return EXIT_OOPS
 
         try:
-                if not api_inst.plan_uninstall(pkg_list, recursive_removal,
+                if not api_inst.plan_uninstall(pargs, recursive_removal,
                     noexecute, verbose=verbose, update_index=update_index):
                         assert 0
         except:
                 if not __api_plan_exception(op, noexecute=noexecute):
-                        return 1
+                        return EXIT_OOPS
         if noexecute:
-                return 0
+                return EXIT_OK
 
         # Exceptions which happen here are printed in the above level, with
         # or without some extra decoration done here.
         if not __api_prepare(op, api_inst, verbose=verbose):
-                return 1
-
-        ret_code = 0
-        if not __api_execute_plan(op, api_inst):
-                ret_code = 1
-
-        return ret_code
+                return EXIT_OOPS
+
+        return __api_execute_plan(op, api_inst)
 
 def freeze(img, args):
         """Attempt to take package specified to FROZEN state, with given
         restrictions.  Package must have been in the INSTALLED state."""
-        return 0
+        return EXIT_OK
 
 def unfreeze(img, args):
         """Attempt to return package specified to INSTALLED state from FROZEN
         state."""
-        return 0
+        return EXIT_OK
 
 def __convert_output(a_str, match):
         """Converts a string to a three tuple with the information to fill
@@ -1132,14 +1215,14 @@
 
         api_inst = __api_alloc(img)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
                 query = [api.Query(" ".join(pargs), case_sensitive,
                     return_actions)]
         except api_errors.BooleanQueryException, e:
                 error(e)
-                return 1
+                return EXIT_OOPS
 
         good_res = False
         bad_res = False
@@ -1151,7 +1234,7 @@
                         searches.append(api_inst.remote_search(query,
                             servers=servers))
                 # By default assume we don't find anything.
-                retcode = 1
+                retcode = EXIT_OOPS
 
                 # get initial set of results
                 justs = calc_justs(attrs)
@@ -1245,7 +1328,7 @@
             api_errors.InconsistentIndexException):
                 error(_("The search index appears corrupted.  Please "
                     "rebuild the index with 'pkg rebuild-index'."))
-                return 1
+                return EXIT_OOPS
         except api_errors.ProblematicSearchServers, e:
                 error(e)
                 bad_res = True
@@ -1255,16 +1338,16 @@
             api_errors.InconsistentIndexException):
                 error(_("The search index appears corrupted.  Please "
                     "rebuild the index with 'pkg rebuild-index'."))
-                return 1
+                return EXIT_OOPS
         except api_errors.ApiException, e:
                 error(e)
-                return 1
+                return EXIT_OOPS
         if good_res and bad_res:
-                retcode = 4
+                retcode = EXIT_NOP
         elif bad_res:
-                retcode = 1
+                retcode = EXIT_OOPS
         elif good_res:
-                retcode = 0
+                retcode = EXIT_OK
         return retcode
 
 def info(img, args):
@@ -1294,13 +1377,13 @@
                     cmd="info")
 
         if not check_fmri_args(pargs):
-                return 1
+                return EXIT_OOPS
 
         err = 0
 
         api_inst = __api_alloc(img, quiet=True)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
                 info_needed = api.PackageInfo.ALL_OPTIONS
@@ -1314,7 +1397,7 @@
                         ret = api_inst.info(pargs, info_local, info_needed)
                 except api_errors.UnrecognizedOptionsToInfo, e:
                         error(e)
-                        return 1
+                        return EXIT_OOPS
                 pis = ret[api.ImageInterface.INFO_FOUND]
                 notfound = ret[api.ImageInterface.INFO_MISSING]
                 illegals = ret[api.ImageInterface.INFO_ILLEGALS]
@@ -1322,10 +1405,10 @@
 
         except api_errors.PermissionsException, e:
                 error(e)
-                return 1
+                return EXIT_OOPS
         except api_errors.NoPackagesInstalledException:
                 error(_("no packages installed"))
-                return 1
+                return EXIT_OOPS
 
         no_licenses = []
         for i, pi in enumerate(pis):
@@ -1395,7 +1478,7 @@
                 # XXX add license/copyright info here?
 
         if notfound:
-                err = 1
+                err = EXIT_OOPS
                 if pis:
                         logger.error("")
                 if info_local:
@@ -1412,19 +1495,19 @@
                         logger.error("        %s" % p)
 
         if illegals:
-                err = 1
+                err = EXIT_OOPS
                 for i in illegals:
                         logger.error(str(i))
 
         if multi_match:
-                err = 1
+                err = EXIT_OOPS
                 for pfmri, matches in multi_match:
                         error(_("'%s' matches multiple packages") % pfmri)
                         for k in matches:
                                 logger.error("\t%s" % k)
 
         if no_licenses:
-                err = 1
+                err = EXIT_OOPS
                 error(_("no license information could be found for the "
                     "following packages:"))
                 for pfmri in no_licenses:
@@ -1685,7 +1768,7 @@
                    "packages"), cmd="contents")
 
         if not check_fmri_args(pargs):
-                return 1
+                return EXIT_OOPS
 
         if display_raw:
                 display_headers = False
@@ -1702,7 +1785,7 @@
 
         img.history.operation_name = "contents"
 
-        err = 0
+        err = EXIT_OK
 
         if local:
                 fmris, notfound, illegals = \
@@ -1713,13 +1796,13 @@
                                 logger.error(i)
                         img.history.operation_result = \
                             history.RESULT_FAILED_BAD_REQUEST
-                        return 1
+                        return EXIT_OOPS
 
                 if not fmris and not notfound:
                         error(_("no packages installed"))
                         img.history.operation_result = \
                             history.RESULT_NOTHING_TO_DO
-                        return 1
+                        return EXIT_OOPS
         elif remote:
                 # Verify validity of certificates before attempting network
                 # operations
@@ -1728,7 +1811,7 @@
                 except (api_errors.CertificateError,
                     api_errors.PermissionsException), e:
                         img.history.log_operation_end(error=e)
-                        return 1
+                        return EXIT_OOPS
 
                 fmris = []
                 notfound = []
@@ -1805,7 +1888,7 @@
         else:
                 excludes = img.list_excludes()
 
-        manifests = ( img.get_manifest(f, all_arch=display_raw) for f in fmris )
+        manifests = ( img.get_manifest(f, all_variants=display_raw) for f in fmris )
 
         actionlist = [
             (m.fmri, a, None, None, None)
@@ -1818,7 +1901,7 @@
                     action_types, display_headers)
 
         if notfound:
-                err = 1
+                err = EXIT_OOPS
                 if fmris:
                         logger.error("")
                 if local:
@@ -1889,7 +1972,7 @@
 
         api_inst = __api_alloc(img)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
                 # The user explicitly requested this refresh, so set the
@@ -1899,25 +1982,26 @@
         except api_errors.PublisherError, e:
                 error(e)
                 error(_("'pkg publisher' will show a list of publishers."))
-                return 1
+                return EXIT_OOPS
         except (api_errors.PermissionsException), e:
                 # Prepend a newline because otherwise the exception will
                 # be printed on the same line as the spinner.
                 error("\n" + str(e))
-                return 1
+                return EXIT_OOPS
         except api_errors.CatalogRefreshException, e:
                 if display_catalog_failures(e) == 0:
-                        return 1
+                        return EXIT_OOPS
                 else:
-                        return 3
+                        return EXIT_PARTIAL
         else:
-                return 0
+                return EXIT_OK
 
 def publisher_set(img, args):
         """pkg set-publisher [-Ped] [-k ssl_key] [-c ssl_cert] [--reset-uuid]
             [-g|--add-origin origin to add] [-G|--remove-origin origin to
             remove] [-m|--add-mirror mirror to add] [-M|--remove-mirror mirror
-            to remove] [--enable] [--disable] [--no-refresh] publisher"""
+            to remove] [--enable] [--disable] [--no-refresh] [--sticky] [--non-sticky]
+            [--search-before=publisher] [--search-after=publisher] publisher"""
 
         preferred = False
         ssl_key = None
@@ -1930,10 +2014,14 @@
         remove_origins = set()
         refresh_catalogs = True
         disable = None
+        sticky = None
+        search_before = None
+        search_after = None
 
         opts, pargs = getopt.getopt(args, "Pedk:c:O:G:g:M:m:",
             ["add-mirror=", "remove-mirror=", "add-origin=", "remove-origin=",
-            "no-refresh", "reset-uuid", "enable", "disable"])
+            "no-refresh", "reset-uuid", "enable", "disable", "sticky", 
+            "non-sticky", "search-before=", "search-after="])
 
         for opt, arg in opts:
                 if opt == "-c":
@@ -1960,7 +2048,15 @@
                         reset_uuid = True
                 if opt == "--no-refresh":
                         refresh_catalogs = False
-
+                if opt == "--sticky":
+                        sticky = True
+                if opt == "--non-sticky":
+                        sticky = False
+                if opt == "--search-before":
+                        search_before = arg
+                if opt == "--search-after":
+                        search_after = arg
+                        
         if len(pargs) == 0:
                 usage(_("requires a publisher name"), cmd="set-publisher")
         elif len(pargs) > 1:
@@ -1977,9 +2073,13 @@
                 usage(_("the -O and -g, --add-origin, -G, or --remove-origin "
                     "options may not be combined"), cmd="set-publisher")
 
+        if search_before and search_after:
+                usage(_("--search-before and --search-after may not be combined"),
+                      cmd="set-publisher")
+
         api_inst = __api_alloc(img)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         new_pub = False
         try:
@@ -1990,13 +2090,13 @@
                 repo = pub.selected_repository
         except api_errors.PermissionsException, e:
                 error(e, cmd="set-publisher")
-                return 1
+                return EXIT_OOPS
         except api_errors.UnknownPublisher:
                 if not origin_url and not add_origins:
                         error(_("publisher does not exist. Use -g to define "
                             "origin URI for new publisher."),
                             cmd="set-publisher")
-                        return 1
+                        return EXIT_OOPS
                 # No pre-existing, so create a new one.
                 repo = publisher.Repository()
                 pub = publisher.Publisher(name, repositories=[repo])
@@ -2006,6 +2106,10 @@
                 # Set disabled property only if provided.
                 pub.disabled = disable
 
+        if sticky is not None:
+                # Set stickiness only if provided
+                pub.sticky = sticky
+
         if origin_url:
                 # For compatibility with old -O behaviour, treat -O as a wipe
                 # of existing origins and add the new one.
@@ -2029,7 +2133,7 @@
                         # here.
                 except api_errors.PublisherError, e:
                         error(e, cmd="set-publisher")
-                        return 1
+                        return EXIT_OOPS
 
         for entry in (("mirror", add_mirrors, remove_mirrors), ("origin",
             add_origins, remove_origins)):
@@ -2045,7 +2149,7 @@
                 except (api_errors.PublisherError,
                     api_errors.CertificateError), e:
                         error(e, cmd="set-publisher")
-                        return 1
+                        return EXIT_OOPS
 
         # None is checked for here so that a client can unset a ssl_cert or
         # ssl_key by using -k "" or -c "".
@@ -2079,7 +2183,7 @@
                 except (api_errors.PublisherError,
                     api_errors.CertificateError), e:
                         error(e, cmd="set-publisher")
-                        return 1
+                        return EXIT_OOPS
 
         try:
                 if new_pub:
@@ -2091,24 +2195,31 @@
         except api_errors.CatalogRefreshException, e:
                 text = "Could not refresh the catalog for %s"
                 error(_(text) % pub)
-                return 1
+                return EXIT_OOPS
         except api_errors.InvalidDepotResponseException, e:
                 error(_("The origin URIs for '%(pubname)s' do not appear to "
                     "point to a valid pkg server.\nPlease check the server's "
                     "address and client's network configuration."
                     "\nAdditional details:\n\n%(details)s") %
                     { "pubname": pub.prefix, "details": e })
-                return 1
+                return EXIT_OOPS
         except api_errors.PermissionsException, e:
                 # Prepend a newline because otherwise the exception will
                 # be printed on the same line as the spinner.
                 error("\n" + str(e))
-                return 1
+                return EXIT_OOPS
 
         if preferred:
                 api_inst.set_preferred_publisher(prefix=pub.prefix)
 
-        return 0
+        if search_before:
+                api_inst.set_pub_search_before(pub.prefix, search_before)
+
+        if search_after:
+                api_inst.set_pub_search_after(pub.prefix, search_after)
+
+
+        return EXIT_OK
 
 def publisher_unset(img, args):
         """pkg unset-publisher publisher ..."""
@@ -2119,7 +2230,7 @@
 
         api_inst = __api_alloc(img)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         errors = []
         for name in args:
@@ -2129,17 +2240,17 @@
                     api_errors.PublisherError), e:
                         errors.append((name, e))
 
-        retcode = 0
+        retcode = EXIT_OK
         if errors:
                 if len(errors) == len(args):
                         # If the operation failed for every provided publisher
                         # prefix or alias, complete failure occurred.
-                        retcode = 1
+                        retcode = EXIT_OOPS
                 else:
                         # If the operation failed for only some of the provided
                         # publisher prefixes or aliases, then partial failure
                         # occurred.
-                        retcode = 3
+                        retcode = EXIT_PARTIAL
 
                 txt = ""
                 for name, err in errors:
@@ -2155,20 +2266,21 @@
         """pkg publishers"""
         omit_headers = False
         preferred_only = False
-        inc_disabled = False
-
-        opts, pargs = getopt.getopt(args, "HPa")
+        inc_disabled = True
+
+        # 'a' is left over
+        opts, pargs = getopt.getopt(args, "HPan")
         for opt, arg in opts:
                 if opt == "-H":
                         omit_headers = True
                 if opt == "-P":
                         preferred_only = True
-                if opt == "-a":
-                        inc_disabled = True
+                if opt == "-n":
+                        inc_disabled = False
 
         api_inst = __api_alloc(img, quiet=True)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         cert_cache = {}
         def get_cert_info(ssl_cert):
@@ -2207,7 +2319,7 @@
 
                 return cert_cache[ssl_cert]
 
-        retcode = 0
+        retcode = EXIT_OK
         if len(pargs) == 0:
                 fmt = "%-24s %-12s %-8s %-8s %s"
                 if not omit_headers:
@@ -2223,13 +2335,32 @@
                             if inc_disabled or not p.disabled
                         ]
 
+                # if more than one, list in publisher search order
+                if len(pubs) > 1:
+                        so = api_inst.get_pub_search_order()
+                        pub_dict = dict([(p.prefix, p) for p in pubs])
+                        pubs = [
+                                pub_dict[name]
+                                for name in so
+                                if name in pub_dict
+                                ]
                 for p in pubs:
                         pfx = p.prefix
                         pstatus = ""
+
+                        if not p.sticky:
+                                pstatus_list = [_("non-sticky")]
+                        else:
+                                pstatus_list = []
+
                         if not preferred_only and p == pref_pub:
-                                pstatus = _("(preferred)")
+                                pstatus_list.append(_("preferred"))
                         if p.disabled:
-                                pstatus = _("(disabled)")
+                                pstatus_list.append(_("disabled"))
+                        if pstatus_list:
+                                pstatus = "(%s)" % ", ".join(pstatus_list)
+                        else:
+                                pstatus = ""
 
                         # Only show the selected repository's information in
                         # summary view.
@@ -2244,7 +2375,7 @@
                                     uri))
         else:
                 def display_ssl_info(uri):
-                        retcode = 0
+                        retcode = EXIT_OK
                         c = get_cert_info(uri.ssl_cert)
                         msg(_("              SSL Key:"), uri.ssl_key)
                         msg(_("             SSL Cert:"), uri.ssl_cert)
@@ -2253,7 +2384,7 @@
                                 return retcode
 
                         if c["errors"]:
-                                retcode = 1
+                                retcode = EXIT_OOPS
 
                         for e in c["errors"]:
                                 logger.error("\n" + str(e) + "\n")
@@ -2271,13 +2402,13 @@
                                 msg(_("           Origin URI:"), uri)
                                 rval = display_ssl_info(uri)
                                 if rval == 1:
-                                        retcode = 3
+                                        retcode = EXIT_PARTIAL
 
                         for uri in r.mirrors:
                                 msg(_("           Mirror URI:"), uri)
                                 rval = display_ssl_info(uri)
                                 if rval == 1:
-                                        retcode = 3
+                                        retcode = EXIT_PARTIAL
                         return retcode
 
                 for name in pargs:
@@ -2321,7 +2452,7 @@
         if propname == "preferred-publisher":
                 error(_("set-publisher must be used to change the preferred "
                     "publisher"), cmd="set-property")
-                return 1
+                return EXIT_OOPS
 
         try:
                 img.set_property(propname, propvalue)
@@ -2329,8 +2460,8 @@
                 # Prepend a newline because otherwise the exception
                 # will be printed on the same line as the spinner.
                 error("\n" + str(e), cmd="set-property")
-                return 1
-        return 0
+                return EXIT_OOPS
+        return EXIT_OK
 
 def property_unset(img, args):
         """pkg unset-property propname ..."""
@@ -2349,21 +2480,21 @@
                 if p == "preferred-publisher":
                         error(_("set-publisher must be used to change the "
                             "preferred publisher"), cmd="unset-property")
-                        return 1
+                        return EXIT_OOPS
 
                 try:
                         img.delete_property(p)
                 except KeyError:
                         error(_("no such property: %s") % p,
                             cmd="unset-property")
-                        return 1
+                        return EXIT_OOPS
                 except api_errors.PermissionsException, e:
                         # Prepend a newline because otherwise the exception
                         # will be printed on the same line as the spinner.
                         error("\n" + str(e), cmd="unset-property")
-                        return 1
-
-        return 0
+                        return EXIT_OOPS
+
+        return EXIT_OK
 
 def property_list(img, args):
         """pkg property [-H] [propname ...]"""
@@ -2377,7 +2508,7 @@
         for p in pargs:
                 if not img.has_property(p):
                         error(_("property: no such property: %s") % p)
-                        return 1
+                        return EXIT_OOPS
 
         if not pargs:
                 pargs = list(img.properties())
@@ -2390,8 +2521,74 @@
         for p in pargs:
                 msg(fmt % (p, img.get_property(p)))
 
-        return 0
-
+        return EXIT_OK
+
+def variant_list(img, args):
+        """pkg variant [-H] [<variant_spec>]"""
+
+        omit_headers = False
+
+        opts, pargs = getopt.getopt(args, "H")
+
+        for opt, arg in opts:
+                if opt == "-H":
+                        omit_headers = True
+
+        variants = img.get_variants()
+
+        for p in pargs:
+                if p not in variants:
+                        error(_("no such variant: %s") % p, cmd="variant")
+                        return EXIT_OOPS
+
+        if not pargs:
+                pargs = variants.keys()
+
+        width = max(max([len(p) for p in pargs]), 8)
+        fmt = "%%-%ss %%s" % width
+        if not omit_headers:
+                msg(fmt % ("VARIANT", "VALUE"))
+
+        for p in pargs:
+                msg(fmt % (p, variants[p]))
+
+        return EXIT_OK
+        
+def facet_list(img, args):
+        """pkg facet [-H] [<facet_spec>]"""
+
+        omit_headers = False
+
+        opts, pargs = getopt.getopt(args, "H")
+
+        for opt, arg in opts:
+                if opt == "-H":
+                        omit_headers = True
+
+        facets = img.get_facets()
+
+        for i, p in enumerate(pargs[:]):
+                if not p.startswith("facet."):
+                        pargs[i] = "facet." + p
+
+        if not pargs:
+                pargs = facets.keys()
+
+        if pargs:
+                width = max(max([len(p) for p in pargs]), 8)
+        else:
+                width = 8
+
+        fmt = "%%-%ss %%s" % width
+
+        if not omit_headers:
+                msg(fmt % ("FACETS", "VALUE"))
+
+        for p in pargs:
+                msg(fmt % (p, facets[p]))
+
+        return EXIT_OK
+       
 def image_create(args):
         """Create an image of the requested kind, at the given path.  Load
         catalog for initial publisher for convenience.
@@ -2411,9 +2608,10 @@
         ssl_key = None
         ssl_cert = None
         variants = {}
+        facets = pkg.facet.Facets()
 
         opts, pargs = getopt.getopt(args, "fFPUza:g:m:p:k:c:",
-            ["force", "full", "partial", "user", "zone", "authority=",
+            ["force", "full", "partial", "user", "zone", "authority=", "facet=",
                 "mirror=", "origin=", "publisher=", "no-refresh", "variant="])
 
         for opt, arg in opts:
@@ -2463,6 +2661,19 @@
                                     cmd="image-create")
                         variants[v_name] = v_value
 
+                if opt == "--facet":
+
+                        allow = {"TRUE":True, "FALSE":False}
+                        f_name, f_value = arg.split("=",1)
+                        if not f_name.startswith("facet."):
+                                f_name = "facet.%s" % f_name
+                        if f_value.upper() not in allow:
+                                usage(_("Facet arguments must be "
+                                    "of the form 'facet..=[True|False]'"),
+                                    cmd="image-create")
+                        facets[f_name]= allow[f_value.upper()]
+
+
         if len(pargs) != 1:
                 usage(_("only one image directory path may be specified"),
                     cmd="image-create")
@@ -2494,22 +2705,22 @@
         if pub_name.startswith(fmri.PREF_PUB_PFX):
                 error(_("a publisher's prefix may not start with the text: %s"
                         % fmri.PREF_PUB_PFX), cmd="image-create")
-                return 1
+                return EXIT_OOPS
 
         if not misc.valid_pub_prefix(pub_name):
                 error(_("publisher prefix contains invalid characters"),
                     cmd="image-create")
-                return 1
+                return EXIT_OOPS
 
         global __img
         try:
                 progtrack = get_tracker()
                 __img = img = image.Image(root=image_dir, imgtype=imgtype,
                     should_exist=False, progtrack=progtrack, force=force)
-                img.set_attrs(is_zone, pub_name, mirrors=mirrors,
+                img.set_attrs(is_zone, pub_name, facets=facets,
                     origins=origins, ssl_key=ssl_key, ssl_cert=ssl_cert,
                     refresh_allowed=refresh_catalogs, progtrack=progtrack,
-                    variants=variants)
+                    variants=variants, mirrors=mirrors)
                 img.cleanup_downloads()
         except OSError, e:
                 # Ensure messages are displayed after the spinner.
@@ -2518,16 +2729,18 @@
                 error(_("cannot create image at %(image_dir)s: %(reason)s") %
                     { "image_dir": image_dir, "reason": e.args[1] },
                     cmd="image-create")
-                return 1
+                return EXIT_OOPS
+                
         except api_errors.PublisherError, e:
                 error(e, cmd="image-create")
-                return 1
+                return EXIT_OOPS
+
         except api_errors.PermissionsException, e:
                 # Ensure messages are displayed after the spinner.
                 img.cleanup_downloads()
                 logger.error("")
                 error(e, cmd="image-create")
-                return 1
+                return EXIT_OOPS
         except api_errors.InvalidDepotResponseException, e:
                 # Ensure messages are displayed after the spinner.
                 img.cleanup_downloads()
@@ -2539,19 +2752,19 @@
                     { "pub_url": pub_url, "error": e },
                     cmd="image-create")
                 print_proxy_config()
-                return 1
+                return EXIT_OOPS
         except api_errors.CatalogRefreshException, cre:
                 # Ensure messages are displayed after the spinner.
                 img.cleanup_downloads()
                 error("", cmd="image-create")
                 if display_catalog_failures(cre) == 0:
-                        return 1
+                        return EXIT_OOPS
                 else:
-                        return 3
+                        return EXIT_PARTIAL
         except api_errors.ImageCreationException, e:
                 error(e, cmd="image-create")
-                return 1
-        return 0
+                return EXIT_OOPS
+        return EXIT_OK
 
 def rebuild_index(img, pargs):
         """pkg rebuild-index
@@ -2565,22 +2778,22 @@
 
         api_inst = __api_alloc(img)
         if api_inst == None:
-                return 1
+                return EXIT_OOPS
 
         try:
                 api_inst.rebuild_search_index()
         except api_errors.CorruptedIndexException:
                 error(INCONSISTENT_INDEX_ERROR_MESSAGE, cmd="rebuild-index")
-                return 1
+                return EXIT_OOPS
         except api_errors.ProblematicPermissionsIndexException, e:
                 error(str(e) + PROBLEMATIC_PERMISSIONS_ERROR_MESSAGE,
                     cmd="rebuild-index")
-                return 1
+                return EXIT_OOPS
         except api_errors.MainDictParsingException, e:
                 error(str(e), cmd="rebuild-index")
-                return 1
+                return EXIT_OOPS
         else:
-                return 0
+                return EXIT_OK
 
 def history_list(img, args):
         """Display history about the current image.
@@ -2606,7 +2819,7 @@
 
         if not os.path.exists(img.history.path):
                 # Nothing to display.
-                return 0
+                return EXIT_OK
 
         for entry in sorted(os.listdir(img.history.path)):
                 # Load the history entry.
@@ -2615,7 +2828,7 @@
                             filename=entry)
                 except api_errors.PermissionsException, e:
                         error(e, cmd="history")
-                        return 1
+                        return EXIT_OOPS
                 except history.HistoryLoadException, e:
                         if e.parse_failure:
                                 # Ignore corrupt entries.
@@ -2677,8 +2890,15 @@
                         msg("%-19s %-25s %-15s %s" % (start_time,
                             he.operation_name, he.client_name, outcome))
 
-        return 0
-
+        return EXIT_OK
+
+def history_purge(img, pargs):
+        """Purge image history"""
+        ret_code = img.history.purge()
+        if ret_code == EXIT_OK:
+                msg(_("History purged."))
+        return ret_code
+                
 def print_proxy_config():
         """If the user has configured http_proxy or https_proxy in the
         environment, print out the values.  Some transport errors are
@@ -2715,12 +2935,16 @@
         show_usage = False
         for opt, arg in opts:
                 if opt == "-D" or opt == "--debug":
-                        try:
-                                key, value = arg.split("=", 1)
-                        except (AttributeError, ValueError):
-                                usage(_("%(opt)s takes argument of form "
-                                    "name=value, not %(arg)s") % { "opt":  opt,
-                                    "arg": arg })
+                        if arg in ["plan", "transport"]:
+                                key = arg
+                                value = "True"
+                        else:
+                                try:
+                                        key, value = arg.split("=", 1)
+                                except (AttributeError, ValueError):
+                                        usage(_("%(opt)s takes argument of form "
+                                            "name=value, not %(arg)s") % { "opt":  opt,
+                                            "arg": arg })
                         DebugValues.set_value(key, value)
                 elif opt == "-R":
                         mydir = arg
@@ -2764,7 +2988,7 @@
                         usage(_("version: command does not take operands " \
                             "('%s')") % " ".join(pargs))
                 msg(pkg.VERSION)
-                return 0
+                return EXIT_OK
 
         provided_image_dir = True
         pkg_image_used = False
@@ -2789,7 +3013,7 @@
                 error(_("Could not find image.  Use the -R option or set "
                     "$PKG_IMAGE to point\nto an image, or change the working "
                     "directory to one inside the image."))
-                return 1
+                return EXIT_OOPS
 
         try:
                 __img = img = image.Image(mydir, provided_image_dir)
@@ -2801,58 +3025,44 @@
                         error(_(m) % e.user_dir)
                 else:
                         error(_("No image found."))
-                return 1
-
+                return EXIT_OOPS
+
+        cmds = {
+                "authority"        : publisher_list,
+                "change-facet"     : change_facet,
+                "change-variant"   : change_variant,
+                "contents"         : list_contents,
+                "facet"            : facet_list,
+                "fix"              : fix_image,
+                "freeze"           : freeze,
+                "history"          : history_list,
+                "image-update"     : image_update,
+                "info"             : info,
+                "install"          : install,
+                "list"             : list_inventory,
+                "property"         : property_list,
+                "publisher"        : publisher_list,
+                "purge-history"    : history_purge,
+                "rebuild-index"    : rebuild_index,
+                "refresh"          : publisher_refresh,
+                "search"           : search,
+                "set-authority"    : publisher_set,
+                "set-property"     : property_set,
+                "set-publisher"    : publisher_set,
+                "unfreeze"         : unfreeze,
+                "uninstall"        : uninstall,
+                "unset-authority"  : publisher_unset,
+                "unset-property"   : property_unset,
+                "unset-publisher"  : publisher_unset,
+                "variant"          : variant_list,
+                "verify"           : verify_image
+               }
+
+        callable = cmds.get(subcommand, None)
+        if not callable:
+                usage(_("unknown subcommand '%s'") % subcommand)
         try:
-                if subcommand == "refresh":
-                        return publisher_refresh(img, pargs)
-                elif subcommand == "list":
-                        return list_inventory(img, pargs)
-                elif subcommand == "image-update":
-                        return image_update(img, pargs)
-                elif subcommand == "change-variant":
-                        return change_variant(img, pargs)
-                elif subcommand == "install":
-                        return install(img, pargs)
-                elif subcommand == "uninstall":
-                        return uninstall(img, pargs)
-                elif subcommand == "freeze":
-                        return freeze(img, pargs)
-                elif subcommand == "unfreeze":
-                        return unfreeze(img, pargs)
-                elif subcommand == "search":
-                        return search(img, pargs)
-                elif subcommand == "info":
-                        return info(img, pargs)
-                elif subcommand == "contents":
-                        return list_contents(img, pargs)
-                elif subcommand == "fix":
-                        return fix_image(img, pargs)
-                elif subcommand == "verify":
-                        return verify_image(img, pargs)
-                elif subcommand in ("set-authority", "set-publisher"):
-                        return publisher_set(img, pargs)
-                elif subcommand in ("unset-authority", "unset-publisher"):
-                        return publisher_unset(img, pargs)
-                elif subcommand in ("authority", "publisher"):
-                        return publisher_list(img, pargs)
-                elif subcommand == "set-property":
-                        return property_set(img, pargs)
-                elif subcommand == "unset-property":
-                        return property_unset(img, pargs)
-                elif subcommand == "property":
-                        return property_list(img, pargs)
-                elif subcommand == "history":
-                        return history_list(img, pargs)
-                elif subcommand == "purge-history":
-                        ret_code = img.history.purge()
-                        if ret_code == 0:
-                                msg(_("History purged."))
-                        return ret_code
-                elif subcommand == "rebuild-index":
-                        return rebuild_index(img, pargs)
-                else:
-                        usage(_("unknown subcommand '%s'") % subcommand)
+                return callable(img, pargs)
 
         except getopt.GetoptError, e:
                 if e.opt in ("help", "?"):
@@ -2883,7 +3093,7 @@
                         if __img:
                                 __img.history.abort(RESULT_FAILED_OUTOFMEMORY)
                         error("\n" + misc.out_of_memory())
-                        __ret = 1
+                        __ret = EXIT_OOPS
         except SystemExit, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_UNKNOWN)
@@ -2893,17 +3103,17 @@
                         __img.history.abort(RESULT_CANCELED)
                 # We don't want to display any messages here to prevent
                 # possible further broken pipe (EPIPE) errors.
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.CertificateError, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_CONFIGURATION)
                 error(__e)
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.PublisherError, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_BAD_REQUEST)
                 error(__e)
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.TransportError, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_TRANSPORT)
@@ -2912,7 +3122,7 @@
                     "operation."))
                 logger.error(_("Details follow:\n\n%s") % __e)
                 print_proxy_config()
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.InvalidDepotResponseException, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_TRANSPORT)
@@ -2923,7 +3133,7 @@
                     "attempt to contact the server using a web browser."))
                 logger.error(_("\nAdditional details:\n\n%s") % __e)
                 print_proxy_config()
-                __ret = 1
+                __ret = EXIT_OOPS
         except history.HistoryLoadException, __e:
                 # Since a history related error occurred, discard all
                 # information about the current operation(s) in progress.
@@ -2932,7 +3142,7 @@
                 error(_("An error was encountered while attempting to load "
                     "history information\nabout past client operations."))
                 error(__e)
-                __ret = 1
+                __ret = EXIT_OOPS
         except history.HistoryStoreException, __e:
                 # Since a history related error occurred, discard all
                 # information about the current operation(s) in progress.
@@ -2942,7 +3152,7 @@
                     "information about the\ncurrent operation in client "
                     "history."))
                 error(__e)
-                __ret = 1
+                __ret = EXIT_OOPS
         except history.HistoryPurgeException, __e:
                 # Since a history related error occurred, discard all
                 # information about the current operation(s) in progress.
@@ -2951,7 +3161,7 @@
                 error(_("An error was encountered while attempting to purge "
                     "client history."))
                 error(__e)
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.VersionException, __e:
                 if __img:
                         __img.history.abort(RESULT_FAILED_UNKNOWN)
@@ -2961,9 +3171,9 @@
                     {'client': __e.received_version,
                      'api': __e.expected_version
                     })
-                __ret = 1
+                __ret = EXIT_OOPS
         except api_errors.WrapSuccessfulIndexingException, __e:
-                __ret = 0
+                __ret = EXIT_OK
         except api_errors.WrapIndexingException, __e:
                 def _wrapper():
                         raise __e.wrapped
@@ -2977,7 +3187,7 @@
                     "successfuly.")
                 error(s)
         except api_errors.ReadOnlyFileSystemException, __e:
-                __ret = 1
+                __ret = EXIT_OOPS
         except:
                 if __img:
                         __img.history.abort(RESULT_FAILED_UNKNOWN)
--- a/src/gui/modules/installupdate.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/gui/modules/installupdate.py	Wed Nov 18 15:53:48 2009 -0800
@@ -237,7 +237,7 @@
                 if self.ipkg_ipkgui_list == None:
                         return True
                 upgrade_needed = self.api_o.plan_install(
-                    self.ipkg_ipkgui_list, filters = [])
+                    self.ipkg_ipkgui_list)
                 return not upgrade_needed
 
         def __proceed_with_stages(self):
@@ -647,8 +647,7 @@
                 stuff_to_do = False
                 if self.action == enumerations.INSTALL_UPDATE:
                         stuff_to_do = self.api_o.plan_install(
-                            self.list_of_packages, refresh_catalogs = False,
-                            filters = [])
+                            self.list_of_packages, refresh_catalogs = False)
                 elif self.action == enumerations.REMOVE:
                         plan_uninstall = self.api_o.plan_uninstall
                         stuff_to_do = \
--- a/src/gui/modules/misc_non_gui.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/gui/modules/misc_non_gui.py	Wed Nov 18 15:53:48 2009 -0800
@@ -31,7 +31,7 @@
 
 #The current version of the Client API the PM, UM and
 #WebInstall GUIs have been tested against and are known to work with.
-CLIENT_API_VERSION = 23
+CLIENT_API_VERSION = 24
 
 def get_cache_dir(api_object):
         img = api_object.img
--- a/src/man/pkg.1.txt	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/man/pkg.1.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -21,13 +21,20 @@
      /usr/bin/pkg refresh [--full] [publisher ...]
 
      /usr/bin/pkg image-create [-fFPUz] [--force] [--full|--partial|--user]
-         [--zone] [-k ssl_key] [-c ssl_cert] [--no-refresh]
-         [-g|--origin ...] [-m|--mirror ...] (-p|--publisher) name=uri dir
+         [--zone] [-k ssl_key] [-c ssl_cert] [--no-refresh] 
+         [--variant <variant_spec>=<instance>] [-g|--origin ...] [-m|--mirror ...]
+         [--facet <facet_spec>=[True|False]] (-p|--publisher) name=uri dir
+
      /usr/bin/pkg image-update [-fnvq] [--be-name name] [--no-refresh]
          [--no-index]
 
+     /usr/bin/pkg variant [<variant_spec>]
      /usr/bin/pkg change-variant [-nvq] [--be-name name]
-         <variant_spec>=<instance> [<variant_spec>=<instance> ...]
+         <variant_spec>=<instance> ...
+
+     /usr/bin/pkg facet [<facet_spec>] 
+     /usr/bin/pkg change-facet [-nvq] [--be-name name] 
+         <facet_spec>=[True|False|None] ...
 
      /usr/bin/pkg set-property propname propvalue
      /usr/bin/pkg unset-property propname ...
@@ -38,9 +45,11 @@
          [-G origin_to_remove | --remove-origin=origin_to_remove ...]
          [-m mirror_to_add | --add-mirror=mirror_to_add ...]
          [-M mirror_to_remove | --remove-mirror=mirror_to_remove ...]
+         [--search-before publisher] [--search-after publisher]
+         [--sticky] [--non-sticky]
          [--enable] [--disable] [--no-refresh] [--reset-uuid] publisher
      /usr/bin/pkg unset-publisher publisher ...
-     /usr/bin/pkg publisher [-HPa] [publisher ...]
+     /usr/bin/pkg publisher [-HPn] [publisher ...]
 
      /usr/bin/pkg history [-Hl]
      /usr/bin/pkg purge-history
@@ -103,6 +112,8 @@
 
      image-create [-fFPUz] [--force] [--full|--partial|--user] [--zone]
        [-k ssl_key] [-c ssl_cert] [--no-refresh] [-g|--origin ...]
+       [--variant <variant_spec>=<instance>] ...
+       [--facet <facet_spec>=[True|False]] ...
        [-m|--mirror ...] (-p|--publisher) name=origin_uri dir
           Create, at location given by dir, an image suitable for package
           operations.  The default image type is user, as given by the -U
@@ -129,6 +140,10 @@
           the image's publishers to retrieve publisher metadata (e.g.
           catalogs).
 
+          With --variant, set the specified variant to the indicated value.
+
+          With --facet, set the specified facet to the indicated value.
+
      image-update [-fnvq] [--be-name name] [--no-index] [--no-refresh]
           Update all installed packages in the current image to the
           latest available version.
@@ -370,12 +385,16 @@
 
           The -H option causes the headers to be omitted.
 
+     variant [<variant_spec> ...]
+          Display the current values of all variants, or, with arguments,
+          display only the specified variants.
+
      change-variant [-nvq] [--be-name name]
        <variant_spec>=<instance> [<variant_spec>=<instance> ...]
           Change the specified variants in the current image.
 
-          With the -n option, execute the requested operation but make no
-          persistent changes to the image.
+          With the -n option, plan the requested operation but make
+          no actual changes.
 
           With the -v option, issue verbose progress messages during the
           requested operation.  With the -q option, be completely silent.
@@ -384,6 +403,29 @@
           argument given.  This option is only valid if a new boot environment
           is created during image update. See also beadm(1m).
 
+     facet [<facet_spec> ...]
+          Without arguments, displays the current values of all facets.  With
+          arguments, evaluate whether each facet would be true or false and
+          print the result.
+
+     change-facet [-nvq] [--be-name name]
+       <facet_spec>=[True|False|None] ...
+
+          Change the specified facets in the current image.
+
+          With the -n option, plan the requested operation but make
+          no actual changes.
+
+          With the -v option, issue verbose progress messages during the
+          requested operation.  With the -q option, be completely silent.
+
+          With --be-name, rename the newly created boot environment to be the
+          argument given.  This option is only valid if a new boot environment
+          is created during the operation. See also beadm(1m).
+
+          Facets may be set to True or False.  Setting one to None removes
+          that facet specification from the current image.
+     
      fix [pkg_fmri_pattern ...]
           Fix any errors reported by pkg verify.
 
@@ -408,20 +450,36 @@
        [-G origin_to_remove | --remove-origin=origin_to_remove ...]
        [-m mirror_to_add | --add-mirror=mirror_to_add]
        [-M mirror_to_remove | --remove-mirror=mirror_to_remove]
-       [--enable] [--disable] [--no-refresh] [--reset-uuid] publisher
+       [--search-before publisher] [--search-after publisher]
+       [--sticky] [--non-sticky] [--enable] [--disable] [--no-refresh] 
+       [--reset-uuid] publisher
+
           Update an existing publisher or add an additional package
+          publisher.  If no options affecting search order are specified,
+          new publishers are appended to the search order and are thus
+          searched last.
+
+          With -P, set the specified publisher as the preferred
+          publisher, i.e.  first in the search order.  When installing
+          new packages, this publisher will be searched first.
+          Updates to already installed packages will come from the
+          same publisher that originally provided the package so long
+          as that publisher remains sticky.
+
+          With --non-sticky, specify that higher ranked publishers than
+          this one may provide updates to packages originally installed
+          from this publisher.
+
+          With --sticky, return to the default behavior of always sourcing
+          updates from the same publisher that provided the package originally.
+
+          With --search-before, alter the publisher search order so that 
+          the publisher being modified is now searched before the specified
           publisher.
 
-          With -P, set the specified publisher as the preferred publisher.  When
-          installing new packages, the preferred publisher will be checked first
-          when determining which package repositories should be used for the
-          retrieval of package content and metadata during package operations.
-          When updating existing packages, if a package was installed from a
-          publisher that was preferred at the time of install, then updates will
-          be obtained from the current preferred publisher.  For packages that
-          were installed from a publisher that was not preferred at the time of
-          install, updates will always be obtained from the original publisher
-          regardless of the current, preferred publisher.
+          With --search-after, alter the publisher search order so that 
+          the publisher being modified is now searched after the specified
+          publisher.
 
           With -c and -k, specify client SSL certificate and key respectively.
 
@@ -454,14 +512,15 @@
           Remove the configuration associated with the given publisher
           or publishers.
 
-     publisher [-HPa] [publisher ...]
-          Display publisher information.  With no arguments, display the
-          list of enabled publishers, their origin URIs, and mirrors.  If
-          specific publishers are requested, display the configuration
-          values, including mirrors, associated with those publishers.  With
-          -H, omit the headers from the listing.  With -P, display only the
-          preferred publisher. With -a, display all publishers (including those
-          that are disabled).
+     publisher [-HPn] [publisher ...]
+          Display publisher information.  With no arguments, display
+          the list of all publishers, their origin URIs, and mirrors
+          in search order preference.  If specific publishers are
+          requested, display the configuration values, including
+          mirrors, associated with those publishers.  With -H, omit
+          the headers from the listing.  With -P, display only the
+          preferred publisher. With -n, display only enabled
+          publishers.
 
      history [-Hl]
           Displays the command history of the applicable image.  With -H, omit
@@ -574,15 +633,20 @@
 EXIT STATUS
      The following exit values are returned:
 
-     0     Everything worked.
+     0     Command succeeded.
 
-     1     Something bad happened.
+     1     An error occurred.
 
      2     Invalid command line options were specified.
 
      3     Multiple operations were requested, but only some of them
            succeeded.
 
+     4     No changes were made - nothing to do.
+
+     5     The requested operation cannot be performed on a live
+           image.
+
 FILES
      var/pkg                    Metadata location for full and partial
                                 images, relative to image's root
--- a/src/modules/actions/depend.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/actions/depend.py	Wed Nov 18 15:53:48 2009 -0800
@@ -31,37 +31,32 @@
 relationship between the package containing the action and another package.
 """
 
-import urllib
 import generic
 import pkg.fmri as fmri
 import pkg.version
-import pkg.client.constraint as constraint
 
 class DependencyAction(generic.Action):
         """Class representing a dependency packaging object.  The fmri attribute
         is expected to be the pkg FMRI that this package depends on.  The type
-        attribute is one of
+        attribute is one of these:
 
-        optional - dependency if present activates additional functionality,
-                   but is not needed
-
-        require - dependency is needed for correct function
+        optional - optional dependency on minimum version of other package. In
+        other words, if installed, other packages must be at least at specified
+        version level.
 
-        transfer - dependency on minimum version of other package that donated
-        components to this package at earlier version.  Other package need not
-        be installed, but if it is, it must be at the specified version.  Effect
-        is the same as optional, but the semantics are different.  OpenSolaris
-        doesn't use these for bundled packages, as incorporations are preferred.
+        require -  dependency on minimum version of other package is needed 
+        for correct function of this package.
 
-        incorporate - optional freeze at specified version
+        incorporate - optional dependency on precise version of other package; 
+        non-specified portion of version is free to float.
 
-        exclude - package non-functional if dependent package is present
-        (unimplemented) """
+        exclude - package may not be installed together with named version 
+        or higher - reverse logic of require."""
 
         name = "depend"
         attributes = ("type", "fmri")
         key_attr = "fmri"
-        known_types = ("optional", "require", "transfer", "incorporate")
+        known_types = ("optional", "require", "exclude", "incorporate")
 
         def __init__(self, data=None, **attrs):
                 generic.Action.__init__(self, data, **attrs)
@@ -164,80 +159,38 @@
                 #           (fmri_string, cleanfmri)
                 self.attrs["fmri"] = cleanfmri
 
-        def get_constrained_fmri(self, image):
-                """ returns fmri of incorporation pkg or None if not
-                an incorporation"""
-
-                ctype = self.attrs["type"]
-                if ctype != "incorporate":
-                        return None
-
-                pkgfmri = self.attrs["fmri"]
-                f = fmri.PkgFmri(pkgfmri, image.attrs["Build-Release"])
-                image.fmri_set_default_publisher(f)
-
-                return f
-
-        def parse(self, image, source_name):
-                """decode depend action into fmri & constraint"""
-                ctype = self.attrs["type"]
-                fmristr = self.attrs["fmri"]
-                f = fmri.PkgFmri(fmristr, image.attrs["Build-Release"])
-                min_ver = f.version
-
-                if min_ver == None:
-                        min_ver = pkg.version.Version("0",
-                            image.attrs["Build-Release"])
-
-                name = f.get_name()
-                max_ver = None
-                presence = None
-
-                if ctype == "require":
-                        presence = constraint.Constraint.ALWAYS
-                elif ctype == "exclude":
-                        presence = constraint.Constraint.NEVER
-                elif ctype == "incorporate":
-                        presence = constraint.Constraint.MAYBE
-                        max_ver = min_ver
-                elif ctype == "optional":
-                        # Must be done here to avoid circular dependency
-                        # problems during import.
-                        from pkg.client.imageconfig import REQUIRE_OPTIONAL
-                        if image.cfg_cache.get_policy(REQUIRE_OPTIONAL):
-                                presence = constraint.Constraint.ALWAYS
-                        else:
-                                presence = constraint.Constraint.MAYBE
-                elif ctype == "transfer":
-                        presence = constraint.Constraint.MAYBE
-
-                assert presence
-
-                return f, constraint.Constraint(name, min_ver, max_ver,
-                    presence, source_name)
-
         def verify(self, image, **args):
                 # XXX Exclude and range between min and max not yet handled
 
+                def __min_version():
+                        return pkg.version.Version("0", image.attrs["Build-Release"])
+
                 ctype = self.attrs["type"]
+                pfmri = fmri.PkgFmri(self.attrs["fmri"], image.attrs["Build-Release"])
 
                 if ctype not in self.known_types:
                         return ["Unknown type (%s) in depend action" % ctype]
 
-                pkgfmri = self.attrs["fmri"]
-                f = fmri.PkgFmri(pkgfmri, image.attrs["Build-Release"])
+                installed_version = image.get_version_installed(pfmri)
 
-                installed_version = image.get_version_installed(f)
-
-                min_fmri, cons = self.parse(image, "")
+                min_fmri = None
+                max_fmri = None
 
-                if cons.max_ver:
-                        max_fmri = min_fmri.copy()
-                        max_fmri.version = cons.max_ver
-                else:
-                        max_fmri = None
-
-                required = (cons.presence == constraint.Constraint.ALWAYS)
+                if ctype == "require":
+                        required = True
+                        min_fmri = pfmri
+                elif ctype == "incorporate":
+                        max_fmri = pfmri
+                        min_fmri = pfmri
+                        required = False
+                elif ctype == "optional":
+                        required = False
+                        min_fmri = pfmri
+                elif ctype == "exclude":
+                        required = False
+                        max_fmri = pfmri
+                        min_fmri = pfmri.copy()
+                        min_fmri.version = __min_version()
 
                 if installed_version:
                         vi = installed_version.version
@@ -246,13 +199,14 @@
                             pkg.version.CONSTRAINT_NONE):
                                 return ["%s dependency %s is downrev (%s)" %
                                     (ctype, min_fmri, installed_version)]
-                        if max_fmri and vi > max_fmri.version and \
+                        if max_fmri and max_fmri.version and  \
+                            vi > max_fmri.version and \
                             not vi.is_successor(max_fmri.version,
                             pkg.version.CONSTRAINT_AUTO):
                                 return ["%s dependency %s is uprev (%s)" %
                                     (ctype, max_fmri, installed_version)]
                 elif required:
-                        return ["Required dependency %s is not installed" % f]
+                        return ["Required dependency %s is not installed" % pfmri]
 
                 return []
 
--- a/src/modules/cfgfiles.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/cfgfiles.py	Wed Nov 18 15:53:48 2009 -0800
@@ -458,54 +458,3 @@
         c["attributes"] = str
         return CfgFile.valuetostr(self, c)
 
-
-
-if __name__ == "__main__":
-
-    if 0:
-        pa = UserattrFile("/tmp")
-
-        a = pa.getvalue({"username" : "postgres"})
-
-        print a
-
-        attrdict = a["attributes"]
-
-        attrdict["profiles"].append("Wombat")
-        attrdict["profiles"] = list(set(attrdict["profiles"]))
-
-        pa.setvalue(a)
-
-        pa.writefile()
-
-
-        pa = UserattrFile("/tmp")
-
-        a = pa.getvalue({"username" : "postgres"})
-
-        print a
-
-        attrdict = a["attributes"]
-
-        p = set(attrdict["profiles"])
-        p.discard("Wombat")
-        attrdict["profiles"] = list(p)
-
-        pa.setvalue(a)
-
-        pa.writefile()
-
-    ftp = FtpusersFile("/tmp")
-    print ftp.getuser("root")
-    ftp.setuser("root", True)
-    print ftp.getuser("root")
-    ftp.writefile()
-
-
-    ftp = FtpusersFile("/tmp")
-    ftp.setuser("root", False)
-    print ftp.getuser("root")
-    ftp.writefile()
-
-    pw = PasswordFile("/tmp", lock=True)
-    pw.unlockfile()
--- a/src/modules/client/actuator.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/actuator.py	Wed Nov 18 15:53:48 2009 -0800
@@ -39,6 +39,8 @@
                 self.removal = {}
                 self.update =  {}
 
+        def __nonzero__(self):
+                return bool(self.install) or bool(self.removal) or bool(self.update)
 
         def scan_install(self, attrs):
                 self.__scan(self.install, attrs)
@@ -51,7 +53,7 @@
 
         def __scan(self, dictionary, attrs):
                 for a in set(attrs.keys()) & self.actuator_attrs:
-			dictionary.setdefault(a, set()).add(attrs[a])
+                        dictionary.setdefault(a, set()).add(attrs[a])
 
         def reboot_needed(self):
                 return False
@@ -115,11 +117,22 @@
                 self.cmd_path = ""
 
         def __str__(self):
+                merge = {}
+
+                for d in [self.removal, self.update, self.install]:
+                        for a in d.keys():
+                                for smf in d[a]:
+                                        merge.setdefault(a, set()).add(smf)
+
+                if self.reboot_needed():
+                        merge["reboot-needed"] = set(["true"])
+                else:
+                        merge["reboot-needed"] = set(["false"])
+
                 return "\n".join([
                     "  %16s: %s" % (fmri, smf)
-                    for d in [self.removal, self.update, self.install]
-                    for fmri in d
-                    for smf in d[fmri]
+                    for fmri in merge
+                    for smf in merge[fmri]
                     ])
 
         def reboot_needed(self):
@@ -128,10 +141,10 @@
 
         def exec_prep(self, image):
                 if not image.is_liveroot():
-                        dir = DebugValues.get_value("actuator_cmds_dir")
-                        if not dir:
+                        cmds_dir = DebugValues.get_value("actuator_cmds_dir")
+                        if not cmds_dir:
                                 return
-                        self.cmd_path = dir
+                        self.cmd_path = cmds_dir
                 self.do_nothing = False
 
         def exec_pre_actuators(self, image):
@@ -213,7 +226,7 @@
                 params = tuple(refresh_fmris)
 
                 if params:
-                    self.__call(args + params)
+                        self.__call(args + params)
 
                 for fmri in restart_fmris.copy():
                         if self.__smf_svc_is_disabled(fmri):
@@ -268,7 +281,7 @@
 
                 try:
                         buf = self.__call(args)
-                except NonzeroExitException, e:
+                except NonzeroExitException:
                         return {} # empty output == not installed
 
                 return dict([
@@ -288,7 +301,7 @@
                         buf = proc.stdout.readlines()
                         ret = proc.wait()
                 except OSError, e:
-                        raise RuntimeError, "cannot execute %s" % (args,)
+                        raise RuntimeError, "cannot execute %s: %s" % (args, e)
 
                 if ret != 0:
                         raise NonzeroExitException(args, ret, buf)
--- a/src/modules/client/api.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/api.py	Wed Nov 18 15:53:48 2009 -0800
@@ -43,13 +43,12 @@
 import pkg.misc as misc
 import pkg.p5i as p5i
 import pkg.search_errors as search_errors
-import pkg.variant as variant
 import pkg.nrlock
 
 from pkg.client.imageplan import EXECUTED_OK
 from pkg.client import global_settings
 
-CURRENT_API_VERSION = 23
+CURRENT_API_VERSION = 24
 CURRENT_P5I_VERSION = 1
 
 logger = global_settings.logger
@@ -88,7 +87,7 @@
                 canceled changes. It can raise VersionException and
                 ImageNotFoundException."""
 
-                compatible_versions = set([23])
+                compatible_versions = set([CURRENT_API_VERSION])
 
                 if version_id not in compatible_versions:
                         raise api_errors.VersionException(CURRENT_API_VERSION,
@@ -243,13 +242,11 @@
                 self.__activity_lock.release()
                 raise
 
-
-        def plan_install(self, pkg_list, filters, refresh_catalogs=True,
+        def plan_install(self, pkg_list, refresh_catalogs=True,
             noexecute=False, verbose=False, update_index=True):
                 """Contructs a plan to install the packages provided in
-                pkg_list.  pkg_list is a list of packages to install.  filters
-                is a list of filters to apply to the actions of the installed
-                packages.  refresh_catalogs controls whether the catalogs will
+                pkg_list.  pkg_list is a list of packages to install.  
+                refresh_catalogs controls whether the catalogs will
                 automatically be refreshed. noexecute determines whether the
                 history will be recorded after planning is finished.  verbose
                 controls whether verbose debugging output will be printed to the
@@ -268,7 +265,7 @@
                         self.__img.make_install_plan(pkg_list,
                             self.__progresstracker,
                             self.__check_cancelation, noexecute,
-                            filters=filters, verbose=verbose)
+                            verbose=verbose)
 
                         assert self.__img.imageplan
 
@@ -389,25 +386,10 @@
                                         # case; so proceed.
                                         pass
 
-                        # XXX For now, strip the publisher prefix from FMRIs for
-                        # packages that were installed from a publisher that was
-                        # preferred at the time of installation.  However, when
-                        # the image starts using prioritised publishers, this
-                        # will have to be removed.  Newer versions of packages
-                        # should only be a valid match if offered the same
-                        # publisher since equivalence cannot be determined by
-                        # package name alone.
-                        ppub = self.__img.get_preferred_publisher()
-                        pkg_list = [
-                            p.get_pkg_stem(anarchy=self.__img.is_pkg_preferred(
-                                p))
-                            for p in self.__img.gen_installed_pkgs()
-                        ]
-
-                        self.__img.make_install_plan(pkg_list,
-                            self.__progresstracker, self.__check_cancelation,
-                            noexecute, verbose=verbose, multimatch_ignore=True)
-
+                        self.__img.make_update_plan(self.__progresstracker,
+                            self.__check_cancelation, noexecute,
+                            verbose=verbose)
+                            
                         assert self.__img.imageplan
 
                         if self.__canceling:
@@ -433,13 +415,10 @@
                 res = not self.__img.imageplan.nothingtodo()
                 return res, opensolaris_image
 
-        def plan_change_variant(self, variants, noexecute=False,
-            verbose=False, be_name=None):
-                """Creates a plan to change the specified variants on an image.
-                There is option to refresh_catalogs since if we're changing
-                architectures, we have to download manifests that were
-                previously uncached.  noexecute determines whether the history
-                will be recorded after planning is finished.  verbose controls
+        def plan_change_varcets(self, variants=None, facets=None,
+            noexecute=False, verbose=False, be_name=None):
+                """Creates a plan to change the specified variants/facets on an image.
+                verbose controls
                 whether verbose debugging output will be printed to the
                 terminal.  This function has two return values.  The first is
                 a boolean which tells the client whether there is anything to
@@ -450,13 +429,16 @@
                 PlanCreationException and PermissionsException."""
 
                 self.__plan_common_start("change-variant")
+                if not variants and not facets:
+                        raise ValueError, "Nothing to do"
                 try:
                         self.check_be_name(be_name)
                         self.be_name = be_name
 
                         self.__refresh_publishers()
 
-                        self.__img.image_change_variant(variants,
+                        self.__img.image_change_varcets(variants, 
+                            facets,
                             self.__progresstracker,
                             self.__check_cancelation,
                             noexecute, verbose=verbose)
@@ -598,7 +580,7 @@
                                         self.log_operation_end(error=e)
                                         raise e
                         else:
-                                if self.__img.imageplan.actuators.reboot_needed() and \
+                                if self.__img.imageplan.reboot_needed() and \
                                     self.__img.is_liveroot():
                                         e = api_errors.RebootNeededOnLiveImageException()
                                         self.log_operation_end(error=e)
@@ -1053,9 +1035,6 @@
                     e.missing_matches or e.illegal:
                         self.log_operation_end(error=e,
                             result=history.RESULT_FAILED_BAD_REQUEST)
-                elif e.constraint_violations:
-                        self.log_operation_end(error=e,
-                            result=history.RESULT_FAILED_CONSTRAINED)
                 else:
                         self.log_operation_end(error=e)
 
@@ -1088,12 +1067,10 @@
                                     gen_installed_pkg_names=\
                                         self.__img.gen_installed_pkg_names,
                                     case_sensitive=q.case_sensitive)
-                                excludes = [variant.Variants(
-                                    {"variant.arch": self.__img.get_arch()}
-                                    ).allow_action]
                                 res = query.search(
                                     self.__img.gen_installed_pkgs,
-                                    self.__img.get_manifest_path, excludes)
+                                    self.__img.get_manifest_path,
+                                    self.__img.list_excludes())
                         except search_errors.InconsistentIndexException, e:
                                 raise api_errors.InconsistentIndexException(e)
                         # i is being inserted to track which query the results
@@ -1267,11 +1244,9 @@
                 if not os.path.isdir(self.__img.index_dir):
                         self.__img.mkdirs()
                 try:
-                        excludes = [variant.Variants(
-                            {"variant.arch": self.__img.get_arch()}).allow_action]
                         ind = indexer.Indexer(self.__img, self.__img.get_manifest,
                             self.__img.get_manifest_path,
-                            self.__progresstracker, excludes)
+                            self.__progresstracker, self.__img.list_excludes())
                         ind.rebuild_index_from_scratch(
                             self.__img.gen_installed_pkgs())
                 except search_errors.ProblematicPermissionsIndexException, e:
@@ -1308,6 +1283,21 @@
                 finally:
                         self.__img.cleanup_downloads()
 
+        def get_pub_search_order(self):
+                """Return current search order of publishers; includes
+                disabled publishers"""
+                return self.__img.cfg_cache.publisher_search_order
+
+        def set_pub_search_after(self, being_moved_prefix, staying_put_prefix):
+                """Change the publisher search order so that being_moved is
+                searched after staying_put"""
+                self.__img.pub_search_after(being_moved_prefix, staying_put_prefix)
+
+        def set_pub_search_before(self, being_moved_prefix, staying_put_prefix):
+                """Change the publisher search order so that being_moved is
+                searched before staying_put"""
+                self.__img.pub_search_before(being_moved_prefix, staying_put_prefix)
+
         def get_preferred_publisher(self):
                 """Returns the preferred publisher object for the image."""
                 return self.get_publisher(
@@ -1458,7 +1448,11 @@
                                 # iterator key since the prefix might be
                                 # different for the new publisher object.
                                 updated = True
-                                del publishers[key]
+
+                                # only if prefix is different - this
+                                # preserves search order
+                                if key != pub.prefix:
+                                        del publishers[key]
 
                                 # Prepare the new publisher object.
                                 pub.meta_root = \
--- a/src/modules/client/api_errors.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/api_errors.py	Wed Nov 18 15:53:48 2009 -0800
@@ -155,18 +155,22 @@
 class PlanCreationException(ApiException):
         def __init__(self, unmatched_fmris=EmptyI, multiple_matches=EmptyI,
             missing_matches=EmptyI, illegal=EmptyI,
-            constraint_violations=EmptyI, badarch=EmptyI, not_installed=EmptyI,
-            installed=EmptyI, obsolete=EmptyI):
+            badarch=EmptyI, installed=EmptyI, multispec=EmptyI, 
+            no_solution=False, no_version=EmptyI, missing_dependency=EmptyI, 
+            wrong_publishers=EmptyI, obsolete=EmptyI):
                 ApiException.__init__(self)
-                self.unmatched_fmris = unmatched_fmris
-                self.multiple_matches = multiple_matches
-                self.missing_matches = missing_matches
-                self.illegal = illegal
-                self.constraint_violations = constraint_violations
-                self.badarch = badarch
-                self.not_installed = not_installed
-                self.installed = installed
+                self.unmatched_fmris       = unmatched_fmris
+                self.multiple_matches      = multiple_matches
+                self.missing_matches       = missing_matches
+                self.illegal               = illegal
+                self.badarch               = badarch
+                self.installed             = installed
+                self.multispec             = multispec
                 self.obsolete = obsolete
+                self.no_solution           = no_solution
+                self.no_version            = no_version
+                self.missing_dependency    = missing_dependency
+                self.wrong_publishers      = wrong_publishers
 
         def __str__(self):
                 res = []
@@ -177,6 +181,13 @@
                         res += [s]
                         res += ["\t%s" % p for p in self.unmatched_fmris]
 
+                if self.wrong_publishers:
+                        s = _("The following patterns only matched packages "
+                            "that are from publishers other than that which "
+                            "supplied the already installed version of this package")
+                        res += [s]
+                        res += ["\t%s: %s" % (p[0], ", ".join(p[1])) for p in self.wrong_publishers]
+
                 if self.multiple_matches:
                         s = _("'%s' matches multiple packages")
                         for p, lst in self.multiple_matches:
@@ -184,16 +195,13 @@
                                 for pfmri in lst:
                                         res.append("\t%s" % pfmri)
 
-                s = _("'%s' matches no installed packages")
-                res += [ s % p for p in self.missing_matches ]
+                if self.missing_matches:
+                        s = _("'%s' matches no installed packages")
+                        res += [ s % p for p in self.missing_matches ]
 
-                s = _("'%s' is an illegal fmri")
-                res += [ s % p for p in self.illegal ]
-
-                if self.constraint_violations:
-                        s = _("The following package(s) violated constraints:")
-                        res += [s]
-                        res += ["\t%s" % p for p in self.constraint_violations]
+                if self.illegal:
+                        s = _("'%s' is an illegal fmri")
+                        res += [ s % p for p in self.illegal ]
 
                 if self.badarch:
                         s = _("'%s' supports the following architectures: %s")
@@ -205,13 +213,6 @@
                 s = _("'%(p)s' depends on obsolete package '%(op)s'")
                 res += [ s % {"p": p, "op": op} for p, op in self.obsolete ]
 
-                if self.not_installed:
-                        s = _("The proposed operation can not be performed for "
-                            "the following package(s) as they are not "
-                            "installed: ")
-                        res += [s]
-                        res += ["\t%s" % p for p in self.not_installed]
-
                 if self.installed:
                         s = _("The proposed operation can not be performed for "
                             "the following package(s) as they are already "
@@ -219,6 +220,25 @@
                         res += [s]
                         res += ["\t%s" % p for p in self.installed]
 
+                if self.multispec:
+                        s = _("The following different patterns specify the "
+                              "same package(s):")
+                        res += [s]
+                        for t in self.multispec:
+                                res += [
+                                        ", ".join(
+                                        [t[i] for i in range(1, len(t))])
+                                        + ": %s" % t[0]
+                                        ] 
+                if self.no_solution:
+                        res += [_("No solution was found to satisfy constraints")]
+
+                if self.no_version:
+                        res += self.no_version
+
+                if self.missing_dependency:
+                        # Translate first, then interpolate; append as a
+                        # single list element (res += <str> would extend the
+                        # list character by character).
+                        res += [_("Package %s is missing a dependency: %s") %
+                            self.missing_dependency]
+
                 return "\n".join(res)
 
 
@@ -975,6 +995,13 @@
                 return _("The preferred publisher cannot be removed.")
 
 
+class MoveRelativeToSelf(PublisherError):
+        """Used to indicate an attempt to search a repo before or after itself"""
+
+        def __str__(self):
+                return _("Cannot search a repository before or after itself")
+
+
 class SelectedRepositoryRemoval(PublisherError):
         """Used to indicate that an attempt to remove the selected repository
         for a publisher was made."""
--- a/src/modules/client/constraint.py	Tue Nov 17 17:06:35 2009 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,290 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-# Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-
-import pkg.version
-
-class ConstraintException(Exception):
-        """Constraint exception is thrown by constraint functions
-        if constraint conflicts occur."""
-
-        PRESENCE_CONFLICT  = 0
-        VERSION_CONFLICT   = 1
-        FMRI_CONFLICT      = 2 # same as version, but diff error
-        DOWNGRADE_CONFLICT = 3
-
-
-        def __init__(self, reason, new, old):
-                Exception.__init__(self)
-                self.new = new
-                self.old = old
-                self.reason = reason
-
-        def __str__(self):
-                if self.reason == self.PRESENCE_CONFLICT:                        
-                        return _(
-"Package presence is both required and prohibited:\n\t%s\n\t%s\n") % \
-                            (self.new, self.old)
-                elif self.reason == self.VERSION_CONFLICT:
-                        return _(
-"""Package %s contains constraint incompatible with constraint in installed package %s
-         proposed: %s
-        installed: %s
-""") % (self.new.source_name, self.old.source_name, self.new, self.old)
-                elif self.reason == self.FMRI_CONFLICT:
-                        return _(
-"""Package %s conflicts with constraint in installed pkg:/%s: 
-        %s""") % (self.new, self.old.source_name, self.old)
-                elif self.reason == self.DOWNGRADE_CONFLICT:
-                        return _(
-""""Package %s contains constraint that requires downgrade of installed pkg %s:
-        %s""") % \
-                            (self.new.source_name, self.old, self.new)
-                assert 0, "Illegal reason code"
-
-class ConstraintSet(object):
-        """used to hold set of constraints on an image"""
-        def __init__(self):
-                self.constraints = {}
-                # dict of version, constrained pkgs by pkg name
-                self.loaded_fmri_versions = {}
-                self.active_fmri = None
-
-        def finish_loading(self, fmri):
-                """ declare that we're done loading constraints 
-                from this fmri"""
-                assert self.active_fmri == fmri, \
-                    "Finishing for wrong fmri (%s != %s)" %(self.active_fmri, fmri)
-                self.active_fmri = None
-
-        def start_loading(self, fmri):
-                """ load a new set of constraints from fmri,
-                deleting any constraints defined by previous versions
-                of this fmri... skip if we're reloading the same
-                one by returning False, otherwise True. """
-
-                assert self.active_fmri == None, "Already loading!"
-                self.active_fmri = fmri
-                fmri_name = fmri.get_name()
-
-                if fmri_name in self.loaded_fmri_versions:
-                        oldv, pkg_list = self.loaded_fmri_versions[fmri_name]
-                        if oldv == fmri.version:
-                                self.active_fmri = None
-                                return False # already loaded this pkg once                        
-                        # remove constraints est. by previous version
-                        for p in pkg_list:
-                                cl = self.constraints[p]
-                                deletions = 0                                
-                                for i, c in enumerate(cl[:]):
-                                        if c.source_name == fmri_name:
-                                                del cl[i - deletions]
-                                                deletions += 1
-
-                self.loaded_fmri_versions[fmri_name] = (fmri.version, [])
-                return True
-
-        def update_constraints(self, constraint):
-                """ add a constraint from the active fmri to the
-                set of system constraints"""
-
-                active_fmri_name = self.active_fmri.get_name()
-                v, pkg_list = self.loaded_fmri_versions[active_fmri_name]
-
-                assert active_fmri_name == constraint.source_name
-
-                if constraint.presence == Constraint.ALWAYS:
-                        return # don't record these here for now
-
-                # find existing constraint list for this package                
-
-                cl = self.constraints.get(constraint.pkg_name, None)
-
-                if cl:
-                        # check to make sure new constraint is
-                        # compatible w/ existing constraints
-                        # compatiblity is such that if
-                        # A will combine w/ any item in list B,
-                        # A will combine with combination of all of B
-                        for c in cl:
-                                c.combine(constraint)
-                        cl.append(constraint)                        
-                else:
-                        self.constraints[constraint.pkg_name] = [constraint]
-
-                if constraint.pkg_name not in pkg_list:
-                        pkg_list.append(constraint.pkg_name)
-
-        def apply_constraints(self, constraint): 
-                """ if constraints exist for this package, apply 
-                them.  Apply the new one last so that exception
-                contains proper error message... error message
-                generation will be unclear if multiple constraints
-                exist"""
-                cl = self.constraints.get(constraint.pkg_name, None)
-                if cl:
-                        mc = reduce(lambda a, b: a.combine(b), cl)
-                        return mc.combine(constraint)
-                return None
-
-        def apply_constraints_to_fmri(self, fmri, auto=False):
-                """ treats fmri as min required version; apply any 
-                constraints and if fmri is more specific, return 
-                original fmri, otherwise return more constrained
-                version... remap exception for better error handling"""
-                if not auto:
-                        ic = Constraint.reqfmri2constraint(fmri, "")
-                else:
-                        ic = Constraint.autofmri2constraint(fmri, "")
-                try:
-                        nc = self.apply_constraints(ic)
-                except ConstraintException, e:
-                        raise ConstraintException(ConstraintException.FMRI_CONFLICT, 
-                            fmri, e.old)
-                if not nc or ic == nc:
-                        return fmri
-                nfmri = fmri.copy()
-                nfmri.version = nc.min_ver
-                return nfmri
-                        
-class Constraint(object):
-        """basic constraint object; describes constraints on fmris 
-        and provides a method of computing the intersection of two 
-        constraints"""
-        # some defines for presence
-        ERROR   = 0 #order matters; see self.combine for details
-        ALWAYS  = 1 #required
-        MAYBE   = 2 #optional
-        NEVER   = 3 #exclude, not yet functional
-
-        __presence_strs = ["ERROR", "Required", "Optional", "Excluded"]
-
-        compat = {
-            (ALWAYS, ALWAYS): ALWAYS,
-            (ALWAYS, MAYBE):  ALWAYS,
-            (ALWAYS, NEVER):  ERROR,
-            (MAYBE,  MAYBE):  MAYBE,
-            (MAYBE,  NEVER):  NEVER,
-            (NEVER,  NEVER):  NEVER
-        }
-
-        def __init__(self, pkg_name, min_ver, max_ver, presence, source_name):
-                self.pkg_name = pkg_name
-                self.presence = presence
-                self.min_ver = min_ver
-                self.max_ver = max_ver
-                self.source_name = source_name
-
-        def __str__(self):
-                return "Pkg %s: %s min_version: %s max version: %s defined by: pkg:/%s" % \
-                    (self.pkg_name, self.__presence_strs[self.presence], 
-                     self.min_ver, self.max_ver, self.source_name)
-
-        def __eq__(self, other):
-                return \
-                    self.pkg_name == other.pkg_name and \
-                    self.presence == other.presence and \
-                    self.min_ver  == other.min_ver and \
-                    self.max_ver  == other.max_ver
-
-        @staticmethod
-        def autofmri2constraint(fmri, source_name):
-                return Constraint(fmri.get_name(), fmri.version, 
-                    fmri.version, Constraint.ALWAYS, source_name)
-               
-        @staticmethod
-        def reqfmri2constraint(fmri, source_name):
-                return Constraint(fmri.get_name(), fmri.version, 
-                    None, Constraint.ALWAYS, source_name)
-
-        @staticmethod
-        def optfmri2constraint(fmri, source_name):
-                return Constraint(fmri.get_name(), fmri.version, 
-                    None, Constraint.MAYBE, source_name)
-
-        @staticmethod
-        def incfmri2constraint(fmri, source_name):
-                return Constraint(fmri.get_name(), fmri.version, 
-                    fmri.version, Constraint.MAYBE, source_name)
-        
-        def check_for_work(self, fmri_present):
-                """Evaluate work needed to meet new constraint if fmri_present
-                is the fmri installed (None if none is installed).  Returns
-                None if no work to do, otherwise version to be installed.
-                Raises ConstraintException in case of uninstall or downgrade
-                required."""
-
-                if not fmri_present:
-                        if self.presence == Constraint.MAYBE or \
-                            self.presence == Constraint.NEVER:
-                                return None
-                        return self.min_ver
-                else:
-                        # following assertion awaits rename removal
-                        # assert fmri_present.get_name() == self.pkg_name
-                        if self.presence == Constraint.NEVER:
-                                raise Constraint.Exception(
-                                    ConstraintException.PRESENCE_CONFLICT,
-                                    self, fmri_present)
-                version_present = fmri_present.version
-                if version_present < self.min_ver:
-                        return self.min_ver
-                if self.max_ver and version_present > self.max_ver and not \
-                    version_present.is_successor(self.max_ver, 
-                    pkg.version.CONSTRAINT_AUTO):
-                        raise ConstraintException(ConstraintException.DOWNGRADE_CONFLICT,
-                            self, fmri_present)
-                return None
-
-        def combine(self, proposed):
-                assert self.pkg_name == proposed.pkg_name
-
-                presence = self.compat[(min(self.presence, proposed.presence),
-                                        max(self.presence, proposed.presence))]
-
-                if presence == Constraint.ERROR:
-                        raise ConstraintException(
-                            ConstraintException.PRESENCE_CONFLICT, proposed, self)
-
-                # following relies on None < any version
-
-                if self.max_ver == None or proposed.max_ver == None:
-                        max_ver = max(self.max_ver, proposed.max_ver)
-                else:
-                        max_ver = min(self.max_ver, proposed.max_ver)
-                
-                min_ver = max(self.min_ver, proposed.min_ver)
-
-                if max_ver and max_ver < min_ver and \
-                    not min_ver.is_successor(max_ver, pkg.version.CONSTRAINT_AUTO):
-                        raise ConstraintException(
-                            ConstraintException.VERSION_CONFLICT, proposed, self)
-
-                return Constraint(self.pkg_name, min_ver, max_ver, 
-                    presence, self.source_name)
-
-                
-                
-                                
-        
--- a/src/modules/client/debugvalues.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/debugvalues.py	Wed Nov 18 15:53:48 2009 -0800
@@ -23,19 +23,21 @@
 # Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
 # Use is subject to license terms.
 
+from pkg.misc import Singleton
 
-class DebugValues(object):
-        """ simple singleton class to handle debug variables """
-        __debug_values = {}
+class DebugValues(dict):
+        """Singleton dict that returns None if unknown value
+        is referenced"""
+        __metaclass__ = Singleton
 
-        def __call__(self):
-                return self
+        def __getitem__(self, item):
+                """ returns None if not set """
+                return self.get(item, None)
 
         def get_value(self, key):
-                """ returns None if not set """
-                return self.__debug_values.get(key, None)
+                return self[key]
 
         def set_value(self, key, value):
-                self.__debug_values[key] = value
+                self[key] = value
 
 DebugValues=DebugValues()
--- a/src/modules/client/filter.py	Tue Nov 17 17:06:35 2009 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,150 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-
-from cStringIO import StringIO
-import tokenize
-import token
-from pkg.misc import msg
-
-def compile_filter(filter):
-        def f_get(tup):
-                if tup[0] == token.NAME:
-                        return "NAME", tup[0], tup[1]
-                elif tup[0] == token.NUMBER:
-                        return "NUMBER", tup[0], tup[1]
-                else:
-                        return tup[1], tup[0], tup[1]
-        tok_stream = [
-            f_get(i)
-            for i in tokenize.generate_tokens(StringIO(filter).readline)
-        ]
-
-        f_str = ""
-        expr = ""
-        want_attr = True
-        next_tok = ("(", "NAME", "NUMBER")
-        for tok_str, tok_type, tok in tok_stream:
-                if tok_str not in next_tok:
-                        raise RuntimeError, \
-                            "'%s' is not an allowable token. Expected one of" \
-                                " the following %s after: %s" % \
-                            (tok_str, next_tok, f_str)
-
-                if tok_type == token.NAME or tok_type == token.NUMBER:
-                        # If the parser has found either of these token types
-                        # just append them and look for the next token.
-                        expr += tok
-                        if want_attr:
-                                next_tok = ("NAME", "NUMBER", ".", "=")
-                        else:
-                                next_tok = ("NAME", "NUMBER", ".", "&", "|",
-                                        ")", "")
-                        continue
-                elif tok_type == token.ENDMARKER:
-                        if not expr == "":
-                                # The parser has encountered the end of the
-                                # filter string (encountered a newline). Thus,
-                                # the expression portion of the filter can be
-                                # generated if we have something to add.
-                                f_str += "'%s') == '%s'" % (expr, expr)
-                        else:
-                                # End of line, but nothing to add.
-                                continue
-                elif tok_type == token.OP:
-                        if tok == "=":
-                                # The assignment operator acts as the
-                                # terminator for parsing attributes.
-                                f_str += "d.get('%s', " % (expr)
-
-                                # Now setup the parser to look for a value. It
-                                # can only be composed of text and/or numeric
-                                # tokens. Then look for the next token.
-                                expr = ""
-                                want_attr = False
-                                next_tok = ("NAME", "NUMBER")
-                                continue
-                        elif tok == "(":
-                                # If the parser finds this token, it just needs
-                                # to be appended, and the next token found.
-                                expr = ""
-                                f_str += "("
-                                next_tok = ("(", "NAME", "NUMBER")
-                                continue
-                        elif tok == ".":
-                                # If the parser finds this token, the value just
-                                # needs to be appended and the next token found.
-                                expr += "."
-                                next_tok = ("NAME", "NUMBER")
-                                continue
-
-                        if not expr == "":
-                                # The remaining tokens to be parsed act as
-                                # terminating operators. As a result, the
-                                # expression portion of the filter needs to be
-                                # generated first before continuing if we have
-                                # something to add.
-                                f_str += "'%s') == '%s'" % (expr, expr)
-
-                        # Now append any conditions to the filter or terminate
-                        # this portion of it.
-                        if tok == "&":
-                                f_str += " and "
-                                next_tok = ("NAME", "NUMBER", "(")
-                                want_attr = True
-                        elif tok == "|":
-                                f_str += " or "
-                                next_tok = ("NAME", "NUMBER", "(")
-                                want_attr = True
-                        elif tok == ")":
-                                f_str += ")"
-                                next_tok = ("&", "|", ")", "")
-                                want_attr = False
-
-                        # Finally, prepare for the next cycle.
-                        expr = ""
-
-        return f_str, compile(f_str, "<filter string>", "eval")
-
-def apply_filters(action, filters):
-        """Apply the filter chain to the action, returning the True if it's
-        not filtered out, or False if it is.
-        
-        Filters operate on action attributes.  A simple filter will eliminate
-        an action if the action has the attribute in the filter, but the value
-        is different.  Simple filters can be chained together with AND and OR
-        logical operators.  In addition, multiple filters may be applied; they
-        are effectively ANDed together.
-        """
-
-        if not action:
-                return False
-
-        # Evaluate each filter in turn.  If a filter eliminates the action, we
-        # need check no further.  If no filters eliminate the action, return
-        # True.
-        for f_entry, code in filters:
-                if not eval(code, {"d": action.attrs}):
-                        return False
-        return True
--- a/src/modules/client/history.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/history.py	Wed Nov 18 15:53:48 2009 -0800
@@ -86,6 +86,7 @@
     api_errors.PermissionsException: RESULT_FAILED_STORAGE,
     api_errors.MainDictParsingException: RESULT_FAILED_STORAGE,
     api_errors.SearchException: RESULT_FAILED_SEARCH,
+    api_errors.PlanCreationException: RESULT_FAILED_CONSTRAINED,
     api_errors.NonLeafPackageException: RESULT_FAILED_CONSTRAINED,
     api_errors.IpkgOutOfDateException: RESULT_FAILED_CONSTRAINED,
     api_errors.InvalidDepotResponseException: RESULT_FAILED_TRANSPORT,
--- a/src/modules/client/image.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/image.py	Wed Nov 18 15:53:48 2009 -0800
@@ -39,11 +39,9 @@
 import pkg.Uuid25
 import pkg.catalog
 import pkg.client.api_errors            as api_errors
-import pkg.client.constraint            as constraint
 import pkg.client.history               as history
 import pkg.client.imageconfig           as imageconfig
 import pkg.client.imageplan             as imageplan
-import pkg.client.imagestate            as imagestate
 import pkg.client.pkgplan               as pkgplan
 import pkg.client.progress              as progress
 import pkg.client.publisher             as publisher
@@ -53,7 +51,6 @@
 import pkg.misc                         as misc
 import pkg.portable                     as portable
 import pkg.server.catalog
-import pkg.variant                      as variant
 import pkg.version
 
 from pkg.client.debugvalues import DebugValues
@@ -191,13 +188,11 @@
                 self.__init_catalogs()
                 self.__upgraded = False
 
-                self.arch_change = False
                 self.attrs = {
                     "Policy-Require-Optional": False,
                     "Policy-Pursue-Latest": True
                 }
                 self.cfg_cache = None
-                self.constraints = constraint.ConstraintSet()
                 self.dl_cache_dir = None
                 self.dl_cache_incoming = None
                 self.history = history.History()
@@ -206,9 +201,9 @@
                 self.imgdir = None
                 self.index_dir = None
                 self.is_user_cache_dir = False
-                self.new_variants = {}
                 self.pkgdir = None
                 self.root = root
+                self.__req_dependents = None
 
                 # Transport operations for this image
                 self.transport = transport.Transport(self)
@@ -229,8 +224,6 @@
                 # a place to keep info about saved_files; needed by file action
                 self.saved_files = {}
 
-                self.state = imagestate.ImageState(self)
-
                 self.type = None
 
                 # right now we don't explicitly set dir/file modes everywhere;
@@ -289,8 +282,8 @@
                 while True:
                         imgtype = self.image_type(d)
                         if imgtype == IMG_USER:
-                                # XXX Look at image file to determine filter
-                                # tags and repo URIs.
+                                # XXX Should look at image file to determine 
+                                # repo URIs.
                                 if exact_match and \
                                     os.path.realpath(startd) != \
                                     os.path.realpath(d):
@@ -301,8 +294,8 @@
                                 self.attrs["Build-Release"] = "5.11"
                                 return
                         elif imgtype == IMG_ENTIRE:
-                                # XXX Look at image file to determine filter
-                                # tags and repo URIs.
+                                # XXX Look at image file to determine
+                                # repo URIs.
                                 # XXX Look at image file to determine if this
                                 # image is a partial image.
                                 if exact_match and \
@@ -470,7 +463,7 @@
 
         def set_attrs(self, is_zone, prefix, mirrors=EmptyI, origins=EmptyI,
             ssl_key=None, ssl_cert=None, refresh_allowed=True, progtrack=None,
-            variants=EmptyDict):
+            variants=EmptyDict, facets=EmptyDict):
                 """Creates a new image with the given attributes if it does not
                 exist or sets the attributes of an already existing image."""
 
@@ -500,7 +493,6 @@
 
                 # Determine and add the default variants for the image.
                 if is_zone:
-                        self.cfg_cache.filters["opensolaris.zone"] = "nonglobal"
                         self.cfg_cache.variants[
                             "variant.opensolaris.zone"] = "nonglobal"
                 else:
@@ -511,20 +503,21 @@
                     variants.get("variant.arch", platform.processor())
 
                 # After setting up the default variants, add any overrides or
-                # additional variants specified.
+                # additional variants or facets specified.
                 self.cfg_cache.variants.update(variants)
+                self.cfg_cache.facets.update(facets)
 
                 # Now everything is ready for publisher configuration.
-                self.cfg_cache.preferred_publisher = newpub.prefix
                 self.add_publisher(newpub, refresh_allowed=refresh_allowed,
                     progtrack=progtrack)
+                self.cfg_cache.preferred_publisher = newpub.prefix
 
                 # No need to save configuration as add_publisher will do that
                 # if successful.
                 self.history.log_operation_end()
 
         def is_liveroot(self):
-                return bool(self.root == "/" or
+                return bool(self.root == "/" or 
                     DebugValues.get_value("simulate_live_root"))
 
         def is_zone(self):
@@ -532,10 +525,7 @@
                     "variant.opensolaris.zone"] == "nonglobal"
 
         def get_arch(self):
-                if "variant.arch" in self.new_variants:
-                        return self.new_variants["variant.arch"]
-                else:
-                        return self.cfg_cache.variants["variant.arch"]
+                return self.cfg_cache.variants["variant.arch"]
 
         def get_root(self):
                 return self.root
@@ -548,6 +538,30 @@
                         if inc_disabled or not pub.disabled:
                                 yield self.cfg_cache.publishers[p]
 
+        def get_publisher_ranks(self):
+                """Returns dictionary of publishers by name; each
+                entry contains a tuple of search order index starting 
+                at 0, and a boolean indicating whether or not 
+                this publisher is "sticky"."""
+
+                # automatically make disabled publishers not sticky
+                so = self.cfg_cache.publisher_search_order
+
+                ret = dict([
+                            (p.prefix, 
+                             (so.index(p.prefix), 
+                              not p.disabled and p.sticky)) 
+                            for p in self.gen_publishers()
+                            ])
+
+                # add any publishers for pkgs that are installed,
+                # but have been deleted... so they're not sticky.
+                for pfmri in self.gen_installed_pkgs():
+                        ret.setdefault(pfmri.get_publisher(), 
+                            (len(ret) + 1, False))
+                
+                return ret
+                        
         def check_cert_validity(self):
                 """Look through the publishers defined for the image.  Print
                 a message and exit with an error if one of the certificates
@@ -599,7 +613,7 @@
                 return self.cfg_cache.publishers
 
         def get_publisher(self, prefix=None, alias=None, origin=None):
-                publishers = [p for p in self.get_publishers().values()]
+                publishers = [p for p in self.cfg_cache.publishers.values()]
                 for pub in publishers:
                         if prefix and prefix == pub.prefix:
                                 return pub
@@ -610,6 +624,48 @@
                                 return pub
                 raise api_errors.UnknownPublisher(max(prefix, alias, origin))
 
+        def pub_search_before(self, being_moved, staying_put):
+                """Moves publisher "being_moved" to before "staying_put"
+                in search order"""
+                self.__pub_search_common(being_moved, staying_put, after=False)
+
+        def pub_search_after(self, being_moved, staying_put):
+                """Moves publisher "being_moved" to after "staying_put"
+                in search order"""
+                self.__pub_search_common(being_moved, staying_put, after=True)
+
+        def __pub_search_common(self, being_moved, staying_put, after=True):
+                """Moves publisher "being_moved" before or after "staying_put"
+                in search order"""
+                if after:
+                        r = "search-after"
+                else:
+                        r = "search-before"
+
+                self.history.log_operation_start(r)
+                try:
+                        bm = self.get_publisher(being_moved).prefix
+                        sp = self.get_publisher(staying_put).prefix
+                except api_errors.ApiException, e:
+                        self.history.log_operation_end(e)
+                        raise
+
+                if bm == sp:
+                        e = api_errors.MoveRelativeToSelf()
+                        self.history.log_operation_end(e)
+                        raise e
+                
+                # compute new order and set it
+                so = self.cfg_cache.publisher_search_order
+                so.remove(bm)
+                if after:
+                        so.insert(so.index(sp) + 1, bm)
+                else:
+                        so.insert(so.index(sp), bm)
+                self.cfg_cache.change_publisher_search_order(so)
+                self.save_config()
+                self.history.log_operation_end()
+
         def get_preferred_publisher(self):
                 """Returns the prefix of the preferred publisher."""
                 return self.cfg_cache.preferred_publisher
@@ -736,21 +792,13 @@
                                 yield (act, errors)
 
         def __call_imageplan_evaluate(self, ip, verbose=False):
-                if verbose:
-                        logger.info(_("Before evaluation:"))
-                        logger.info(ip)
-
                 # A plan can be requested without actually performing an
                 # operation on the image.
                 if self.history.operation_name:
                         self.history.operation_start_state = ip.get_plan()
 
-                try:
-                        ip.evaluate()
-                except constraint.ConstraintException, e:
-                        raise api_errors.PlanCreationException(
-                            constraint_violations=str(e).split("\n"))
-
+                ip.evaluate(verbose)
+                
                 self.imageplan = ip
 
                 if self.history.operation_name:
@@ -758,63 +806,37 @@
                             ip.get_plan(full=False)
 
                 if verbose:
-                        logger.info(_("After evaluation:"))
                         ip.display()
 
-        def image_change_variant(self, variants, progtrack, check_cancelation,
+        def image_change_varcets(self, variants, facets, progtrack, check_cancelation,
             noexecute, verbose=False):
 
-                ip = imageplan.ImagePlan(self, progtrack, lambda: False,
-                    noexecute=noexecute, variants=variants,
-                    recursive_removal=True)
+
+                ip = imageplan.ImagePlan(self, progtrack, check_cancelation,
+                    noexecute=noexecute)
+
                 progtrack.evaluate_start()
 
-                # make sure that some variants are actually changing
-                variants = dict(set(variants.iteritems()) - \
-                    set(self.cfg_cache.variants.iteritems()))
-
-                if not variants:
-                        self.__call_imageplan_evaluate(ip, verbose)
-                        logger.info("No variant changes.")
-                        return
-
-                #
-                # only get manifests for all architectures if we're
-                # changing the architecture variant
-                #
-                if "variant.arch" in variants:
-                        self.arch_change = True
+                # compute dict of changing variants
+                if variants:
+                        variants = dict(set(variants.iteritems()) - \
+                           set(self.cfg_cache.variants.iteritems()))
+                # facets are always the entire set
 
-                #
-                # we can't set self.new_variants until after we
-                # instantiate the image plan since the image plan has
-                # to cache information like the old excludes, and
-                # once we set new_variants things like self.list_excludes()
-                # and self.get_arch() will report valus based off of the
-                # new variants we're moving too.
-                #
-                self.new_variants = variants
-
-                for fmri in self.gen_installed_pkgs():
-                        m = self.get_manifest(fmri)
-                        m_arch = m.get_variants("variant.arch")
-                        if not m_arch:
-                                # keep packages that don't have an explicit arch
-                                ip.propose_fmri(fmri)
-                        elif self.get_arch() in m_arch:
-                                # keep packages that match the current arch
-                                ip.propose_fmri(fmri)
-                        else:
-                                # remove packages for different archs
-                                ip.propose_fmri_removal(fmri)
+                ip.plan_change_varcets(variants, facets)
 
                 self.__call_imageplan_evaluate(ip, verbose)
 
-        def image_config_update(self):
-                if not self.new_variants:
-                        return
+        def image_config_update(self, new_variants, new_facets):
+                """update variants in image config"""
                 ic = self.cfg_cache
-                ic.variants.update(self.new_variants)
+
+                if new_variants is not None:
+                        ic.variants.update(new_variants)
+
+                if new_facets is not None:
+                        ic.facets = new_facets
+
                 ic.write(self.imgdir)
                 ic = imageconfig.ImageConfig(self.root,
                     self._get_publisher_meta_dir())
@@ -834,14 +856,16 @@
                         pp.propose_repair(fmri, m, actions)
                         pp.evaluate(self.list_excludes(), self.list_excludes())
                         pps.append(pp)
+                ip = imageplan.ImagePlan(self, progtrack, lambda: False)
+                self.imageplan = ip
 
-                ip = imageplan.ImagePlan(self, progtrack, lambda: False)
                 ip.update_index = False
+                ip.state = imageplan.EVALUATED_PKGS
                 progtrack.evaluate_start()
                 ip.pkg_plans = pps
 
                 ip.evaluate()
-                if ip.actuators.reboot_needed() and self.is_liveroot():
+                if ip.reboot_needed() and self.is_liveroot():
                         raise api_errors.RebootNeededOnLiveImageException()
                 ip.preexecute()
                 ip.execute()
@@ -858,101 +882,49 @@
 
                 return False
 
-        def __fetch_manifest(self, fmri, excludes=EmptyI):
-                """A wrapper call for getting manifests.  This invokes
-                the transport method, gets the manifest, and performs
-                any additional image-related processing."""
-
-                m = self.transport.get_manifest(fmri, excludes,
-                    self.state.get_intent_str(fmri))
-
-                # What is the client currently processing?
-                targets = self.state.get_targets()
-
-                intent = None
-                for entry in targets:
-                        target, reason = entry
-
-                        # Ignore the publisher for comparison.
-                        np_target = target.get_fmri(anarchy=True)
-                        np_fmri = fmri.get_fmri(anarchy=True)
-                        if np_target == np_fmri:
-                                intent = reason
-
-                # If no intent could be found, assume INTENT_INFO.
-                self.state.set_touched_manifest(fmri,
-                    max(intent, imagestate.INTENT_INFO))
-
-                return m
-
-        def __touch_manifest(self, fmri):
-                """Perform steps necessary to 'touch' a manifest to provide
-                intent information.  Ignores most exceptions as this operation
-                is only for informational purposes."""
-
-                # What is the client currently processing?
-                target, intent = self.state.get_target()
-
-                # Ignore dry-runs of operations or operations which do not have
-                # a set target.
-                if not target or intent == imagestate.INTENT_EVALUATE:
-                        return
-
-                if not self.state.get_touched_manifest(fmri, intent):
-                        # If the manifest for this fmri hasn't been "seen"
-                        # before, determine if intent information needs to be
-                        # provided.
-
-                        # Ignore the publisher for comparison.
-                        np_target = target.get_fmri(anarchy=True)
-                        np_fmri = fmri.get_fmri(anarchy=True)
-                        if np_target == np_fmri:
-                                # If the client is currently processing
-                                # the given fmri (for an install, etc.)
-                                # then intent information is needed.
-                                try:
-                                        self.transport.touch_manifest(fmri,
-                                            self.state.get_intent_str(fmri))
-                                except (api_errors.UnknownPublisher,
-                                    api_errors.TransportError):
-                                        # It's not fatal if we can't find
-                                        # or reach the publisher.
-                                        pass
-                                self.state.set_touched_manifest(fmri, intent)
-
         def get_manifest_path(self, fmri):
                 """Return path to on-disk manifest"""
                 mpath = os.path.join(self.imgdir, "pkg",
                     fmri.get_dir_path(), "manifest")
                 return mpath
 
-        def __get_manifest(self, fmri, excludes=EmptyI):
+        def __get_manifest(self, fmri, excludes=EmptyI, intent=None):
                 """Find on-disk manifest and create in-memory Manifest
                 object.... grab from server if needed"""
 
                 try:
-                        return manifest.CachedManifest(fmri, self.pkgdir,
+                        ret = manifest.CachedManifest(fmri, self.pkgdir,
                             self.cfg_cache.preferred_publisher,
                             excludes)
+                        # if we have an intent string, let the depot
+                        # know what we're using the cached manifest for
+                        if intent:
+                                try:
+                                        self.transport.touch_manifest(fmri, intent)
+                                except (api_errors.UnknownPublisher,
+                                    api_errors.TransportError):
+                                        # It's not fatal if we can't find
+                                        # or reach the publisher.
+                                        pass
                 except KeyError:
-                        return self.__fetch_manifest(fmri, excludes)
-
-        def get_manifest(self, fmri, all_arch=False):
-                """return manifest; uses cached version if available.
-                all_arch controls whether manifest contains actions
-                for all architectures"""
+                        ret = self.transport.get_manifest(fmri, excludes,
+                                                    intent)
+                return ret
 
-                # Normally elide other arch variants
-                if self.arch_change or all_arch:
-                        all_arch = True
-                        v = EmptyI
+        def get_manifest(self, fmri, all_variants=False, intent=None):
+                """return manifest; uses cached version if available.
+                all_variants controls whether manifest contains actions
+                for all variants"""
+
+                # Normally elide other arch variants, facets
+                
+                if all_variants:
+                        excludes = EmptyI
                 else:
-                        arch = {"variant.arch": self.get_arch()}
-                        v = [variant.Variants(arch).allow_action]
+                        excludes = [ self.cfg_cache.variants.allow_action ]
 
-                m = self.__get_manifest(fmri, v)
+                m = self.__get_manifest(fmri, excludes=excludes, intent=intent)
 
-                self.__touch_manifest(fmri)
                 return m
 
         def set_pkg_state(self, pfmri, state):
@@ -1124,7 +1096,7 @@
                         entry = cat.get_entry(f)
                         states = entry["metadata"]["states"]
                         if self.PKG_STATE_V1 not in states:
-                                return self.get_manifest(f, all_arch=True)
+                                return self.get_manifest(f, all_variants=True)
                         return None
 
                 # batch_mode is set to True here as any operations that modify
@@ -1220,53 +1192,32 @@
                 states = entry["metadata"]["states"]
                 return self.PKG_STATE_INSTALLED in states
 
-        def is_pkg_preferred(self, pfmri):
-                """Compatibility function for use by pkg.client.api only.
-                Should be removed once publishers are ranked by priority.
-
-                Returns a boolean value indicating whether the specified
-                package was installed from a publisher that was preferred at
-                the time of installation, or is from the same publisher as
-                the currently preferred publisher."""
-
-                if pfmri.publisher == self.get_preferred_publisher():
-                        return True
-
-                # Avoid loading the installed catalog if the known catalog
-                # is already loaded.  This is safe since the installed
-                # catalog is a subset of the known, and a specific entry
-                # is being retrieved.
-                if not self.__catalog_loaded(self.IMG_CATALOG_KNOWN):
-                        cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
-                else:
-                        cat = self.get_catalog(self.IMG_CATALOG_KNOWN)
-
-                try:
-                        entry = cat.get_entry(pfmri)
-                except api_errors.UnknownCatalogEntry:
-                        return False
-                states = entry["metadata"]["states"]
-                return self.__PKG_STATE_PREFERRED in states
-
-        def list_excludes(self, new_variants=None):
+        def list_excludes(self, new_variants=None, new_facets=None):
                 """Generate a list of callables that each return True if an
                 action is to be included in the image using the currently
-                defined variants for the image, or an updated set if
-                new_variants are specified.  The callables take a single action
-                argument.  Variants, facets and filters will be handled in
-                this fashion."""
-
-                # XXX simple for now; facets and filters need impl.
+                defined variants & facets for the image, or an updated set if
+                new_variants or new_facets are specified."""
+                
                 if new_variants:
                         new_vars = self.cfg_cache.variants.copy()
                         new_vars.update(new_variants)
-                        return [new_vars.allow_action]
-                elif self.new_variants:
-                        new_vars = self.cfg_cache.variants.copy()
-                        new_vars.update(self.new_variants)
-                        return [new_vars.allow_action]
+                        var_call = new_vars.allow_action
+                else:
+                        var_call = self.cfg_cache.variants.allow_action
+                if new_facets:
+                        fac_call = new_facets.allow_action
                 else:
-                        return [self.cfg_cache.variants.allow_action]
+                        fac_call = self.cfg_cache.facets.allow_action
+
+                return [var_call, fac_call]
+
+        def get_variants(self):
+                """ return a copy of the current image variants"""
+                return self.cfg_cache.variants.copy()
+
+        def get_facets(self):
+                """ Return a copy of the current image facets"""
+                return self.cfg_cache.facets.copy()
 
         def __build_dependents(self, progtrack):
                 """Build a dictionary mapping packages to the list of packages
@@ -1282,30 +1233,17 @@
                                 if a.name != "depend" or \
                                     a.attrs["type"] != "require":
                                         continue
-
-                                dfmri = self.strtofmri(a.attrs["fmri"])
-                                if dfmri not in self.__req_dependents:
-                                        self.__req_dependents[dfmri] = []
-                                self.__req_dependents[dfmri].append(f)
+                                name = self.strtofmri(a.attrs["fmri"]).pkg_name
+                                self.__req_dependents.setdefault(name, []).append(f)
 
         def get_dependents(self, pfmri, progtrack):
                 """Return a list of the packages directly dependent on the given
                 FMRI."""
 
-                if not hasattr(self, "_Image__req_dependents"):
+                if self.__req_dependents is None:
                         self.__build_dependents(progtrack)
 
-                dependents = []
-                # We run through all the keys, in case a package is depended
-                # upon under multiple versions.  That is, if pkgA depends on
-                # libc@1 and pkgB depends on libc@2, we need to return both pkgA
-                # and pkgB.  If we used package names as keys, this would be
-                # simpler, but it wouldn't handle catalog operations (such as
-                # rename) that might have been applied to the fmri.
-                for f in self.__req_dependents.iterkeys():
-                        if pfmri.is_successor(f):
-                                dependents.extend(self.__req_dependents[f])
-                return dependents
+                return self.__req_dependents.get(pfmri.pkg_name, [])
 
         def __rebuild_image_catalogs(self, progtrack=None):
                 """Rebuilds the image catalogs based on the available publisher
@@ -1921,54 +1859,6 @@
                 return pkg.fmri.MatchingPkgFmri(myfmri,
                     self.attrs["Build-Release"])
 
-        def load_constraints(self, progtrack):
-                """Load constraints for all install pkgs"""
-
-                cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
-                for f, actions in cat.actions([cat.DEPENDENCY],
-                    excludes=self.list_excludes()):
-                        if not self.constraints.start_loading(f):
-                                # skip loading if already done
-                                continue
-
-                        for a in actions:
-                                if a.name != "depend":
-                                        continue
-                                progtrack.evaluate_progress()
-                                con = a.parse(self, f.get_name())[1]
-                                self.constraints.update_constraints(con)
-                        self.constraints.finish_loading(f)
-
-        def __get_installed_unbound_inc_list(self):
-                """Returns list of packages containing incorporation
-                dependencies on which no other pkgs depend."""
-
-                inc_tuples = []
-                dependents = set()
-
-                cat = self.get_catalog(self.IMG_CATALOG_INSTALLED)
-                for f, actions in cat.actions([cat.DEPENDENCY],
-                    excludes=self.list_excludes()):
-                        for a in actions:
-                                if a.name != "depend":
-                                        continue
-                                fmri_name = f.get_pkg_stem()
-                                con_fmri = a.get_constrained_fmri(self)
-                                if con_fmri:
-                                        con_name = con_fmri.get_pkg_stem()
-                                        dependents.add(con_name)
-                                        inc_tuples.append((fmri_name, con_name))
-
-                # remove those incorporations which are depended on by other
-                # incorporations.
-                deletions = 0
-                for i, a in enumerate(inc_tuples[:]):
-                        if a[0] in dependents:
-                                del inc_tuples[i - deletions]
-
-                for p in set([ a[0] for a in inc_tuples ]):
-                        yield pkg.fmri.PkgFmri(p, self.attrs["Build-Release"])
-
         def get_user_by_name(self, name):
                 return portable.get_user_by_name(name, self.root,
                     self.type != IMG_USER)
@@ -2399,258 +2289,50 @@
                 return olist, onames
 
         def make_install_plan(self, pkg_list, progtrack, check_cancelation,
-            noexecute, filters=None, verbose=False, multimatch_ignore=False):
+            noexecute, verbose=False):
                 """Take a list of packages, specified in pkg_list, and attempt
                 to assemble an appropriate image plan.  This is a helper
                 routine for some common operations in the client.
-
-                This method checks all publishers for a package match;
-                however, it defaults to choosing the preferred publisher
-                when an ambiguous package name is specified.  If the user
-                wishes to install a package from a non-preferred publisher,
-                the full FMRI that contains a publisher should be used
-                to name the package.
-
-                'multimatch_ignore' is an optional, boolean value that
-                indicates whether packages that have multiple matches for
-                only non-preferred publishers should be ignored when creating
-                the install plan.  This is intended to be used during an
-                image-update.
                 """
-
-                if filters is None:
-                        filters = []
-
-                error = 0
                 ip = imageplan.ImagePlan(self, progtrack, check_cancelation,
-                    filters=filters, noexecute=noexecute)
+                    noexecute=noexecute)
 
                 progtrack.evaluate_start()
-                self.load_constraints(progtrack)
 
-                unmatched_fmris = []
-                multiple_matches = []
-                illegal_fmris = []
-                constraint_violations = []
-
-                # order package list so that any unbound incorporations are
-                # done first
-
-                inc_list = list(self.__get_installed_unbound_inc_list())
-
-                head = []
-                tail = []
-                for s in pkg_list:
-                        try:
-                                p = pkg.fmri.PkgFmri(s,
-                                    self.attrs["Build-Release"])
-                        except pkg.fmri.IllegalFmri:
-                                illegal_fmris.append(s)
-                                error = 1
-                                continue
-
-                        for inc in inc_list:
-                                if inc.pkg_name == p.pkg_name:
-                                        head.append((s, p))
-                                        break
-                        else:
-                                tail.append((s, p))
-                pkg_list = head + tail
-
-                # This approach works only for cases w/ simple
-                # incorporations; the apply_constraints_to_fmri
-                # call below binds the version too quickly.  This
-                # awaits a proper solver.
-
-                # XXX This logic is very inefficient for cases like image-update
-                # where the entire package inventory is matched against each
-                # package (think num_installed * available_pkgs).
-                ppub = self.get_preferred_publisher()
-                for p, conp in pkg_list:
-                        progtrack.evaluate_progress()
-                        try:
-                                conp = \
-                                    self.constraints.apply_constraints_to_fmri(
-                                    conp, auto=True)
-                        except constraint.ConstraintException, e:
-                                error = 1
-                                constraint_violations.extend(str(e).split("\n"))
-                                continue
-
-                        # If we were passed in an fmri object or a string that
-                        # anchors the package stem with the scheme, match on the
-                        # stem exactly as given.  Otherwise we can let the
-                        # default, looser matching mechanism be used.
-                        # inventory() will override if globbing characters are
-                        # used.
-                        matcher = None
-                        if isinstance(p, pkg.fmri.PkgFmri) or \
-                            p.startswith("pkg:/"):
-                                matcher = pkg.fmri.exact_name_match
-
-                        try:
-                                matches = list(self.inventory([conp],
-                                    all_known=True, matcher=matcher,
-                                    ordered=False))
-                        except api_errors.InventoryException, e:
-                                assert(not (e.notfound and e.illegal))
-                                assert(e.notfound or e.illegal)
-                                error = 1
-                                if e.notfound:
-                                        unmatched_fmris.append(p)
-                                else:
-                                        illegal_fmris.append(p)
-                                continue
-
-                        pnames = set()
-                        pmatch = []
-                        npnames = set()
-                        npmatch = []
-                        for m, st in matches:
-                                if m.publisher == ppub:
-                                        pnames.add(m.get_pkg_stem())
-                                        pmatch.append((m, st))
-                                else:
-                                        npnames.add(m.get_pkg_stem())
-                                        npmatch.append((m, st))
+                try:
+                        ip.plan_install(pkg_list)
 
-                        pfmri = None
-                        # If we have more than one possible match from the
-                        # preferred publisher, try to narrow it down to one
-                        # non-obsolete package.
-                        if len(pnames) > 1:
-                                # Filter out packages whose newest version is
-                                # obsolete.  If there's more than one package
-                                # left, then there's still a conflict, but if
-                                # there's only one, we can assume that's the one
-                                # we want.  If there are none, we'll try again
-                                # with the non-preferred publisher.
-                                d = {}
-                                for m, st in pmatch:
-                                        d.setdefault(m.get_pkg_stem(), (m, st))
-                                nonobs = [
-                                    (m, st)
-                                    for stem, (m, st) in d.iteritems()
-                                    if not st["obsolete"] and not st["renamed"]
-                                ]
-                                if len(nonobs) > 1:
-                                        # There can only be one preferred publisher, so
-                                        # filtering is pointless and these are truly
-                                        # ambiguous matches.
-                                        multiple_matches.append((p, pnames))
-                                        error = 1
-                                        continue
-                                if nonobs:
-                                        pfmri, status = nonobs[0]
-
-                        # If we couldn't find exactly one non-obsolete match
-                        # from the preferred publisher, try to find one from the
-                        # non-preferred publishers, if any matches exist.
-                        if not pfmri and len(npnames) > 1:
-                                npmatch, npnames = \
-                                    self.__filter_install_matches(npmatch)
-                                d = {}
-                                for m, st in npmatch:
-                                        d.setdefault(m.get_pkg_stem(), (m, st))
-                                nonobs = [
-                                    (m, st)
-                                    for stem, (m, st) in d.iteritems()
-                                    if not st["obsolete"] and not st["renamed"]
-                                ]
-                                if len(nonobs) > 1:
-                                        if multimatch_ignore:
-                                                # Caller has requested that this
-                                                # package be skipped if multiple
-                                                # matches are found.
-                                                continue
-                                        # If there are still multiple matches
-                                        # after filtering, fail.
-                                        multiple_matches.append((p, npnames))
-                                        error = 1
-                                        continue
-                                if nonobs:
-                                        pfmri, status = nonobs[0]
-
-
-                        # At this point, either there's exactly one preferred or
-                        # non-preferred match, or all possible matches are
-                        # obsolete, so it doesn't really matter what we choose,
-                        # nothing will actually get installed.  So choose the
-                        # latest.
-                        #
-                        # Although looking at what's already installed in the
-                        # procedure above is incredibly complicated, here's an
-                        # instance where it would give us better results.  If we
-                        # chose something already installed, then it would get
-                        # removed, rather than nothing happening.  If things
-                        # were still ambiguous, we could error out.
-                        if not pfmri:
-                                if pmatch:
-                                        pfmri, status = pmatch[0]
-                                else:
-                                        pfmri, status = npmatch[0]
-
-                        if status["obsolete"]:
-                                vi = self.get_version_installed(pfmri)
-                                if vi:
-                                        ip.propose_fmri_removal(vi)
-                        else:
-                                # Take this fork for a renamed package, since
-                                # the rename will happen as part of the eval.
-                                ip.propose_fmri(pfmri)
-
-                if error != 0:
-                        raise api_errors.PlanCreationException(unmatched_fmris,
-                            multiple_matches, [], illegal_fmris,
-                            constraint_violations=constraint_violations)
+                except api_errors.ApiException, e:
+                        ip.show_failure(verbose)
+                        raise
 
                 self.__call_imageplan_evaluate(ip, verbose)
 
-        def make_uninstall_plan(self, fmri_list, recursive_removal,
-            progresstracker, check_cancelation, noexecute, verbose=False):
-                ip = imageplan.ImagePlan(self, progresstracker,
-                    check_cancelation, recursive_removal, noexecute=noexecute)
-
-                err = 0
+        def make_update_plan(self, progtrack, check_cancelation,
+            noexecute, verbose=False):
+                """Create a plan to update all packages as far as
+                possible."""
 
-                unmatched_fmris = []
-                multiple_matches = []
-                missing_matches = []
-                illegal_fmris = []
+                progtrack.evaluate_start()
 
-                progresstracker.evaluate_start()
+                ip = imageplan.ImagePlan(self, progtrack, check_cancelation,
+                    noexecute=noexecute)
 
-                for ppat in fmri_list:
-                        progresstracker.evaluate_progress()
-                        try:
-                                matches = list(self.inventory([ppat],
-                                    ordered=False))
-                        except api_errors.InventoryException, e:
-                                if e.illegal:
-                                        illegal_fmris.append(ppat)
-                                else:
-                                        try:
-                                                list(self.inventory([ppat],
-                                                    all_known=True,
-                                                    ordered=False))
-                                                missing_matches.append(ppat)
-                                        except api_errors.InventoryException:
-                                                unmatched_fmris.append(ppat)
-                                err = 1
-                                continue
+                ip.plan_update()
+                
+                self.__call_imageplan_evaluate(ip, verbose)
 
-                        if len(matches) > 1:
-                                matchlist = [m[0] for m in matches]
-                                multiple_matches.append((ppat, matchlist))
-                                err = 1
-                                continue
+        def make_uninstall_plan(self, fmri_list, recursive_removal,
+            progtrack, check_cancelation, noexecute, verbose=False):
+                """Create uninstall plan to remove the specified packages;
+                do so recursively iff recursive_removal is set"""
 
-                        # Propose the removal of the first (and only!) match.
-                        ip.propose_fmri_removal(matches[0][0])
+                progtrack.evaluate_start()
 
-                if err == 1:
-                        raise api_errors.PlanCreationException(unmatched_fmris,
-                            multiple_matches, missing_matches, illegal_fmris)
+                ip = imageplan.ImagePlan(self, progtrack,
+                    check_cancelation, noexecute=noexecute)
+
+                ip.plan_uninstall(fmri_list, recursive_removal)
 
                 self.__call_imageplan_evaluate(ip, verbose)
 
@@ -2713,7 +2395,7 @@
                 # XXX call to progress tracker that SUNWipkg is being refreshed
 
                 img.make_install_plan(["SUNWipkg"], progtrack,
-                    check_cancelation, noexecute, filters = [])
+                    check_cancelation, noexecute)
 
                 return img.imageplan.nothingtodo()
 
--- a/src/modules/client/imageconfig.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/imageconfig.py	Wed Nov 18 15:53:48 2009 -0800
@@ -32,22 +32,23 @@
 from pkg.client import global_settings
 logger = global_settings.logger
 
-import pkg.client.api_errors as api_errors
-import pkg.client.publisher as publisher
-import pkg.fmri as fmri
-import pkg.portable as portable
-import pkg.variant as variant
+import pkg.client.api_errors  as api_errors
+import pkg.client.publisher   as publisher
+import pkg.facet              as facet
+import pkg.fmri               as fmri
+import pkg.portable           as portable
+import pkg.variant            as variant
 
+from pkg.misc import DictProperty
 # The default_policies dictionary defines the policies that are supported by 
 # pkg(5) and their default values. Calls to the ImageConfig.get_policy method
 # should use the constants defined here.
-REQUIRE_OPTIONAL = "require-optional"
+
 PURSUE_LATEST = "pursue-latest"
 DISPLAY_COPYRIGHTS = "display-copyrights"
 FLUSH_CONTENT_CACHE = "flush-content-cache-on-success"
 SEND_UUID = "send-uuid"
 default_policies = { 
-    REQUIRE_OPTIONAL: False,
     PURSUE_LATEST: True,
     DISPLAY_COPYRIGHTS: True,
     FLUSH_CONTENT_CACHE: False,
@@ -62,6 +63,7 @@
 CFG_FILE = "cfg_cache"
 DA_FILE = "disabled_auth"
 
+
 class ImageConfig(object):
         """An ImageConfig object is a collection of configuration information:
         URLs, publishers, properties, etc. that allow an Image to operate."""
@@ -76,18 +78,76 @@
         def __init__(self, imgroot, pubdir):
                 self.__imgroot = imgroot
                 self.__pubdir = pubdir
-                self.publishers = {}
+                self.__publishers = {}
+                self.__publisher_search_order = []
+
                 self.properties = dict((
                     (p, str(v)) 
                     for p, v in default_policies.iteritems()
                 ))
-                self.preferred_publisher = None
-                self.filters = {}
+                self.facets = facet.Facets()
                 self.variants = variant.Variants()
                 self.children = []
 
+
         def __str__(self):
-                return "%s\n%s" % (self.publishers, self.properties)
+                return "%s\n%s" % (self.__publishers, self.properties)
+
+        def __get_preferred_publisher(self):
+                """Returns prefix of preferred publisher"""
+
+                for p in self.__publisher_search_order:
+                        if not self.__publishers[p].disabled:
+                                return p
+                raise KeyError, "No preferred publisher"
+
+        def __set_preferred_publisher(self, prefix):
+                """Enforce search order rules"""
+                if prefix not in self.__publishers:
+                        raise KeyError, "Publisher %s not found" % prefix
+                self.__publisher_search_order.remove(prefix)
+                self.__publisher_search_order.insert(0, prefix)
+
+        def remove_publisher(self, prefix):
+                """External functional interface - use property interface"""
+                del self.publishers[prefix]
+
+        def change_publisher_search_order(self, new_world_order):
+                """Change search order to desired value"""
+                if sorted(new_world_order) != sorted(self.__publisher_search_order):
+                        raise ValueError, "publishers added or removed"
+                self.__publisher_search_order = new_world_order
+                
+        def __get_publisher(self, prefix):
+                """Accessor method for publishers dictionary"""
+                return self.__publishers[prefix]
+
+        def __set_publisher(self, prefix, value):
+                """Accessor method to keep search order correct on insert"""
+                if prefix not in self.__publisher_search_order:
+                        self.__publisher_search_order.append(prefix)
+                self.__publishers[prefix] = value
+
+        def __del_publisher(self, prefix):
+                """Accessor method for publishers"""
+                if prefix in self.__publisher_search_order:
+                        self.__publisher_search_order.remove(prefix)
+                del self.__publishers[prefix]
+
+        def __publisher_iter(self):
+                return self.__publishers.__iter__()
+
+        def __publisher_iteritems(self):
+                """Support iteritems on publishers"""
+                return self.__publishers.iteritems()
+
+        def __publisher_keys(self):
+                """Support keys() on publishers"""
+                return self.__publishers.keys()
+
+        def __publisher_values(self):
+                """Support values() on publishers"""
+                return self.__publishers.values()
 
         def get_policy(self, policy):
                 """Return a boolean value for the named policy.  Returns
@@ -107,6 +167,7 @@
                 changed = False
 
                 cp = ConfigParser.SafeConfigParser()
+                cp.optionxform = str # preserve option case
 
                 ccfile = os.path.join(path, CFG_FILE)
                 r = cp.read(ccfile)
@@ -121,11 +182,16 @@
 
                 #
                 # Must load filters first, since the value of a filter can
-                # impact the default value of the zone variant.
+                # impact the default value of the zone variant.  This is
+                # legacy code, and should be removed when upgrade from
+                # pre-variant versions of opensolaris is no longer 
+                # supported
                 #
+
+                filters = {}
                 if cp.has_section("filter"):
                         for o in cp.options("filter"):
-                                self.filters[o] = cp.get("filter", o)
+                                filters[o] = cp.get("filter", o)
 
                 #
                 # Must load variants next, since in the case of zones,
@@ -134,7 +200,10 @@
                 if cp.has_section("variant"):
                         for o in cp.options("variant"):
                                 self.variants[o] = cp.get("variant", o)
-
+                # facets
+                if cp.has_section("facet"):
+                        for o in cp.options("facet"):
+                                self.facets[o] = cp.get("facet", o) != "False"
                 # make sure we define architecture variant
                 if "variant.arch" not in self.variants:
                         self.variants["variant.arch"] = platform.processor()
@@ -142,7 +211,7 @@
 
                 # make sure we define zone variant
                 if "variant.opensolaris.zone" not in self.variants:
-                        zone = self.filters.get("opensolaris.zone", "")
+                        zone = filters.get("opensolaris.zone", "")
                         if zone == "nonglobal":
                                 self.variants[
                                     "variant.opensolaris.zone"] = "nonglobal"
@@ -151,15 +220,15 @@
                                     "variant.opensolaris.zone"] = "global"
                         changed = True
 
+                preferred_publisher = None
                 for s in cp.sections():
                         if re.match("authority_.*", s):
                                 k, a, c = self.read_publisher(pmroot, cp, s)
                                 changed |= c
-
                                 self.publishers[k] = a
-                               
-                                if self.preferred_publisher == None:
-                                        self.preferred_publisher = k
+                                # just in case there's no other indication
+                                if preferred_publisher is None:
+                                        preferred_publisher = k
 
                 # read in the policy section to provide backward
                 # compatibility for older images
@@ -173,18 +242,33 @@
                                     o, raw=True).decode('utf-8')
 
                 try:
-                        self.preferred_publisher = \
-                            self.properties["preferred-publisher"]
+                        preferred_publisher = \
+                            str(self.properties["preferred-publisher"])
                 except KeyError:
                         try:
                                 # Compatibility with older clients.
                                 self.properties["preferred-publisher"] = \
-                                    self.properties["preferred-authority"]
-                                self.preferred_publisher = \
+                                    str(self.properties["preferred-authority"])
+                                preferred_publisher = \
                                     self.properties["preferred-publisher"]
                                 del self.properties["preferred-authority"]
                         except KeyError:
-                                pass
+                                pass                
+                try:
+                        self.__publisher_search_order = self.read_list(
+                            str(self.properties["publisher-search-order"]))
+                except KeyError:
+                        # make up the default - preferred, then the rest in alpha order
+                        self.__publisher_search_order = [preferred_publisher] + \
+                            sorted([ 
+                                name 
+                                for name in self.__publishers.keys() 
+                                if name != preferred_publisher
+                                ])
+
+                        self.properties["publisher-search-order"] = \
+                            str(self.__publisher_search_order)
+                        changed = True
 
                 # read disabled publisher file
                 # XXX when compatility with the old code is no longer needed,
@@ -218,6 +302,7 @@
                 # XXX the use of the disabled_auth file can be removed when
                 # compatibility with the older code is no longer needed
                 da = ConfigParser.SafeConfigParser()
+                da.optionxform = str # preserve option case
 
                 # For compatibility, the preferred-publisher is written out
                 # as the preferred-authority.  Modify a copy so that we don't
@@ -227,23 +312,25 @@
                         del props["preferred-publisher"]
                 except KeyError:
                         pass
-                props["preferred-authority"] = self.preferred_publisher
+                props["preferred-authority"] = str(self.__publisher_search_order[0])
+                props["publisher-search-order"] = str(self.__publisher_search_order)
 
                 cp.add_section("property")
                 for p in props:
-                        cp.set("property", p,
-                            props[p].encode("utf-8"))
-
-                cp.add_section("filter")
-                for f in self.filters:
-                        cp.set("filter", f, str(self.filters[f]))
+                        cp.set("property", p, props[p].encode("utf-8"))
 
                 cp.add_section("variant")
                 for f in self.variants:
                         cp.set("variant", f, str(self.variants[f]))
 
-                for prefix in self.publishers:
-                        pub = self.publishers[prefix]
+                cp.add_section("facet")
+
+                for f in self.facets:
+                        
+                        cp.set("facet", f, str(self.facets[f]))
+
+                for prefix in self.__publishers:
+                        pub = self.__publishers[prefix]
                         section = "authority_%s" % pub.prefix
 
                         c = cp
@@ -254,6 +341,7 @@
                         c.set(section, "alias", str(pub.alias))
                         c.set(section, "prefix", str(pub.prefix))
                         c.set(section, "disabled", str(pub.disabled))
+                        c.set(section, "sticky", str(pub.sticky))
 
                         repo = pub.selected_repository
 
@@ -335,9 +423,6 @@
                                 raise
                         acp.write(f)
 
-        def remove_publisher(self, prefix):
-                del self.publishers[prefix]
-
         @staticmethod
         def read_list(list_str):
                 """Take a list in string representation and convert it back
@@ -371,6 +456,11 @@
                             "Invalid Publisher name: %s" % prefix)
 
                 try:
+                        sticky = cp.getboolean(s, "sticky")
+                except (ConfigParser.NoOptionError, ValueError):
+                        sticky = True
+
+                try:
                         d = cp.get(s, "disabled")
                 except ConfigParser.NoOptionError:
                         d = 'False'
@@ -522,10 +612,19 @@
 
                 pub = publisher.Publisher(prefix, alias=alias,
                     client_uuid=client_uuid, disabled=disabled,
-                    meta_root=pmroot, repositories=[r])
+                    meta_root=pmroot, repositories=[r], sticky=sticky)
 
                 # write out the UUID if it was set
                 if pub.client_uuid != client_uuid:
                         changed = True
 
                 return prefix, pub, changed
+
+        # properties so we can enforce rules
+
+        publisher_search_order = property(lambda self: self.__publisher_search_order[:])
+        preferred_publisher = property(__get_preferred_publisher, __set_preferred_publisher,
+            doc="The publisher we prefer - first non-disabled publisher in search order")
+        publishers = DictProperty(__get_publisher, __set_publisher, __del_publisher, 
+            __publisher_iteritems, __publisher_keys, __publisher_values, __publisher_iter,
+            doc="A dict mapping publisher prefixes to publisher objects")
--- a/src/modules/client/imageplan.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/imageplan.py	Wed Nov 18 15:53:48 2009 -0800
@@ -25,23 +25,30 @@
 # Use is subject to license terms.
 #
 
+import errno
+import operator
 import os
-import errno
 import traceback
 
 from pkg.client import global_settings
 logger = global_settings.logger
 
 import pkg.actions
-import pkg.client.actuator as actuator
+import pkg.catalog
+import pkg.client.actuator   as actuator
+import pkg.client.indexer    as indexer
 import pkg.client.api_errors as api_errors
-import pkg.client.imagestate as imagestate
-import pkg.client.pkgplan as pkgplan
-import pkg.client.indexer as indexer
-import pkg.fmri as fmri
-import pkg.search_errors as se
+import pkg.client.pkgplan    as pkgplan
+import pkg.client.pkg_solver as pkg_solver
+import pkg.fmri
+import pkg.manifest          as manifest
+import pkg.search_errors     as se
+import pkg.version
 
-from pkg.client.filter import compile_filter
+from pkg.client.debugvalues import DebugValues
+
+from pkg.misc import msg
+
 
 UNEVALUATED       = 0 # nothing done yet
 EVALUATED_PKGS    = 1 # established fmri changes
@@ -51,173 +58,307 @@
 EXECUTED_OK       = 5 # finished execution
 EXECUTED_ERROR    = 6 # failed
 
-class ImagePlan(object):
-        """An image plan takes a list of requested packages, an Image (and its
-        policy restrictions), and returns the set of package operations needed
-        to transform the Image to the list of requested packages.
+PLANNED_NOTHING   = "no-plan"
+PLANNED_INSTALL   = "install"
+PLANNED_UNINSTALL = "uninstall"
+PLANNED_UPDATE    = "image-update"
+PLANNED_FIX       = "fix"
+PLANNED_VARIANT   = "change-variant"
 
-        Use of an ImagePlan involves the identification of the Image, the
-        Catalogs (implicitly), and a set of complete or partial package FMRIs.
-        The Image's policy, which is derived from its type and configuration
-        will cause the formulation of the plan or an exception state.
 
-        XXX In the current formulation, an ImagePlan can handle [null ->
-        PkgFmri] and [PkgFmri@Version1 -> PkgFmri@Version2], for a set of
-        PkgFmri objects.  With a correct Action object definition, deletion
-        should be able to be represented as [PkgFmri@V1 -> null].
+class ImagePlan(object):
+        """ImagePlan object contains the plan for changing the image...
+        there are separate routines for planning the various types of
+        image modifying operations; evaluation (comparing manifests
+        and building lists of removal, install and update actions)
+        and their execution is all common code"""
 
-        XXX Should we allow downgrades?  There's an "arrow of time" associated
-        with the smf(5) configuration method, so it's better to direct
-        manipulators to snapshot-based rollback, but if people are going to do
-        "pkg delete fmri; pkg install fmri@v(n - 1)", then we'd better have a
-        plan to identify when this operation is safe or unsafe."""
+        def __init__(self, image, progtrack, check_cancel, noexecute=False):
+                self.image = image
+                self.pkg_plans = []
 
-        def __init__(self, image, progtrack, check_cancelation,
-            recursive_removal=False, filters=None, variants=None,
-            noexecute=False):
-                if filters is None:
-                        filters = []
-                self.image = image
                 self.state = UNEVALUATED
-                self.recursive_removal = recursive_removal
-                self.progtrack = progtrack
+                self.__progtrack = progtrack
+                self.__noexecute = noexecute
+                
+                self.__fmri_changes = [] # install  (None, fmri)
+                                         # update   (oldfmri, newfmri)
+                                         # remove   (oldfmri, None)
+                                         # reinstall(oldfmri, oldfmri)
 
-                self.noexecute = noexecute
-                if noexecute:
-                        self.__intent = imagestate.INTENT_EVALUATE
-                else:
-                        self.__intent = imagestate.INTENT_PROCESS
+                self.update_actions  = []
+                self.removal_actions = []
+                self.install_actions = []
 
-                self.target_fmris = []
-                self.target_rem_fmris = []
-                self.pkg_plans = []
-                self.target_insall_count = 0
-                self.target_update_count = 0
+                self.__target_install_count = 0
+                self.__target_update_count  = 0
+                self.__target_removal_count = 0
 
-                self.__directories = None
+                self.__directories = None  # implement ref counting
+                self.__symlinks = None     # for dirs and links
                 self.__cached_actions = {}
 
-                ifilters = [
-                    "%s = %s" % (k, v)
-                    for k, v in image.cfg_cache.filters.iteritems()
-                ]
-                self.filters = [ compile_filter(f) for f in filters + ifilters ]
+                self.__old_excludes = image.list_excludes()
+                self.__new_excludes = self.__old_excludes
 
-                self.old_excludes = image.list_excludes()
-                self.new_excludes = image.list_excludes(variants)
+                self.__check_cancelation = check_cancel
 
-                self.check_cancelation = check_cancelation
-
-                self.actuators = None
+                self.__actuators = None
 
                 self.update_index = True
 
-                self.preexecuted_indexing_error = None
+                self.__preexecuted_indexing_error = None
+                self.__planned_op = PLANNED_NOTHING
+                self.__pkg_solver = None
+                self.__new_variants = None
+                self.__new_facets = None
+                self.__variant_change = False
+                self.__references = {} # dict of fmri -> pattern
 
         def __str__(self):
+
                 if self.state == UNEVALUATED:
                         s = "UNEVALUATED:\n"
-                        for t in self.target_fmris:
-                                s = s + "+%s\n" % t
-                        for t in self.target_rem_fmris:
-                                s = s + "-%s\n" % t
+                        return s
+
+                s = "%s\n" % self.__pkg_solver 
+
+                if self.state < EVALUATED_PKGS:
+                        return s
+
+                s += "Package version changes:\n"
+
+                for pp in self.pkg_plans:
+                        s += "%s -> %s\n" % (pp.origin_fmri, pp.destination_fmri)
+
+                if self.__actuators:
+                        s = s + "Actuators:\n%s\n" % self.__actuators
+
+                if self.__old_excludes != self.__new_excludes:
+                        s = s + "Variants/Facet changes: %s -> %s\n" % (self.__old_excludes,
+                            self.__new_excludes)
+
+                return s
+
+        def __verbose_str(self):
+                s = str(self)
+
+                if self.state == EVALUATED_PKGS:
                         return s
 
-                s = "Package changes:\n"
-                for pp in self.pkg_plans:
-                        s = s + "%s\n" % pp
+                s = s + "Actions being removed:\n"
+                for pplan, o_action, ignore in self.removal_actions:
+                        s = s + "\t%s:%s\n" % ( pplan.origin_fmri, o_action)
+                
+                s = s + "\nActions being updated:\n"
+                for pplan, o_action, d_action in self.update_actions:
+                        s = s + "\t%s:%s -> %s%s\n" % ( 
+                            pplan.origin_fmri, o_action,
+                            pplan.destination_fmri, d_action )
+
+                s = s + "\nActions being installed:\n"
+                for pplan, ignore, d_action in self.install_actions:
+                        s = s + "\t%s:%s\n" % ( pplan.destination_fmri, d_action)
+
+                return s
+
+        def show_failure(self, verbose):
+                """Here's where extensive messaging needs to go"""
+
+                if self.__pkg_solver:
+                        logger.info(_("Planning for %s failed: %s\n") % 
+                            (self.__planned_op, self.__pkg_solver.gen_failure_report(verbose)))
+
+        def plan_install(self, pkgs_to_install):
+                """Determine the fmri changes needed to install the specified pkgs"""
+                self.__planned_op = PLANNED_INSTALL
+
+                # get ranking of publishers
+                pub_ranks = self.image.get_publisher_ranks()
+
+                # build installed dict
+                installed_dict = ImagePlan.__fmris2dict(self.image.gen_installed_pkgs())
+                
+                # build installed publisher dictionary
+                installed_pubs = dict((
+                                (f.pkg_name, f.get_publisher()) 
+                                for f in installed_dict.values()
+                                ))
+
+                proposed_dict, self.__references = self.match_user_fmris(pkgs_to_install, 
+                    True, pub_ranks, installed_pubs)
+                
+                # instantiate solver
+                self.__pkg_solver = pkg_solver.PkgSolver(
+                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
+                    installed_dict, 
+                    pub_ranks,
+                    self.image.get_variants(),
+                    self.__progtrack)
+
+                # Solve... will raise exceptions if no solution is found 
+                new_vector = self.__pkg_solver.solve_install([], proposed_dict, 
+                    self.__new_excludes)
+
+                self.__fmri_changes = [ 
+                        (a, b)
+                        for a, b in ImagePlan.__dicts2fmrichanges(installed_dict, 
+                            ImagePlan.__fmris2dict(new_vector))
+                        if a != b
+                        ]
+ 
+                self.state = EVALUATED_PKGS
+
+        def plan_uninstall(self, pkgs_to_uninstall, recursive_removal=False):
+                self.__planned_op = PLANNED_UNINSTALL
+                proposed_dict, self.__references = self.match_user_fmris(pkgs_to_uninstall, 
+                    False, None, None)
+                # merge patterns together
+                proposed_removals = set([
+                                f 
+                                for each in proposed_dict.values()
+                                for f in each
+                                ])
+
+
+                # compute removal of  packages; until we implement require 
+                # either A or B type dependencies no solver is needed
+                if recursive_removal:
+                        needs_processing = proposed_removals
+                        already_processed = set()
+                        while needs_processing:
+                                pfmri = needs_processing.pop()
+                                already_processed.add(pfmri)
+                                needs_processing |= set(self.image.get_dependents(pfmri,
+                                    self.__progtrack)) - already_processed
+                        proposed_removals = already_processed
+
+                for pfmri in proposed_removals:
+                        self.__progtrack.evaluate_progress(pfmri)
+                        dependents = set(self.image.get_dependents(pfmri,
+                            self.__progtrack))
+                        if dependents - proposed_removals:
+                                raise api_errors.NonLeafPackageException(pfmri,
+                                    dependents)
+
+                self.__fmri_changes = [(f, None) for f in proposed_removals]
 
-                s = s + "Actuators:\n%s" % self.actuators
+                self.state = EVALUATED_PKGS
+
+        @staticmethod
+        def __fmris2dict(fmri_list):
+                return  dict([
+                        (f.pkg_name, f)
+                        for f in fmri_list
+                        ])
+
+        @staticmethod
+        def __dicts2fmrichanges(olddict, newdict):
+                return [
+                        (olddict.get(k, None), newdict.get(k, None))
+                        for k in set(olddict.keys() + newdict.keys())
+                        ]
+
+        def plan_update(self):
+                """Determine the fmri changes needed to update all
+                pkgs"""
+                self.__planned_op = PLANNED_UPDATE
+
+                # build installed dict
+                installed_dict = dict([
+                        (f.pkg_name, f)
+                        for f in self.image.gen_installed_pkgs()
+                        ])
+                                
+                # instantiate solver
+                self.__pkg_solver = pkg_solver.PkgSolver(
+                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
+                    installed_dict, 
+                    self.image.get_publisher_ranks(),
+                    self.image.get_variants(),
+                    self.__progtrack)
+                # 
+                new_vector = self.__pkg_solver.solve_update([],  self.__new_excludes)
+
+                self.__fmri_changes = [ 
+                        (a, b)
+                        for a, b in ImagePlan.__dicts2fmrichanges(installed_dict, 
+                            ImagePlan.__fmris2dict(new_vector))
+                        if a != b
+                        ]
+              
+                self.state = EVALUATED_PKGS
+
 
-                s = s + "Variants: %s -> %s\n" % (self.old_excludes, self.new_excludes)
-                return s
+        def plan_fix(self, pkgs_to_fix):
+                """Create the list of pkgs to fix"""
+                self.__planned_op = PLANNED_FIX
+                # XXX complete this
+
+        def plan_change_varcets(self, variants, facets):
+                """Determine the fmri changes needed to change
+                the specified variants/facets"""
+                self.__planned_op = PLANNED_VARIANT
+
+                if variants == None and facets == None: # nothing to do
+                        self.state = EVALUATED_PKGS
+                        return
+
+                self.__variant_change = True
+
+                # build installed dict
+                installed_dict = dict([
+                        (f.pkg_name, f)
+                        for f in self.image.gen_installed_pkgs()
+                        ])
+                                
+                # instantiate solver
+                self.__pkg_solver = pkg_solver.PkgSolver(
+                    self.image.get_catalog(self.image.IMG_CATALOG_KNOWN),
+                    installed_dict, 
+                    self.image.get_publisher_ranks(),
+                    self.image.get_variants(),
+                    self.__progtrack)
+
+                self.__new_excludes = self.image.list_excludes(variants, facets)
+
+                new_vector = self.__pkg_solver.solve_change_varcets([],
+                    variants, facets, self.__new_excludes)
+
+                self.__new_variants = variants
+                self.__new_facets   = facets
+
+                self.__fmri_changes = [ 
+                        (a, b)
+                        for a, b in ImagePlan.__dicts2fmrichanges(installed_dict, 
+                            ImagePlan.__fmris2dict(new_vector))              
+                        ]
+
+                self.state = EVALUATED_PKGS
+                return
+
+        def reboot_needed(self):
+                """Check if evaluated imageplan requires a reboot"""
+                assert self.state >= EVALUATED_OK
+                return self.__actuators.reboot_needed()
+
 
         def get_plan(self, full=True):
                 if full:
                         return str(self)
 
                 output = ""
-                for pp in self.pkg_plans:
-                        output += "%s -> %s\n" % (pp.origin_fmri,
-                            pp.destination_fmri)
-
+                
+                for t in self.__fmri_changes:
+                        output += "%s -> %s\n" % t
                 return output
 
         def display(self):
-                for pp in self.pkg_plans:
-                        logger.info("%s -> %s" % (pp.origin_fmri, pp.destination_fmri))
-                logger.info("Actuators:\n%s" % self.actuators)
-
-        def is_proposed_fmri(self, pfmri):
-                for pf in self.target_fmris:
-                        if pfmri.is_same_pkg(pf):
-                                return not pfmri.is_successor(pf)
-                return False
-
-        def is_proposed_rem_fmri(self, pfmri):
-                for pf in self.target_rem_fmris:
-                        if pfmri.is_same_pkg(pf):
-                                return True
-                return False
-
-        def propose_fmri(self, pfmri):
-                # is a version of fmri.stem in the inventory?
-                if self.image.has_version_installed(pfmri) and \
-                    self.old_excludes == self.new_excludes:
-                        return
-
-                #   is there a freeze or incorporation statement?
-                #   do any of them eliminate this fmri version?
-                #     discard
-
-                #
-                # update so that we meet any optional dependencies
-                #
-
-                pfmri = self.image.constraints.apply_constraints_to_fmri(pfmri)
-                self.image.fmri_set_default_publisher(pfmri)
+                if DebugValues["plan"]:
+                        logger.info(self.__verbose_str())
+                else:
+                        logger.info(str(self))
 
-                # Add fmri to target list only if it (or a successor) isn't
-                # there already.
-                for i, p in enumerate(self.target_fmris):
-                        if pfmri.is_successor(p):
-                                self.target_fmris[i] = pfmri
-                                break
-                        if p.is_successor(pfmri):
-                                break
-                else:
-                        self.target_fmris.append(pfmri)
-                return
-
-        def get_proposed_version(self, pfmri):
-                """ Return version of fmri already proposed, or None
-                if not proposed yet."""
-                for p in self.target_fmris:
-                        if pfmri.get_name() == p.get_name():
-                                return p
-                else:
-                        return None
-
-        def older_version_proposed(self, pfmri):
-                # returns true if older version of this pfmri has been proposed
-                # already
-                for p in self.target_fmris:
-                        if pfmri.is_successor(p):
-                                return True
-                return False
-
-        # XXX Need to make sure that the same package isn't being added and
-        # removed in the same imageplan.
-        def propose_fmri_removal(self, pfmri):
-                if not self.image.has_version_installed(pfmri):
-                        return
-
-                for i, p in enumerate(self.target_rem_fmris):
-                        if pfmri.is_successor(p):
-                                self.target_rem_fmris[i] = pfmri
-                                break
-                else:
-                        self.target_rem_fmris.append(pfmri)
 
         def gen_new_installed_pkgs(self):
                 """ generates all the fmris in the new set of installed pkgs"""
@@ -232,17 +373,19 @@
 
         def gen_new_installed_actions(self):
                 """generates actions in new installed image"""
+                assert self.state >= EVALUATED_PKGS
                 for pfmri in self.gen_new_installed_pkgs():
                         m = self.image.get_manifest(pfmri)
-                        for act in m.gen_actions(self.new_excludes):
+                        for act in m.gen_actions(self.__new_excludes):
                                 yield act
 
         def gen_new_installed_actions_bytype(self, atype):
                 """generates actions in new installed image"""
+                assert self.state >= EVALUATED_PKGS
                 for pfmri in self.gen_new_installed_pkgs():
                         m = self.image.get_manifest(pfmri)
                         for act in m.gen_actions_by_type(atype,
-                            self.new_excludes):
+                            self.__new_excludes):
                                 yield act
 
         def get_directories(self):
@@ -254,13 +397,22 @@
                                     "var/pkg",
                                     "var/sadm",
                                     "var/sadm/install"])
-                        for fmri in self.gen_new_installed_pkgs():
-                                m = self.image.get_manifest(fmri)
-                                for d in m.get_directories(self.new_excludes):
+                        for pfmri in self.gen_new_installed_pkgs():
+                                m = self.image.get_manifest(pfmri)
+                                for d in m.get_directories(self.__new_excludes):
                                         dirs.add(os.path.normpath(d))
                         self.__directories = dirs
                 return self.__directories
 
+        def __get_symlinks(self):
+                """ return a set of all symlinks in target image"""
+                if self.__symlinks == None:
+                        self.__symlinks = set((
+                                        a.attrs["path"]
+                                        for a in self.gen_new_installed_actions_bytype("link")
+                                        ))
+                return self.__symlinks
+
         def get_actions(self, name, key=None):
                 """Return a dictionary of actions of the type given by 'name'
                 describing the target image.  If 'key' is given and not None,
@@ -282,209 +434,111 @@
                 self.__cached_actions[(name, key)] = d
                 return self.__cached_actions[(name, key)]
 
-        def evaluate_fmri(self, pfmri):
-                self.progtrack.evaluate_progress(pfmri)
-
-                if self.check_cancelation():
-                        raise api_errors.CanceledException()
-
-                ppub = self.image.get_preferred_publisher()
-                self.image.fmri_set_default_publisher(pfmri)
-
-                cat = self.image.get_catalog(self.image.IMG_CATALOG_KNOWN)
-
-                # check to make sure package is not tagged as being only
-                # for other architecture(s)
-                supported = cat.get_entry_variants(pfmri, "variant.arch")
-                if supported and self.image.get_arch() not in supported:
-                        raise api_errors.PlanCreationException(badarch=(pfmri,
-                            supported, self.image.get_arch()))
+        def __get_manifest(self, pfmri, intent):
+                """Return manifest for pfmri"""
+                if pfmri:
+                        return self.image.get_manifest(pfmri, 
+                            all_variants=self.__variant_change, intent=intent)
+                else:
+                        return manifest.NullCachedManifest
 
-                # build list of (action, fmri, constraint) of dependencies
-                a_list = [
-                    (a,) + a.parse(self.image, pfmri.get_name())
-                    for a in cat.get_entry_actions(pfmri, [cat.DEPENDENCY],
-                    excludes=self.new_excludes)
-                    if a.name == "depend"
-                ]
+        def __create_intent(self, old_fmri, new_fmri):
+                """Return intent strings (or None).  Given a pair
+                of fmris describing a package operation, this
+                routines returns intent strings to be passed to
+                originating publisher describing manifest
+                operations.  We never send publisher info to
+                prevent cross-publisher leakage of info."""
 
-                # Update constraints first to avoid problems w/ depth first
-                # traversal of dependencies; we may violate an existing
-                # constraint here.
-                if self.image.constraints.start_loading(pfmri):
-                        for a, f, constraint in a_list:
-                                self.image.constraints.update_constraints(
-                                    constraint)
-                        self.image.constraints.finish_loading(pfmri)
-
-                # now check what work is required
-                for a, f, constraint in a_list:
-
-                        # discover if we have an installed or proposed
-                        # version of this pkg already; proposed fmris
-                        # will always be newer
-                        ref_fmri = self.get_proposed_version(f)
-                        if not ref_fmri:
-                                ref_fmri = self.image.get_version_installed(f)
+                if self.__noexecute:
+                        return None, None
 
-                        # check if new constraint requires us to make any
-                        # changes to already proposed pkgs or existing ones.
-                        if not constraint.check_for_work(ref_fmri):
-                                continue
-                        # Apply any active optional/incorporation constraints
-                        # from other packages
-
-                        cf = self.image.constraints.apply_constraints_to_fmri(f)
-
-                        # This will be the newest version of the specified
-                        # dependency package.  Package names specified in
-                        # dependencies are treated as exact.  Matches from the
-                        # preferred publisher are used first, then matches from
-                        # the same publisher as the evaluated fmri, and then
-                        # first available.  Callers can override this behavior
-                        # by specifying the publisher prefix as part of the FMRI.
-                        matches = list(self.image.inventory([cf], all_known=True,
-                            matcher=fmri.exact_name_match, preferred=True,
-                            ordered=False))
+                if new_fmri:
+                        reference = self.__references.get(new_fmri, None)
+                        # don't leak prev. version info across publishers
+                        if old_fmri:
+                                if old_fmri.get_publisher() != \
+                                    new_fmri.get_publisher():
+                                        old_fmri = "unknown"
+                                else:
+                                        old_fmri = old_fmri.get_fmri(anarchy=True)
+                        new_fmri = new_fmri.get_fmri(anarchy=True)# don't send pub
+                else:
+                        reference = self.__references.get(old_fmri, None)
+                        old_fmri = old_fmri.get_fmri(anarchy=True)# don't send pub
 
-                        cf = matches[0][0]
-                        cs = matches[0][1]
-                        evalpub = pfmri.get_publisher()
-                        if len(matches) > 1 and not cf.get_publisher() == ppub \
-                            and cf.get_publisher() != evalpub:
-                                # If more than one match was returned, and it
-                                # wasn't for the preferred publisher or for the
-                                # same publisher as the fmri being evaluated,
-                                # then try to find a match that has the same
-                                # publisher as the fmri being evaluated.
-                                for f, st in matches[1:]:
-                                        if f.get_publisher() == evalpub:
-                                                cf = f
-                                                cs = st
-                                                break
+                info = {
+                    "operation": self.__planned_op,
+                    "old_fmri" : old_fmri,
+                    "new_fmri" : new_fmri,
+                    "reference": reference
+                }
 
-                        if cs["obsolete"]:
-                                # Depending on an obsolete package is an error,
-                                # unless the dependency is just there to move
-                                # the package forward.
-                                if constraint.presence == constraint.ALWAYS:
-                                        raise api_errors.PlanCreationException(
-                                            obsolete=((pfmri, cf),))
-
-                                vi = self.image.get_version_installed(cf)
-                                if vi:
-                                        self.evaluate_fmri_removal(vi)
-                        else:
-                                self.propose_fmri(cf)
-                                self.evaluate_fmri(cf)
+                s = "(%s)" % ";".join([
+                    "%s=%s" % (key, info[key]) for key in info
+                    if info[key] is not None
+                ])
 
-        def add_pkg_plan(self, pfmri):
-                """add a pkg plan to imageplan for fully evaluated frmi"""
-                m = self.image.get_manifest(pfmri)
-                pp = pkgplan.PkgPlan(self.image, self.progtrack, \
-                    self.check_cancelation)
-
-                if self.old_excludes != self.new_excludes:
-                        if self.image.is_pkg_installed(pfmri):
-                                pp.propose_reinstall(pfmri, m)
-                        else:
-                                pp.propose_destination(pfmri, m)
-                else:
-                        try:
-                                pp.propose_destination(pfmri, m)
-                        except RuntimeError:
-                                logger.info("pkg: %s already installed" % pfmri)
-                                return
-
-                pp.evaluate(self.old_excludes, self.new_excludes)
-
-                if pp.origin_fmri:
-                        self.target_update_count += 1
-                else:
-                        self.target_insall_count += 1
+                if new_fmri:
+                        return None, s # only report new on upgrade
+                return s, None         # handle uninstall
+                        
+        def evaluate(self, verbose=False):
+                """Given already determined fmri changes, 
+                build pkg plans and figure out exact impact of
+                proposed changes"""
 
-                self.pkg_plans.append(pp)
+                assert self.state == EVALUATED_PKGS, self
 
-        def evaluate_fmri_removal(self, pfmri):
-                # prob. needs breaking up as well
-                assert self.image.has_manifest(pfmri)
-
-                self.progtrack.evaluate_progress(pfmri)
-
-                dependents = set(self.image.get_dependents(pfmri,
-                    self.progtrack))
+                if self.__noexecute and not verbose:
+                        return # optimize performance if no one cares
 
-                # Don't consider those dependencies already being removed in
-                # this imageplan transaction.
-                dependents = dependents.difference(self.target_rem_fmris)
-
-                if dependents and not self.recursive_removal:
-                        raise api_errors.NonLeafPackageException(pfmri,
-                            dependents)
-
-                pp = pkgplan.PkgPlan(self.image, self.progtrack, \
-                    self.check_cancelation)
-
-                self.image.state.set_target(pfmri, self.__intent)
-                m = self.image.get_manifest(pfmri)
+                # prefetch manifests
+                                         
+                prefetch_list = [] # manifest, intents to be prefetched
+                eval_list = []     # oldfmri, oldintent, newfmri, newintent
+                                   # prefetched intents omitted
 
-                try:
-                        pp.propose_removal(pfmri, m)
-                except RuntimeError:
-                        self.image.state.set_target()
-                        logger.info("pkg %s not installed" % pfmri)
-                        return
-
-                pp.evaluate([], self.old_excludes)
+                for oldfmri, newfmri in self.__fmri_changes:
+                        self.__progtrack.evaluate_progress(oldfmri)
+                        old_in, new_in = self.__create_intent(oldfmri, newfmri)
+                        if oldfmri:
+                                if not self.image.has_manifest(oldfmri):
+                                        prefetch_list.append((oldfmri, old_in))
+                                        old_in = None # so we don't send it twice
+                        if newfmri:
+                                if not self.image.has_manifest(newfmri):
+                                        prefetch_list.append((newfmri, new_in))
+                                        new_in = None
+                        eval_list.append((oldfmri, old_in, newfmri, new_in))
 
-                for d in dependents:
-                        if self.is_proposed_rem_fmri(d):
-                                continue
-                        if not self.image.has_version_installed(d):
-                                continue
-                        self.target_rem_fmris.append(d)
-                        self.progtrack.evaluate_progress(d)
-                        self.evaluate_fmri_removal(d)
-
-                # Post-order append will ensure topological sorting for acyclic
-                # dependency graphs.  Cycles need to be arbitrarily broken, and
-                # are done so in the loop above.
-                self.pkg_plans.append(pp)
-                self.image.state.set_target()
+                self.image.transport.prefetch_manifests(prefetch_list, 
+                    progtrack=self.__progtrack,
+                    ccancel=self.__check_cancelation)
 
-        def evaluate(self):
-                assert self.state == UNEVALUATED
+                for oldfmri, old_in, newfmri, new_in in eval_list:
+                        pp = pkgplan.PkgPlan(self.image, self.__progtrack,
+                            self.__check_cancelation)
 
-                outstring = ""
+                        pp.propose(oldfmri, self.__get_manifest(oldfmri, old_in),
+                                   newfmri, self.__get_manifest(newfmri, new_in))
+
+                        pp.evaluate(self.__old_excludes, self.__new_excludes)
 
-                # Operate on a copy, as it will be modified in flight.
-                for f in self.target_fmris[:]:
-                        self.progtrack.evaluate_progress(f)
-                        try:
-                                self.evaluate_fmri(f)
-                        except KeyError, e:
-                                outstring += "Attempting to install %s " \
-                                    "causes:\n\t%s\n" % (f.get_name(), e)
-                if outstring:
-                        raise RuntimeError("No packages were installed because "
-                            "package dependencies could not be satisfied\n" +
-                            outstring)
+                        if pp.origin_fmri and pp.destination_fmri:
+                                self.__target_update_count += 1
+                        elif pp.destination_fmri:
+                                self.__target_install_count += 1
+                        elif pp.origin_fmri:
+                                self.__target_removal_count += 1
 
-                for f in self.target_fmris:
-                        self.add_pkg_plan(f)
-                        self.progtrack.evaluate_progress(f)
+                        self.pkg_plans.append(pp)
 
-                for f in self.target_rem_fmris[:]:
-                        self.evaluate_fmri_removal(f)
-                        self.progtrack.evaluate_progress(f)
-
-                # we now have a workable set of packages to add/upgrade/remove
+                # we now have a workable set of pkgplans to add/upgrade/remove
                 # now combine all actions together to create a synthetic single
                 # step upgrade operation, and handle editable files moving from
                 # package to package.  See theory comment in execute, below.
 
-                self.state = EVALUATED_PKGS
-
                 self.removal_actions = [
                     (p, src, dest)
                     for p in self.pkg_plans
@@ -503,9 +557,9 @@
                     for src, dest in p.gen_install_actions()
                 ]
 
-                self.progtrack.evaluate_progress()
+                self.__progtrack.evaluate_progress()
 
-                self.actuators = actuator.Actuator()
+                self.__actuators = actuator.Actuator()
 
                 # iterate over copy of removals since we're modding list
                 # keep track of deletion count so later use of index works
@@ -519,6 +573,14 @@
                                 del self.removal_actions[i - deletions]
                                 deletions += 1
                                 continue
+                        # remove link removal if link is still in final image
+                        # (implement reference count on removal due to borked pkgs)
+                        if a[1].name == "link" and \
+                            os.path.normpath(a[1].attrs["path"]) in \
+                            self.__get_symlinks():
+                                del self.removal_actions[i - deletions]
+                                deletions += 1
+                                continue
                         # store names of files being removed under own name
                         # or original name if specified
                         if a[1].name == "file":
@@ -530,9 +592,9 @@
                                     (i - deletions,
                                     id(self.removal_actions[i-deletions][1]))
 
-                        self.actuators.scan_removal(a[1].attrs)
+                        self.__actuators.scan_removal(a[1].attrs)
 
-                self.progtrack.evaluate_progress()
+                self.__progtrack.evaluate_progress()
 
                 for a in self.install_actions:
                         # In order to handle editable files that move their path
@@ -553,9 +615,9 @@
                                     "save_file"] = cache_name
                                 a[2].attrs["save_file"] = cache_name
 
-                        self.actuators.scan_install(a[2].attrs)
+                        self.__actuators.scan_install(a[2].attrs)
 
-                self.progtrack.evaluate_progress()
+                self.__progtrack.evaluate_progress()
                 # Go over update actions
                 l_actions = self.get_actions("hardlink",
                     lambda a: a.get_target_path())
@@ -575,8 +637,8 @@
                         # scan both old and new actions
                         # repairs may result in update action w/o orig action
                         if a[1]:
-                                self.actuators.scan_update(a[1].attrs)
-                        self.actuators.scan_update(a[2].attrs)
+                                self.__actuators.scan_update(a[1].attrs)
+                        self.__actuators.scan_update(a[2].attrs)
                 self.update_actions.extend(l_refresh)
 
                 # sort actions to match needed processing order
@@ -584,7 +646,6 @@
                 self.update_actions.sort(key = lambda obj:obj[2])
                 self.install_actions.sort(key = lambda obj:obj[2])
 
-                remove_npkgs = len(self.target_rem_fmris)
                 npkgs = 0
                 nfiles = 0
                 nbytes = 0
@@ -599,17 +660,22 @@
                         # install.
                         npkgs += 1
 
-                self.progtrack.download_set_goal(npkgs, nfiles, nbytes)
+                self.__progtrack.download_set_goal(npkgs, nfiles, nbytes)
 
-                self.progtrack.evaluate_done(self.target_insall_count, \
-                    self.target_update_count, remove_npkgs)
+                self.__progtrack.evaluate_done(self.__target_install_count, \
+                    self.__target_update_count, self.__target_removal_count)
 
                 self.state = EVALUATED_OK
 
+
         def nothingtodo(self):
                 """ Test whether this image plan contains any work to do """
 
-                return not self.pkg_plans
+                # handle case w/ -n no verbose
+                if self.state == EVALUATED_PKGS:
+                        return not self.__fmri_changes
+                elif self.state >= EVALUATED_OK:
+                        return not self.pkg_plans
 
         def preexecute(self):
                 """Invoke the evaluated image plan
@@ -636,14 +702,14 @@
                                 ind = indexer.Indexer(self.image,
                                     self.image.get_manifest,
                                     self.image.get_manifest_path,
-                                    progtrack=self.progtrack,
-                                    excludes=self.old_excludes)
+                                    progtrack=self.__progtrack,
+                                    excludes=self.__old_excludes)
                                 if ind.check_index_existence():
                                         try:
                                                 ind.check_index_has_exactly_fmris(
                                                         self.image.gen_installed_pkg_names())
                                         except se.IncorrectIndexFileHash, e:
-                                                self.preexecuted_indexing_error = \
+                                                self.__preexecuted_indexing_error = \
                                                     api_errors.WrapSuccessfulIndexingException(
                                                         e,
                                                         traceback.format_exc(),
@@ -659,7 +725,7 @@
                                 # there's a problem updating the index on the
                                 # new image, that error needs to be
                                 # communicated to the user.
-                                self.preexecuted_indexing_error = \
+                                self.__preexecuted_indexing_error = \
                                     api_errors.WrapSuccessfulIndexingException(
                                         e, traceback.format_exc(),
                                         traceback.format_stack())
@@ -684,7 +750,7 @@
                                             e.filename)
                                 raise
 
-                        self.progtrack.download_done()
+                        self.__progtrack.download_done()
                 except:
                         self.state = PREEXECUTED_ERROR
                         raise
@@ -745,47 +811,47 @@
 
                 # It's necessary to do this check here because the state of the
                 # image before the current operation is performed is desired.
-                empty_image = self.is_image_empty()
+                empty_image = self.__is_image_empty()
 
-                self.actuators.exec_prep(self.image)
+                self.__actuators.exec_prep(self.image)
 
-                self.actuators.exec_pre_actuators(self.image)
+                self.__actuators.exec_pre_actuators(self.image)
 
                 try:
                         try:
 
                                 # execute removals
 
-                                self.progtrack.actions_set_goal(
+                                self.__progtrack.actions_set_goal(
                                     _("Removal Phase"),
                                     len(self.removal_actions))
                                 for p, src, dest in self.removal_actions:
                                         p.execute_removal(src, dest)
-                                        self.progtrack.actions_add_progress()
-                                self.progtrack.actions_done()
+                                        self.__progtrack.actions_add_progress()
+                                self.__progtrack.actions_done()
 
                                 # execute installs
 
-                                self.progtrack.actions_set_goal(
+                                self.__progtrack.actions_set_goal(
                                     _("Install Phase"),
                                     len(self.install_actions))
 
                                 for p, src, dest in self.install_actions:
                                         p.execute_install(src, dest)
-                                        self.progtrack.actions_add_progress()
-                                self.progtrack.actions_done()
+                                        self.__progtrack.actions_add_progress()
+                                self.__progtrack.actions_done()
 
                                 # execute updates
 
-                                self.progtrack.actions_set_goal(
+                                self.__progtrack.actions_set_goal(
                                     _("Update Phase"),
                                     len(self.update_actions))
 
                                 for p, src, dest in self.update_actions:
                                         p.execute_update(src, dest)
-                                        self.progtrack.actions_add_progress()
+                                        self.__progtrack.actions_add_progress()
 
-                                self.progtrack.actions_done()
+                                self.__progtrack.actions_done()
 
                                 # handle any postexecute operations
                                 for p in self.pkg_plans:
@@ -795,7 +861,11 @@
                                 self.image.save_pkg_state()
 
                                 # write out variant changes to the image config
-                                self.image.image_config_update()
+                                if self.__variant_change:
+                                        self.image.image_config_update(
+                                            self.__new_variants,
+                                            self.__new_facets)
+
                         except EnvironmentError, e:
                                 if e.errno == errno.EACCES or \
                                     e.errno == errno.EPERM:
@@ -805,24 +875,21 @@
                                         raise api_errors.ReadOnlyFileSystemException(e.filename)
                                 raise
                 except:
-                        self.actuators.exec_fail_actuators(self.image)
+                        self.__actuators.exec_fail_actuators(self.image)
                         raise
                 else:
-                        self.actuators.exec_post_actuators(self.image)
+                        self.__actuators.exec_post_actuators(self.image)
 
                 self.state = EXECUTED_OK
 
                 # reduce memory consumption
 
-                del self.removal_actions
-                del self.update_actions
-                del self.install_actions
-
-                del self.target_rem_fmris
-                del self.target_fmris
-                del self.__directories
-
-                del self.actuators
+                self.removal_actions = []
+                self.update_actions  = []
+                self.install_actions = []
+                self.__fmri_changes  = []
+                self.__directories   = []
+                self.__actuators     = []
 
                 # Perform the incremental update to the search indexes
                 # for all changed packages
@@ -833,19 +900,19 @@
                             in self.pkg_plans
                         ]
                         del self.pkg_plans
-                        self.progtrack.actions_set_goal(_("Index Phase"),
+                        self.__progtrack.actions_set_goal(_("Index Phase"),
                             len(plan_info))
                         self.image.update_index_dir()
                         ind = indexer.Indexer(self.image,
                             self.image.get_manifest,
                             self.image.get_manifest_path,
-                            progtrack=self.progtrack,
-                            excludes=self.new_excludes)
+                            progtrack=self.__progtrack,
+                            excludes=self.__new_excludes)
                         try:
                                 if empty_image:
                                         ind.setup()
                                 if empty_image or ind.check_index_existence():
-                                        ind.client_update_index((self.filters,
+                                        ind.client_update_index(([],
                                             plan_info), self.image)
                         except KeyboardInterrupt:
                                 raise
@@ -873,8 +940,8 @@
                                         ind = indexer.Indexer(self.image,
                                             self.image.get_manifest,
                                             self.image.get_manifest_path,
-                                            progtrack=self.progtrack,
-                                            excludes=self.new_excludes)
+                                            progtrack=self.__progtrack,
+                                            excludes=self.__new_excludes)
                                         ind.rebuild_index_from_scratch(
                                             self.image.gen_installed_pkgs())
                                 except Exception, e:
@@ -885,12 +952,237 @@
                                     api_errors.WrapSuccessfulIndexingException(
                                         e, traceback.format_exc(),
                                         traceback.format_stack())
-                        if self.preexecuted_indexing_error is not None:
-                                raise self.preexecuted_indexing_error
+                        if self.__preexecuted_indexing_error is not None:
+                                raise self.__preexecuted_indexing_error
 
-        def is_image_empty(self):
+        def __is_image_empty(self):
                 try:
                         self.image.gen_installed_pkg_names().next()
                         return False
                 except StopIteration:
                         return True
+
+        def match_user_fmris(self, patterns, all_known, pub_ranks, installed_pubs):
+                """Given a user-specified list of patterns, return a dictionary
+                of matching fmris:
+
+                {pkgname: [fmri1, fmri2, ...]
+                 pkgname: [fmri1, fmri2, ...],
+                 ...
+                }
+
+                Constraint used is always AUTO as per expected UI behavior.
+                If all_known is true, matching is done against all known packages,
+                otherwise just all installed pkgs.
+
+                Note that patterns starting w/ pkg:/ require an exact match; patterns 
+                containing '*' will use fnmatch rules; the default trailing match
+                rules are used for remaining patterns.
+
+                Exactly duplicated patterns are ignored.
+
+                Routine raises PlanCreationException if errors occur:
+                it is illegal to specify multiple different patterns that match
+                the same pkg name.  Only patterns that contain wildcards are allowed
+                to match multiple packages.
+
+                Fmri lists are trimmed by publisher, either by pattern specification,
+                installed version or publisher ranking, in that order when all_known
+                is True.
+                """
+
+                # problems we check for
+                illegals      = []
+                nonmatch      = []
+                multimatch    = []
+                not_installed = []
+                multispec     = []
+                wrongpub      = []
+
+                matchers = []
+                fmris    = []
+                pubs     = []
+                versions = []
+
+                wildcard_patterns = []
+
+                renamed_fmris = {}
+                obsolete_fmris = []
+
+                # ignore dups
+                patterns = list(set(patterns))
+                # print patterns, all_known, pub_ranks, installed_pubs
+
+                # figure out which kind of matching rules to employ
+                try:
+                        for pat in patterns:
+                                if "*" in pat or "?" in pat:
+                                        matcher = pkg.fmri.glob_match
+                                        fmri = pkg.fmri.MatchingPkgFmri(
+                                                                pat, "5.11")
+                                        wildcard_patterns.append(pat)
+                                elif pat.startswith("pkg:/"):
+                                        matcher = pkg.fmri.exact_name_match
+                                        fmri = pkg.fmri.PkgFmri(pat,
+                                                            "5.11")
+                                else:
+                                        matcher = pkg.fmri.fmri_match
+                                        fmri = pkg.fmri.PkgFmri(pat,
+                                                            "5.11")
+
+                                matchers.append(matcher)
+                                pubs.append(fmri.get_publisher())
+                                versions.append(fmri.version)
+                                fmris.append(fmri)
+
+                except pkg.fmri.IllegalFmri, e:
+                        illegals.append(e)
+                
+                # Create a dictionary of patterns, with each value being
+                # a dictionary of pkg names & fmris that match that pattern.
+                ret = dict(zip(patterns, [dict() for i in patterns]))
+
+                # keep track of publishers we reject due to implicit selection of
+                # installed publisher to produce better error message.
+                rejected_pubs = {}
+
+                if all_known:
+                        cat = self.image.get_catalog(self.image.IMG_CATALOG_KNOWN)
+                        info_needed = [pkg.catalog.Catalog.DEPENDENCY]
+                else:
+                        cat = self.image.get_catalog(self.image.IMG_CATALOG_INSTALLED)
+                        info_needed = []
+
+                for name in cat.names():
+                        for pat, matcher, fmri, version, pub in \
+                            zip(patterns, matchers, fmris, versions, pubs):
+                                if not matcher(name, fmri.pkg_name):
+                                        continue # name doesn't match
+                                for ver, entries in cat.entries_by_version(name, 
+                                    info_needed=info_needed):
+                                        if version and not ver.is_successor(version,
+                                            pkg.version.CONSTRAINT_AUTO):
+                                                continue # version doesn't match
+                                        for f, metadata in entries:
+                                                fpub = f.get_publisher()
+                                                if pub and pub != fpub:
+                                                        continue # specified pubs conflict
+                                                elif not pub and all_known and \
+                                                    name in installed_pubs and \
+                                                    pub_ranks[installed_pubs[name]][1] \
+                                                    == True and installed_pubs[name] != \
+                                                    fpub:
+                                                        rejected_pubs.setdefault(pat, 
+                                                            set()).add(fpub)                                                            
+                                                        continue # installed sticky pub
+                                                ret[pat].setdefault(f.pkg_name, 
+                                                    []).append(f)
+                                                states = metadata["metadata"]["states"]
+                                                if self.image.PKG_STATE_OBSOLETE in states:
+                                                        obsolete_fmris.append(f)
+                                                if self.image.PKG_STATE_RENAMED in states and \
+                                                    "actions" in metadata:
+                                                        renamed_fmris[f] = metadata["actions"]
+
+                # remove multiple matches if all versions are obsolete
+                for p in patterns:                
+                        if len(ret[p]) > 1 and p not in wildcard_patterns:
+                                # create dictionary of obsolete status vs pkg_name
+                                obsolete = dict([                                        
+                                        (pkg_name, reduce(operator.or_, 
+                                        [f in obsolete_fmris for f in ret[p][pkg_name]]))
+                                        for pkg_name in ret[p]
+                                        ])
+                                # remove all obsolete match if non-obsolete match also exists
+                                if set([True, False]) == set(obsolete.values()):
+                                        for pkg_name in obsolete:
+                                                if obsolete[pkg_name]:
+                                                        del ret[p][pkg_name]
+
+                # remove newer multiple match if renamed version exists
+                for p in patterns:                
+                        if len(ret[p]) > 1 and p not in wildcard_patterns:
+                                targets = []
+                                renamed_matches = (
+                                    pfmri
+                                    for pkg_name in ret[p]
+                                    for pfmri in ret[p][pkg_name]
+                                    if pfmri in renamed_fmris
+                                    )
+                                for f in renamed_matches:
+                                        for a in renamed_fmris[f]:
+                                                a = pkg.actions.fromstr(a)
+                                                if a.name != "depend":
+                                                        continue
+                                                if a.attrs["type"] != "require":
+                                                        continue
+                                                targets.append(pkg.fmri.PkgFmri(
+                                                    a.attrs["fmri"], "5.11"
+                                                    ).pkg_name)
+
+                                for pkg_name in ret[p].keys():
+                                        if pkg_name in targets:
+                                                del ret[p][pkg_name]
+
+                matchdict = {} 
+                for p in patterns:
+                        l = len(ret[p])
+                        if l == 0: # no matches at all
+                                if not all_known or p not in rejected_pubs:
+                                        nonmatch.append(p)
+                                elif p in rejected_pubs:
+                                        wrongpub.append((p, rejected_pubs[p]))
+                        elif l > 1 and p not in wildcard_patterns:  # multiple matches
+                                multimatch.append((p, [n for n in ret[p]]))
+                        else:      # single match or wildcard
+                                for k in ret[p].keys(): # for each matching package name
+                                        matchdict.setdefault(k, []).append(p)
+                
+                for name in matchdict:
+                        if len(matchdict[name]) > 1: # different pats, same pkg
+                                multispec.append(tuple([name] + matchdict[name]))
+
+                if not all_known:
+                        not_installed, nonmatch = nonmatch, not_installed
+                        
+                if illegals or nonmatch or multimatch or not_installed or \
+                    multispec or wrongpub:
+                        raise api_errors.PlanCreationException(unmatched_fmris=nonmatch,
+                            multiple_matches=multimatch, illegal=illegals,
+                            missing_matches=not_installed, multispec=multispec, wrong_publishers=wrongpub)
+                # merge patterns together now that there are no conflicts
+                proposed_dict = {}
+                for d in ret.values():
+                        proposed_dict.update(d)
+                
+                # eliminate lower ranked publishers
+
+                if all_known: # no point for installed pkgs....
+                        for pkg_name in proposed_dict:
+                                pubs_found = set([
+                                                f.get_publisher()
+                                                for f in proposed_dict[pkg_name]
+                                                ])
+                                # 1000 is hack for installed but unconfigured publishers
+                                best_pub = sorted([
+                                                (pub_ranks.get(p, (1000, True))[0], p) 
+                                                for p in pubs_found
+                                                ])[0][1]
+
+                                proposed_dict[pkg_name] = [
+                                        f
+                                        for f in proposed_dict[pkg_name]
+                                        if f.get_publisher() == best_pub
+                                        ]
+
+                # construct references so that we can know which pattern
+                # generated which fmris...
+
+                references = dict([
+                        (f, p)
+                        for p in ret.keys()
+                        for flist in ret[p].values()
+                        for f in flist
+                        ])
+                
+                return proposed_dict, references
--- a/src/modules/client/imagestate.py	Tue Nov 17 17:06:35 2009 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,272 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-
-# Indicates that the fmri is being used strictly for information.
-INTENT_INFO = "info"
-
-# Indicates that the fmri is being used to perform a dry-run evaluation of an
-# image-modifying operation.
-INTENT_EVALUATE = "evaluate"
-
-# Indicates that the fmri is being processed as part of an image-modifying
-# operation.
-INTENT_PROCESS = "process"
-
-class ImageState(object):
-        """An ImageState object provides a temporary place to store information
-        about operations that are being performed on an image (e.g. fmris of
-        packages that are being installed, uninstalled, etc.).
-        """
-
-        def __init__(self, image):
-                self.__fmri_intent_stack = []
-                self.__image = image
-
-                # A place to keep track of which manifests (based on fmri and
-                # operation) have already provided intent information.
-                self.__touched_manifests = {}
-
-        def __str__(self):
-                return "%s" % self.__fmri_intent_stack
-
-        def set_target(self, fmri=None, intent=INTENT_INFO):
-                """Indicates that the given fmri is currently being evaluated
-                or manipulated for an image operation.  A value of None for
-                fmri will clear the current target.
-                """
-                if fmri:
-                        self.__fmri_intent_stack.append((fmri, intent))
-                else:
-                        del self.__fmri_intent_stack[-1]
-
-        def get_target(self):
-                """Returns a tuple of the format (fmri, intent) representing an
-                fmri currently being evaluated or manipulated for an image
-                operation.  A tuple containing (None, None) will be returned if
-                no target has been set.
-                """
-                try:
-                        return self.__fmri_intent_stack[-1]
-                except IndexError:
-                        return (None, None)
-
-        def get_targets(self):
-                """Returns a list of tuples of the format (fmri, intent)
-                representing fmris currently being evaluated or manipulated for
-                an image operation.  An empty list is returned if there are no
-                targets.
-                """
-                return self.__fmri_intent_stack[:]
-
-        def get_touched_manifest(self, fmri, intent):
-                """Returns whether intent information has been provided for the
-                given fmri."""
-
-                op = self.__image.history.operation_name
-                if not op:
-                        # The client may not have provided the name of the
-                        # operation it is performing.
-                        op = "unknown"
-
-                if op not in self.__touched_manifests:
-                        # No intent information has been provided for fmris
-                        # for the current operation.
-                        return False
-
-                f = str(fmri)
-                if f not in self.__touched_manifests[op]:
-                        # No intent information has been provided for this
-                        # fmri for the current operation.
-                        return False
-
-                if intent not in self.__touched_manifests[op][f]:
-                        # No intent information has been provided for this
-                        # fmri for the current operation and reason.
-                        return False
-
-                return True
-
-        def set_touched_manifest(self, fmri, intent):
-                """Records that intent information has been provided for the
-                given fmri's manifest."""
-
-                op = self.__image.history.operation_name
-                if not op:
-                        # The client may not have provided the name of the
-                        # operation it is performing.
-                        op = "unknown"
-
-                if op not in self.__touched_manifests:
-                        # No intent information has yet been provided for fmris
-                        # for the current operation.
-                        self.__touched_manifests[op] = {}
-
-                f = str(fmri)
-                if f not in self.__touched_manifests[op]:
-                        # No intent information has yet been provided for this
-                        # fmri for the current operation.
-                        self.__touched_manifests[op][f] = { intent: None }
-                else:
-                        # No intent information has yet been provided for this
-                        # fmri for the current operation and reason.
-                        self.__touched_manifests[op][f][intent] = None
-
-        def get_intent_str(self, fmri):
-                """Returns a string representing the intent of the client
-                in retrieving information based on the operation information
-                provided by the image history object.
-                """
-
-                op = self.__image.history.operation_name
-                if not op:
-                        # The client hasn't indicated what operation
-                        # is executing.
-                        op = "unknown"
-
-                reason = INTENT_INFO
-                target_pkg = None
-                initial_pkg = None
-                needed_by_pkg = None
-                current_pub = fmri.get_publisher()
-
-                targets = self.get_targets()
-                if targets:
-                        # Attempt to determine why the client is retrieving the
-                        # manifest for this fmri and what its current target is.
-                        target, reason = targets[-1]
-
-                        # Compare the FMRIs with no publisher information
-                        # embedded.
-                        na_current = fmri.get_fmri(anarchy=True,
-                            include_scheme=False)
-                        na_target = target.get_fmri(anarchy=True,
-                            include_scheme=False)
-
-                        if na_target == na_current:
-                                # Only provide this information if the fmri for
-                                # the manifest being retrieved matches the fmri
-                                # of the target.  If they do not match, then the
-                                # target fmri is being retrieved for information
-                                # purposes only (e.g.  dependency calculation,
-                                # etc.).
-                                target_pub = target.get_publisher()
-                                if target_pub == current_pub:
-                                        # Prevent providing information across
-                                        # publishers.
-                                        target_pkg = na_target
-                                else:
-                                        target_pkg = "unknown"
-
-                                # The very first fmri should be the initial
-                                # target that caused the current and needed_by
-                                # fmris to be retrieved.
-                                initial = targets[0][0]
-                                initial_pub = initial.get_publisher()
-                                if initial_pub == current_pub:
-                                        # Prevent providing information across
-                                        # publishers.
-                                        initial_pkg = initial.get_fmri(
-                                            anarchy=True, include_scheme=False)
-
-                                        if target_pkg == initial_pkg:
-                                                # Don't bother sending the
-                                                # target information if it is
-                                                # the same as the initial target
-                                                # (i.e. the manifest for [email protected]
-                                                # is being retrieved because the
-                                                # user is installing [email protected]).
-                                                target_pkg = None
-
-                                else:
-                                        # If they didn't match, indicate that
-                                        # the needed_by_pkg was a dependency of
-                                        # another, but not which one.
-                                        initial_pkg = "unknown"
-
-                                if len(targets) > 1:
-                                        # The fmri responsible for the current
-                                        # one being processed should immediately
-                                        # precede the current one in the target
-                                        # list.
-                                        needed_by = targets[-2][0]
-
-                                        needed_by_pub = \
-                                            needed_by.get_publisher()
-                                        if needed_by_pub == current_pub:
-                                                # To prevent dependency
-                                                # information being shared
-                                                # across publisher boundaries,
-                                                # publishers must match.
-                                                needed_by_pkg = \
-                                                    needed_by.get_fmri(
-                                                    anarchy=True,
-                                                    include_scheme=False)
-                                        else:
-                                                # If they didn't match, indicate
-                                                # that the package is needed by
-                                                # another, but not which one.
-                                                needed_by_pkg = "unknown"
-                else:
-                        # An operation is being performed that has not provided
-                        # any target information and is likely for informational
-                        # purposes only.  Assume the "initial target" is what is
-                        # being retrieved.
-                        initial_pkg = fmri.get_fmri(anarchy=True,
-                            include_scheme=False)
-
-                prior_version = None
-                if reason != INTENT_INFO:
-                        # Only provide version information for non-informational
-                        # operations.
-                        prior = self.__image.get_version_installed(fmri)
-
-                        try:
-                                prior_version = prior.version
-                        except AttributeError:
-                                # We didn't get a match back, drive on.
-                                pass
-                        else:
-                                prior_pub = prior.get_publisher()
-                                if prior_pub != current_pub:
-                                        # Prevent providing information across
-                                        # publishers by indicating that a prior
-                                        # version was installed, but not which
-                                        # one.
-                                        prior_version = "unknown"
-
-                info = {
-                    "operation": op,
-                    "prior_version": prior_version,
-                    "reason": reason,
-                    "target": target_pkg,
-                    "initial_target": initial_pkg,
-                    "needed_by": needed_by_pkg,
-                }
-
-                # op/prior_version/reason/initial_target/needed_by/
-                return "(%s)" % ";".join([
-                    "%s=%s" % (key, info[key]) for key in info
-                    if info[key] is not None
-                ])
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/client/pkg_solver.py	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,923 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+#
+
+import pkg.client.api_errors as api_errors
+import pkg.catalog           as catalog
+import pkg.solver
+import pkg.version           as version
+import time
+import sys
+
+from pkg.misc import EmptyI
+
+
# Human-readable solver lifecycle states; reported via PkgSolver.__str__().
SOLVER_INIT    = "Initialized"
SOLVER_OXY     = "Not poseable"
SOLVER_FAIL    = "Failed"
SOLVER_SUCCESS = "Succeeded"
+
+class PkgSolver(object):
+
        def __init__(self, cat, installed_fmris, pub_ranks, variants, progtrack):
                """Create a PkgSolver instance.

                'cat' is a catalog that should contain all known pkgs.

                'installed_fmris' is a dict, indexed by pkg name, of the
                fmris currently installed in the image.

                'pub_ranks' is a dict containing a (rank, stickiness)
                tuple for each publisher; disabled publishers should not
                be included.

                'variants' is the set of variants supported by the image.

                'progtrack' is the progress tracker used to report
                evaluation progress."""
                self.__catalog = cat
                self.__installed_fmris = {}	# indexed by stem
                self.__publisher = {}		# indexed by stem
                self.__possible_dict = {}	# indexed by stem
                self.__pub_ranks = pub_ranks    # rank indexed by pub
                self.__trim_dict = {}           # fmris trimmed from
                				# consideration

                self.__pub_trim = {}		# pkg names already
                                                # trimmed by pub.
                self.__installed_fmris = installed_fmris.copy()

                # Remember the current publisher only for pkgs from sticky
                # publishers; pkgs from non-sticky publishers may move.
                for f in installed_fmris.values(): # record only sticky pubs
                        pub = f.get_publisher()
                        if self.__pub_ranks[pub][1]:
                                self.__publisher[f.pkg_name] = f.get_publisher()

                self.__id2fmri = {} 		# map ids -> fmris
                self.__fmri2id = {} 		# and reverse

                self.__solver = pkg.solver.msat_solver()

                self.__poss_set = set()         # possible fmris after assign
                self.__progtrack = progtrack    # progress tracker

                self.__addclause_failure = False

                self.__variant_dict = {}        # fmris -> variant cache
                self.__variants = variants      # variants supported by image

                self.__cache = {}
                self.__trimdone = False         # indicate we're finished trimming
                self.__fmri_state = {}          # cache of obsolete, renamed bits
                # solver bookkeeping, kept so __str__ can print something
                # reasonable and gen_failure_report has data
                self.__state = SOLVER_INIT
                self.__iterations = 0
                self.__clauses     = 0
                self.__variables   = 0
                self.__timings = []
                self.__start_time = 0
                self.__failure_info = ""
+
+        def __str__(self):
+
+                s = "Solver: [" 
+                if self.__state in [SOLVER_FAIL, SOLVER_SUCCESS]:
+                        s += " Variables: %d Clauses: %d Iterations: %d" % (
+                            self.__variables, self.__clauses, self.__iterations)
+                s += " State: %s]" % self.__state
+
+                s += "\nTimings: ["
+                s += ", ".join(["%s: %s" % a for a in self.__timings])
+                s += "]"
+                return s
+
+        def __timeit(self, phase=None):
+                """Add timing records; set phase to None to reset"""
+                if phase == None:
+                        self.__start_time = time.time()
+                        self.__timings = []
+                else:
+                        now = time.time()
+                        self.__timings.append((phase, now - self.__start_time))
+                        self.__start_time = now
+                        
+        def gen_failure_report(self, verbose):
+                """grab saved failure list"""
+                if not verbose:
+                        return "\nUse -v option for more details"
+                else:
+                        return "\n".join(self.__failure_info)
+
        def solve_install(self, existing_freezes, proposed_dict, excludes=EmptyI):
                """Compute the set of fmris to be installed in the image.

                'existing_freezes' is a list of incorp. style fmris that
                constrain pkg motion.

                'proposed_dict' contains user specified fmris indexed by
                pkg_name; it is modified in place as unusable versions are
                trimmed away.

                Returns the set of FMRIs to be installed/upgraded in the
                system; raises api_errors.PlanCreationException when no
                solution can be found."""

                self.__state = SOLVER_OXY

                self.__progtrack.evaluate_progress()

                self.__timeit()

                # proposed_dict already contains publisher selection logic,
                # so overwrite __publisher dict w/ values therein
                for name in proposed_dict:
                        self.__publisher[name] = proposed_dict[name][0].get_publisher()

                self.__progtrack.evaluate_progress()

                # find list of incorps we don't let change as a side
                # effect of other changes; exclude any specified on
                # command line
                inc_list, con_lists = self.__get_installed_unbound_inc_list(proposed_dict,
                    excludes=excludes)

                self.__progtrack.evaluate_progress()

                # trim fmris we cannot install because they're older
                self.__timeit("phase 1")

                for f in self.__installed_fmris.values():
                        self.__trim_older(f)

                self.__progtrack.evaluate_progress()

                # trim fmris we excluded via proposed_fmris
                for name in proposed_dict:
                        reason = _("This version excluded by specified installation version")
                        self.__trim(set(self.__get_catalog_fmris(name)) - 
                            set(proposed_dict[name]), reason)

                self.__timeit("phase 2")
                self.__progtrack.evaluate_progress()

                # now trim pkgs we cannot update due to unconstrained incorporations
                for i, flist in zip(inc_list, con_lists):
                        reason = _("This version is excluded by installed incorporation %s") % i
                        # trim everything outside CONSTRAINT_AUTO of the
                        # incorporation itself and of each constrained pkg
                        self.__trim(self.__comb_auto_fmris(i)[1], reason)
                        for f in flist:
                                self.__trim(self.__comb_auto_fmris(f)[1], reason)

                self.__timeit("phase 3")
                self.__progtrack.evaluate_progress()

                # now trim any pkgs we cannot update due to freezes
                for f in existing_freezes:
                        reason = _("This version is excluded by freeze %s") % f
                        self.__trim(self.__comb_auto_fmris(f)[1], reason)

                self.__progtrack.evaluate_progress()

                # elide any proposed versions that don't match variants (arch usually)
                self.__timeit("phase 4")
                for name in proposed_dict:
                        for fmri in proposed_dict[name]:
                                self.__trim_nonmatching_variants(fmri)

                # remove any versions from proposed_dict that are in trim_dict
                self.__timeit("phase 5")
                for name in proposed_dict:
                        tv = self.__dotrim(proposed_dict[name])
                        if not tv:
                                # every proposed version was trimmed;
                                # report the trim reasons per fmri
                                ret = [_("No matching version of %s can be installed:") % name]
                                
                                for f in proposed_dict[name]:
                                        ret += ["%s: %s\n" % (f, "\n\t".join(self.__trim_dict[f]))]
                                raise api_errors.PlanCreationException(no_version=ret)
                        proposed_dict[name] = tv

                self.__progtrack.evaluate_progress()

                # build set of possible pkgs
                self.__timeit("phase 6")
                possible_set = set()

                for f in self.__installed_fmris.values():
                        possible_set |= self.__comb_newer_fmris(f)[0]

                for name in proposed_dict:
                        for f in proposed_dict[name]:
                                possible_set.add(f)
                self.__timeit("phase 7")
                possible_set.update(self.__generate_dependency_closure(possible_set,
                    excludes=excludes))

                # remove any versions from proposed_dict that are in trim_dict as
                # trim dict has been updated w/ missing dependencies
                self.__timeit("phase 8")
                for name in proposed_dict:
                        tv = self.__dotrim(proposed_dict[name])
                        if not tv:
                                ret = [_("No version of %s can be installed:") % name]
                                
                                for f in proposed_dict[name]:
                                        ret += ["%s: %s\n" % (f, "\n\t".join(self.__trim_dict[f]))]
                                raise api_errors.PlanCreationException(no_version=ret)
                        proposed_dict[name] = tv

                self.__timeit("phase 9")
                self.__progtrack.evaluate_progress()

                # generate ids, possible_dict for clause generation 
                self.__assign_fmri_ids(possible_set)

                # generate clauses for only one version of each package, and
                # for dependencies for each package.  Do so for all possible fmris.
                
                for name in self.__possible_dict.keys():
                        self.__progtrack.evaluate_progress()
                        # insure that at most one version of a package is installed
                        self.__addclauses(self.__gen_highlander_clauses(
                            self.__possible_dict[name]))
                        # generate dependency clauses for each pkg
                        for fmri in self.__possible_dict[name]:
                                for da in self.__get_dependency_actions(fmri, 
                                    excludes=excludes):
                                        self.__addclauses(self.__gen_dependency_clauses(
                                            fmri, da))

                self.__timeit("phase 10")
                # generate clauses for installed and to be installed pkgs    
                for name in set(proposed_dict.keys() + 
                    self.__installed_fmris.keys()) :
                        self.__progtrack.evaluate_progress()
                        self.__addclauses(self.__gen_one_of_these_clauses(
                            self.__possible_dict[name]))

                # save a solver instance so we can come back here
                # this is where errors happen...
                saved_solver = self.__save_solver()
                try:
                        saved_solution = self.__solve()
                except api_errors.PlanCreationException:
                        # solve failed; build a human-readable report of
                        # installed pkgs, proposed pkgs w/ trim reasons,
                        # and maintained incorporations, then re-raise.
                        info = []
                        info.append("package solver error")
                        info.append("attempted operation: install")
                        info.append("already installed packages:")
                        for name in sorted(self.__installed_fmris):
                                f = self.__installed_fmris[name]
                                info.append("\t%s" % f)
                                for s in self.__print_dependencies(f, excludes):
                                        info.append("\t\t\t%s" % s)
                        info.append("proposed pkgs:")
                        for name in proposed_dict:
                                info.append("\t%s" % name)
                                for f in proposed_dict[name]:
                                        info.append("\t\t%s %s" % 
                                            (f, self.__trim_dict.get(f, "")))
                                        for s in self.__print_dependencies(f, excludes):
                                                info.append("\t\t\t%s" % s)
                        
                        if inc_list:
                                il = ", ".join([str(i) for i in inc_list])
                        else:
                                il = "None"

                        info.append("maintained incorporations: %s" % il)
                        
                        self.__failure_info = info
                        raise

                self.__timeit("phase 11")
                
                # we have a solution that works... attempt to
                # reduce collateral damage to other packages
                # while still keeping command line pkgs at their
                # optimum level

                self.__restore_solver(saved_solver)

                # fix the fmris that were specified on the cmd line
                # at their optimum (newest) level along with the
                # new dependencies, but try and avoid upgrading 
                # already installed pkgs.

                for fmri in saved_solution:
                        if fmri.pkg_name in proposed_dict or \
                           fmri.pkg_name not in self.__installed_fmris:
                                self.__addclauses(
                                    self.__gen_one_of_these_clauses([fmri]))

                # save context
                saved_solver = self.__save_solver()

                saved_solution = self.__solve(older=True)
                # Now we have the oldest possible original fmris
                # but we may have some that are not original
                # Since we want to move as far forward as possible
                # when we have to move a package, fix the originals
                # and drive forward again w/ the remainder
                self.__restore_solver(saved_solver)

                for fmri in (saved_solution & set(self.__installed_fmris.values())):
                        self.__addclauses(self.__gen_one_of_these_clauses([fmri]))

                solution = self.__solve()

                # obsolete pkgs shouldn't appear in the final plan
                for f in solution.copy():
                        if self.__fmri_is_obsolete(f):
                                solution.remove(f)

                self.__timeit("phase 12")
                return solution
+
        def solve_update(self, existing_freezes, excludes=EmptyI):
                """Compute the set of fmris for an image-update style
                operation: take every installed pkg as far forward as the
                dependency clauses permit.

                Note: unlike solve_install, 'existing_freezes' is not
                consulted here -- TODO confirm whether that is intended.

                Returns the set of FMRIs comprising the updated image
                (obsolete pkgs removed); raises PlanCreationException via
                __solve when no solution exists."""
                # trim fmris we cannot install because they're older
                self.__timeit()

                for f in self.__installed_fmris.values():
                        self.__trim_older(f)

                self.__timeit("phase 1")

                # generate set of possible fmris
                possible_set = set()

                for f in self.__installed_fmris.values():
                        possible_set.add(f) # in case we cannot talk to publisher
                        possible_set |= self.__comb_newer_fmris(f)[0]

                self.__timeit("phase 2")

                possible_set.update(self.__generate_dependency_closure(possible_set,
                    excludes=excludes))


                self.__timeit("phase 3")

                # generate ids, possible_dict for clause generation 
                self.__assign_fmri_ids(possible_set)

                # generate clauses for only one version of each package, and
                # for dependencies for each package.  Do so for all possible fmris.
                
                for name in self.__possible_dict.keys():
                        # insure that at most one version of a package is installed
                        self.__addclauses(self.__gen_highlander_clauses(
                            self.__possible_dict[name]))
                        # generate dependency clauses for each pkg
                        for fmri in self.__possible_dict[name]:
                                for da in self.__get_dependency_actions(fmri, 
                                    excludes=excludes):
                                        self.__addclauses(self.__gen_dependency_clauses(
                                            fmri, da))
                self.__timeit("phase 4")

                # generate clauses for installed pkgs

                for name in self.__installed_fmris.keys():
                        self.__addclauses(self.__gen_one_of_these_clauses(
                            self.__possible_dict[name]))

                self.__timeit("phase 5")
                solution = self.__solve()

                # obsolete pkgs shouldn't appear in the final plan
                for f in solution.copy():
                        if self.__fmri_is_obsolete(f):
                                solution.remove(f)
                        
                return solution 
+
        def solve_change_varcets(self, existing_freezes, new_variants, new_facets, new_excludes):
                """Compute packaging changes needed to effect
                desired variant and or facet change.

                'new_variants' is a dict of the changed variants (may be
                empty/None for a facet-only change); 'new_excludes' must
                include the effect of any facet changes.  'new_facets' is
                currently unused here beyond what new_excludes encodes.

                Returns the list/set of fmris for the recomputed image."""

                # First, determine if there are any packages that are
                # not compatible w/ the new variants, and compute
                # their removal

                keep_set = set()
                removal_set = set()

                if new_variants:
                        self.__variants = new_variants
                self.__excludes = new_excludes #must include facet changes

                if new_variants:
                        for f in self.__installed_fmris.values():                        
                                d = self.__get_variant_dict(f)
                                # for/else: pkgs declaring a changed variant
                                # without the new value go to removal_set;
                                # the else clause runs only when no break
                                # occurred, i.e. the pkg is compatible.
                                for k in new_variants.keys():
                                        if k in d and new_variants[k] not in \
                                            d[k]:
                                                removal_set.add(f)
                                                break
                                else:
                                        keep_set.add(f)
                else: # keep the same pkgs as a starting point for facet changes only
                        keep_set = set(self.__installed_fmris.values())
                        
                # XXX check existing freezes to see if they permit removals

                # recompute solution as if a blank image was being
                # considered; if a generic package depends on a 
                # architecture specific one, the operation will fail.


                if not keep_set:
                        return [] # in case this deletes our last package

                blank_solver = PkgSolver(self.__catalog, {} , self.__pub_ranks, 
                    self.__variants, self.__progtrack)

                proposed_dict = dict([(f.pkg_name, [f]) for f in keep_set])
                return blank_solver.solve_install(existing_freezes, proposed_dict, new_excludes)
+
+        def __save_solver(self):
+                """Create a saved copy of the current solver state and return it"""
+                return (self.__addclause_failure, 
+                        pkg.solver.msat_solver(self.__solver))
+
+        def __restore_solver(self, solver):
+                """Set the current solver state to the previously saved one"""
+
+                self.__addclause_failure, self.__solver = solver
+                self.__iterations = 0
+        
        def __solve(self, older=False):
                """Perform iterative solution; try for newest pkgs unless older=True.

                Each satisfying assignment found is tightened by adding
                clauses that forbid older (or, with older=True, newer)
                versions of every selected pkg, plus a clause forbidding
                the exact combination just seen; iteration stops when the
                problem becomes unsatisfiable, leaving the last (optimal)
                assignment in solution_vector.

                Raises PlanCreationException(no_solution=True) if not even
                one satisfying assignment was found."""
                solution_vector = []
                self.__state = SOLVER_FAIL

                while not self.__addclause_failure and self.__solver.solve([]):
                        self.__iterations += 1
                        solution_vector = self.__get_solution_vector()
                        # prevent the selection of any older pkgs
                        for fid in solution_vector:
                                if not older:
                                        # forbid everything older than the
                                        # selected version of this pkg
                                        for f in self.__comb_newer_fmris(
                                            self.__getfmri(fid))[1]:
                                                self.__addclauses([[-self.__getid(f)]])
                                else:
                                        # forbid everything newer instead
                                        pfmri = self.__getfmri(fid)
                                        for f in self.__comb_newer_fmris(pfmri)[0] - \
                                            set([pfmri]):
                                                self.__addclauses([[-self.__getid(f)]])

                        # prevent the selection of this exact combo; permit [] solution
                        if not solution_vector:
                                break
                        self.__addclauses([[-i for i in solution_vector]])

                if not self.__iterations:
                        raise api_errors.PlanCreationException(no_solution=True)

                self.__state = SOLVER_SUCCESS

                solution = set([self.__getfmri(i) for i in solution_vector])

                return solution
+
+        def __get_solution_vector(self):
+                """Return solution vector from solver"""
+                return sorted([
+                    (i + 1) for i in range(self.__solver.get_variables())
+                    if self.__solver.dereference(i)
+                ])
+                        
+        def __assign_fmri_ids(self, possible_set):
+                """ give a set of possible fmris, assign ids"""
+                # generate dictionary of possible pkgs fmris by pkg stem
+                self.__possible_dict.clear()
+                self.__poss_set |= possible_set
+
+                for f in possible_set:
+                        self.__possible_dict.setdefault(f.pkg_name, []).append(f)
+                for name in self.__possible_dict:
+                        self.__possible_dict[name].sort()
+                # assign clause numbers (ids) to possible pkgs
+                pkgid = 1
+                for name in sorted(self.__possible_dict.keys()):
+                        for fmri in reversed(self.__possible_dict[name]):
+                                self.__id2fmri[pkgid] = fmri
+                                self.__fmri2id[fmri] = pkgid
+                                pkgid += 1
+
+                self.__variables = pkgid - 1
+                self.__trimdone = True
+
+        def __getid(self, fmri):
+                """Translate fmri to variable number (id)"""
+                return self.__fmri2id[fmri]
+
+        def __getfmri(self, fid):
+                """Translate variable number (id) to fmris"""
+                return self.__id2fmri[fid]
+
+        def __get_fmris_by_version(self, pkg_name):
+                """Cache for catalog entries; helps performance"""
+                if pkg_name not in self.__cache:
+                        self.__cache[pkg_name] = \
+                            [t for t in self.__catalog.fmris_by_version(pkg_name)]
+                return self.__cache[pkg_name]
+
        def __get_catalog_fmris(self, pkg_name, dotrim=True):
                """Return the list of fmris in catalog for this pkg name.

                With dotrim=True (the default), publisher-based trimming
                is applied lazily (once per stem), fmris already in
                __trim_dict are elided, and -- once __poss_set has been
                populated by __assign_fmri_ids -- results are further
                restricted to the possible set."""
                if dotrim and pkg_name not in self.__pub_trim:
                        self.__filter_publishers(pkg_name)

                return [
                        f
                        for tp in self.__get_fmris_by_version(pkg_name)
                        for f in tp[1]
                        if not dotrim or (f not in self.__trim_dict and
                                          (not self.__poss_set or f in self.__poss_set))
                        ]
+
        def __comb_newer_fmris(self, fmri, dotrim=True, obsolete_ok=True):
                """Returns tuple of set of fmris that match within
                CONSTRAINT.NONE of specified version and set of remaining fmris."""
                return self.__comb_common(fmri, dotrim, version.CONSTRAINT_NONE, obsolete_ok) 
+
+        def __comb_common(self, fmri, dotrim, constraint, obsolete_ok):
+                """Underlying impl. of other comb routines"""
+                tp = (fmri, dotrim, constraint) # cache index
+                # determine if the data is cachable or cached:
+                if (not self.__trimdone and dotrim) or tp not in self.__cache:
+                        all_fmris = set(self.__get_catalog_fmris(fmri.pkg_name, dotrim))
+                        matching = set([
+                                        f
+                                        for f in all_fmris
+                                        if not fmri.version or
+                                        fmri.version == f.version or
+                                        f.version.is_successor(fmri.version, 
+                                            constraint=constraint)
+                                        if obsolete_ok or not self.__fmri_is_obsolete(f)
+                                        ])
+                        # if we haven't finished triming, don't cache this
+                        if not self.__trimdone and dotrim:
+                                return matching, all_fmris - matching
+                        # cache the result
+                        self.__cache[tp] = (matching, all_fmris - matching)
+                return self.__cache[tp]
+                
+        def __comb_older_fmris(self, fmri, dotrim=True, obsolete_ok=True):
+                """Returns tuple of set of fmris that are older than 
+                specified version and set of remaining fmris."""
+                newer, older = self.__comb_newer_fmris(fmri, dotrim, obsolete_ok)
+                return older, newer
+
        def __comb_auto_fmris(self, fmri, dotrim=True, obsolete_ok=True):
                """Returns tuple of set of fmris that match within
                CONSTRAINT.AUTO of specified version and set of remaining fmris."""
                return self.__comb_common(fmri, dotrim, version.CONSTRAINT_AUTO, obsolete_ok)
+
+        def __fmri_loadstate(self, fmri, excludes):
+                """load fmri state (obsolete == True, renamed == True)"""
+                
+                relevant = dict([ 
+                        (a.attrs["name"], a.attrs["value"])
+                        for a in self.__catalog.get_entry_actions(fmri, 
+                        [catalog.Catalog.DEPENDENCY], excludes=excludes)
+                        if a.name == "set" and \
+                            a.attrs["name"] in ["pkg.renamed", "pkg.obsolete"]
+                        ])
+                self.__fmri_state[fmri] = (
+                    relevant.get("pkg.obsolete", "false").lower() == "true",
+                    relevant.get("pkg.renamed", "false").lower() == "true")
+                        
+        def __fmri_is_obsolete(self, fmri, excludes=EmptyI):
+                """check to see if fmri is obsolete"""
+                if fmri not in self.__fmri_state:
+                        self.__fmri_loadstate(fmri, excludes)
+                return self.__fmri_state[fmri][0]
+
        def __fmri_is_renamed(self, fmri, excludes=EmptyI):
                """check to see if fmri is renamed"""
                if fmri not in self.__fmri_state:
                        self.__fmri_loadstate(fmri, excludes)
                return self.__fmri_state[fmri][1]
+                                        
+        def __get_dependency_actions(self, fmri, excludes=EmptyI):
+                """Return list of all dependency actions for this fmri"""
+
+                return [
+                        a
+                        for a in self.__catalog.get_entry_actions(fmri, 
+                            [catalog.Catalog.DEPENDENCY], excludes=excludes)
+                        if a.name == "depend"
+                        ]
+
+        def __get_variant_dict(self, fmri, excludes=EmptyI):
+                """Return dictionary of variants suppported by fmri"""
+                if fmri not in self.__variant_dict:
+                        self.__variant_dict[fmri] = dict(
+                            self.__catalog.get_entry_all_variants(fmri)
+#                            [
+#                            (a.attrs["name"], a.attrs["value"])
+#                            for a in self.__catalog.get_entry_actions(fmri, 
+#                                [catalog.Catalog.DEPENDENCY], excludes=excludes)
+#                            if a.name == "set" and a.attrs["name"].startswith("variant.")
+#                            ]
+                            )
+                return self.__variant_dict[fmri]
+                
        def __generate_dependency_closure(self, fmri_set, excludes=EmptyI, dotrim=True):
                """Return set of all fmris the set of specified fmris
                could depend on (transitively).

                Note: fmri_set is used as the work queue and is consumed
                (emptied) by this routine; callers pass a set they are
                done with or re-merge the result."""

                needs_processing = fmri_set
                already_processed = set()

                while (needs_processing):
                        fmri = needs_processing.pop()
                        self.__progtrack.evaluate_progress()
                        already_processed.add(fmri)
                        # queue any not-yet-seen direct dependencies
                        needs_processing |= (self.__generate_dependencies(fmri, excludes,
                            dotrim) - already_processed)
                return already_processed
+
        def __generate_dependencies(self, fmri, excludes=EmptyI, dotrim=True):
                """Return set of direct dependencies of this pkg.

                Only fmris matched by "require" dependencies are
                collected, but note the comprehension evaluates
                __parse_dependency for every dependency action before the
                type filter is applied; with check_req=True an
                unsatisfiable require dependency raises RuntimeError,
                which trims this fmri and yields an empty set."""
                try:
                        return set([
                             f 
                             for da in self.__get_dependency_actions(fmri, excludes)
                             for f in self.__parse_dependency(da, dotrim, check_req=True)[1]
                             if da.attrs["type"] == "require"
                             ])

                except RuntimeError, e:
                        # record why this pkg is not installable
                        self.__trim(fmri, str(e))
                        return set([])
+
        def __parse_dependency(self, dependency_action, dotrim=True, check_req=False):
                """Return tuple of (disallowed fmri list, allowed fmri list,
                    dependency_type) for a depend action.

                With check_req=True, an unsatisfiable "require" dependency
                raises RuntimeError; a second match attempt with
                obsolete_ok=True distinguishes "no such version" from
                "only obsolete versions exist" for the error text."""
                dtype = dependency_action.attrs["type"]
                fmri =  pkg.fmri.PkgFmri(dependency_action.attrs["fmri"], "5.11")

                if dtype == "require":
                        # obsolete pkgs cannot satisfy a require dependency
                        matching, nonmatching = \
                            self.__comb_newer_fmris(fmri, dotrim, obsolete_ok=False)                        
                elif dtype == "optional":
                        matching, nonmatching = \
                            self.__comb_newer_fmris(fmri, dotrim, obsolete_ok=True)                        
                elif dtype == "exclude":
                        # exclude forbids the named version and anything newer
                        matching, nonmatching = \
                            self.__comb_older_fmris(fmri, dotrim, obsolete_ok=True)
                elif dtype == "incorporate":
                        matching, nonmatching = \
                            self.__comb_auto_fmris(fmri, dotrim, obsolete_ok=True)
                else:
                        matching, nonmatching = [],[] # no idea what this dependency is

                if check_req and not matching and dtype == "require":
                        # retry allowing obsolete pkgs purely to pick the
                        # more informative of the two error messages
                        matching, nonmatching = \
                            self.__comb_newer_fmris(fmri, dotrim, obsolete_ok=True)
                        if not matching:
                                raise RuntimeError, \
                                    "Suitable required dependency %s cannot be found" % fmri
                        else:
                                raise RuntimeError, \
                                    "Required dependency %s is obsolete" % fmri                                    

                return nonmatching, matching, dtype
+
+        def __print_dependencies(self, fmri, excludes=EmptyI):
+                """Used to display dependencies when things go wrong."""
+                ret = []
+
+                for a in self.__get_dependency_actions(fmri, excludes):
+
+                        unmatch, match, dtype = self.__parse_dependency(a)
+
+                        if dtype == "require":
+                                if not match:
+                                        ms = "No matching packages found"
+                                else:
+                                        ms = "Requires: " + ", ".join([str(f) for f in match])
+                        else:
+                                if not unmatch:
+                                        ms = "No packages excluded"
+                                else:
+                                        ms = "Excludes: " + ", ".join([str(f) for f in unmatch])
+
+                        ret.append("%s dependency: %s: %s" % (dtype, a.attrs["fmri"], ms))
+
+                return ret
+
+        # clause generation routines
+
+        def __gen_dependency_clauses(self, fmri, da, dotrim=True):
+                """Return clauses to implement this dependency"""
+                nm, m, dtype = self.__parse_dependency(da, dotrim)
+                if dtype == "require":
+                        return self.__gen_require_clauses(fmri, m)
+                else:
+                        return self.__gen_negation_clauses(fmri, nm)
+
+
+        def __gen_highlander_clauses(self, fmri_list):
+                """Return a list of clauses that specifies only one or zero
+                of the fmris in fmri_list may be installed.  This prevents
+                multiple versions of the same package from being installed
+                at once."""
+
+                # pair wise negation
+                # if a has 4 versions, we need 
+                # [
+                #  [-a.1, -a.2],
+                #  [-a.1, -a.3], 
+                #  [-a.1, -a.4],
+                #  [-a.2, -a.3],
+                #  [-a.2, -a.4],
+                #  [-a.3, -a.4]
+                # ]
+                # n*(n-1)/2 algorithms suck
+
+                if len(fmri_list) == 1: # avoid generation of singletons
+                        return []
+
+                id_list = [ -self.__getid(fmri) for fmri in fmri_list]
+                l = len(id_list)
+
+                return [
+                        [id_list[i], id_list[j]]
+                        for i in range(l-1)
+                        for j in range(i+1, l)
+                        ]
+
+        def __gen_require_clauses(self, fmri, matching_fmri_list):
+                """generate clause for require dependency: if fmri is
+                installed, one of fmri_list is required"""
+                # if a.1 requires b.2, b.3 or b.4:
+                # !a.1 | b.2 | b.3 | b.4
+
+                return [
+                        [-self.__getid(fmri)] + 
+                        [self.__getid(fmri) for fmri in matching_fmri_list]
+                        ]
+
+        def __gen_negation_clauses(self, fmri, non_matching_fmri_list):
+                """ generate clauses for optional, incorporate and
+                exclude dependencies to exclude non-acceptable versions"""
+                # if present, fmri must match ok list
+                # if a.1 optionally requires b.3:
+                # [
+                #   [!a.1 | !b.1],
+                #   [!a.1 | !b.2]
+                # ]
+                fmri_id = self.__getid(fmri)
+                return [[-fmri_id, -self.__getid(f)] for f in non_matching_fmri_list]
+
+        def __gen_one_of_these_clauses(self, fmri_list):
+                """generate clauses such that at least one of the fmri_list
+                members gets installed"""
+                # If a has four versions,
+                # a.1|a.2|a.3|a.4
+                # plus highlander clauses
+                assert fmri_list, "Empty list of which one is required"
+                return [[self.__getid(fmri) for fmri in fmri_list]]
+
+        def __addclauses(self, clauses):
+                """add list of clause lists to solver"""
+
+                for c in clauses:
+                        try:
+                                if not self.__solver.add_clause(c):
+                                        self.__addclause_failure = True
+                                self.__clauses += 1
+                        except TypeError:                                
+                                raise TypeError, "List of integers, not %s, expected" % c
+
+        def __get_installed_unbound_inc_list(self, proposed_fmris, excludes=EmptyI):
+                """Return the list of incorporations that are installed and do not
+                have any other pkg depending on any specific version being installed,
+                along w/ the list of constrained fmris"""
+                pkgs = {}
+                incorps = set()
+                versioned_dependents = set()
+                proposed_names = proposed_fmris.keys()
+                pkg_cons = {}
+
+                for f in self.__installed_fmris.values():
+                        pkgs[f.pkg_name] = f
+                        for d in self.__get_dependency_actions(f, excludes):
+                                fmri = pkg.fmri.PkgFmri(d.attrs["fmri"], "5.11")
+                                if d.attrs["type"] == "incorporate":
+                                        incorps.add(f.pkg_name)
+                                        pkg_cons.setdefault(f, []).append(fmri)
+                                if fmri.has_version:
+                                        versioned_dependents.add(fmri.pkg_name)
+
+                ret = [
+                    pkgs[f] 
+                    for f in incorps - versioned_dependents
+                    if f not in proposed_names
+                ]
+
+                con_list = [
+                        [
+                        i
+                        for i in pkg_cons[inc]
+                        ]
+                        for inc in ret
+                        ]
+
+                return ret, con_list                
+
+        def __filter_publishers(self, pkg_name):
+                """Given a list of fmris for various versions of
+                a package from various publishers, trim those
+                that are not suitable"""
+
+                if pkg_name in self.__pub_trim: # already done
+                        return
+
+                fmri_list = self.__get_catalog_fmris(pkg_name, dotrim=False)
+                version_dict = {}
+
+
+                self.__pub_trim[pkg_name] = True
+
+                if pkg_name in self.__publisher:
+                        acceptable_pubs = [self.__publisher[pkg_name]]
+                        reason = _("Publisher differs from installed or specifed version")
+                else:
+                        # order by pub_rank; choose highest possible tier for
+                        # pkgs
+                        pubs_found = list(set([f.get_publisher() for f in fmri_list]))
+                        ranked = sorted([(self.__pub_ranks[p][0], p) for p in pubs_found])
+                        acceptable_pubs = [ r[1] 
+                                            for r in ranked 
+                                            if r[0] == ranked[0][0]
+                                            ]
+                        reason = _("Publisher is lower ranked")
+
+                # generate a dictionary, indexed by version, of acceptable fmris
+                for f in fmri_list:
+                        if f.get_publisher() in acceptable_pubs:
+                                version_dict.setdefault(f.get_version(), []).append(f)
+
+                # add installed packages; always prefer the installed fmri
+                # if they match exactly to prevent needless re-installs
+                # avoid multiple publishers w/ exactly the same fmri to prevent 
+                # thrashing in the solver due to many equiv. solutions.
+
+                for f in fmri_list:
+                        v = f.get_version()
+                        if self.__installed_fmris.get(pkg_name, None) == f:
+                                if v not in version_dict:
+                                        version_dict[v] = [f]
+                                else:
+                                        for i, nf in enumerate(version_dict[v][:]):
+                                                if nf.version == f.version:
+                                                        version_dict[v][i] = f
+                acceptable_list = []
+                version_dict.values()
+                for l in version_dict.values():
+                        acceptable_list.extend(l)
+
+                for f in set(fmri_list) - set(acceptable_list):
+                        self.__trim(f, reason)
+
+        # routines to manage the trim dictionary
+        def __trim(self, fmri_list, reason):
+                """Remove specified fmri(s) from consideration for specified reason"""
+                try:
+                        it = iter(fmri_list)
+                except TypeError:
+                        it = [fmri_list]
+                for fmri in it:
+                        self.__trim_dict.setdefault(fmri, []).append(reason)
+
+        def __trim_older(self, fmri):
+                """Trim any fmris older than this one"""
+                reason = _("Newer version %s is already installed") % fmri
+                self.__trim(self.__comb_newer_fmris(fmri)[1], reason)
+
+        def __trim_nonmatching_variants(self, fmri):
+                vd = self.__get_variant_dict(fmri)
+
+                for v in self.__variants.keys():
+                        if v in vd and self.__variants[v] not in vd[v]:
+                                if vd == "variant.arch":
+                                        reason = _("Package doesn't support image architecture")
+                                else:
+                                        reason = _("Package doesn't support image variant %s") % v
+                                        
+                                self.__trim(fmri, reason)
+
+        def __dotrim(self, fmri_list):
+                """Return fmri_list trimmed of any fmris in self.__trim_dict"""
+
+
+                ret = [
+                        f
+                        for f in fmri_list
+                        if f not in self.__trim_dict
+                        ]
+                return ret
--- a/src/modules/client/pkgplan.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/pkgplan.py	Wed Nov 18 15:53:48 2009 -0800
@@ -32,7 +32,6 @@
 
 import pkg.actions.directory as directory
 import pkg.manifest as manifest
-import pkg.client.api_errors as api_errors
 from pkg.misc import expanddirs, get_pkg_otw_size, EmptyI
 
 class PkgPlan(object):
@@ -71,21 +70,22 @@
 
                 return s
 
-        def propose_reinstall(self, fmri, mfst):
-                self.destination_fmri = fmri
-                self.__destination_mfst = mfst
-                self.__legacy_info["version"] = self.destination_fmri.version
-                self.origin_fmri = fmri
-                self.__origin_mfst = mfst
-
-                if not self.image.is_pkg_installed(fmri):
-                        raise api_errors.PlanCreationException(
-                            not_installed=[fmri])
+        def propose(self, of, om, df, dm):
+                """Propose origin and dest fmri, manifest"""
+                self.origin_fmri = of
+                self.__origin_mfst = om
+                self.destination_fmri = df
+                self.__destination_mfst = dm
+                if self.destination_fmri:
+                        self.__legacy_info["version"] = self.destination_fmri.version
 
         def propose_repair(self, fmri, mfst, actions):
-                self.propose_reinstall(fmri, mfst)
-                self.origin_fmri = None
-
+                self.propose(fmri, mfst, fmri, mfst)
+                # self.origin_fmri = None
+                # I'd like a cleaner solution than this; we need to actually
+                # construct a list of actions as things currently are rather than
+                # just re-applying the current set of actions.
+                #
                 # Create a list of (src, dst) pairs for the actions to send to
                 # execute_repair.  src is none in this case since we aren't
                 # upgrading, just repairing.
@@ -94,23 +94,6 @@
                 # Only install actions, no update or remove
                 self.__repair_actions = lst
 
-        def propose_destination(self, fmri, mfst):
-                self.destination_fmri = fmri
-                self.__destination_mfst = mfst
-                self.__legacy_info["version"] = self.destination_fmri.version
-
-                if self.image.is_pkg_installed(fmri):
-                        raise api_errors.PlanCreationException(
-                            installed=[fmri])
-
-        def propose_removal(self, fmri, mfst):
-                self.origin_fmri = fmri
-                self.__origin_mfst = mfst
-
-                if not self.image.is_pkg_installed(fmri):
-                        raise api_errors.PlanCreationException(
-                            not_installed=[fmri])
-
         def get_actions(self):
                 raise NotImplementedError()
 
@@ -130,20 +113,6 @@
 
         def evaluate(self, old_excludes=EmptyI, new_excludes=EmptyI):
                 """Determine the actions required to transition the package."""
-                # if origin unset, determine if we're dealing with an previously
-                # installed version or if we're dealing with the null package
-                #
-                # XXX Perhaps make the pkgplan creator make this explicit, so we
-                # don't have to check?
-                f = None
-
-                if not self.origin_fmri:
-                        f = self.image.older_version_installed(
-                            self.destination_fmri)
-                        if f:
-                                self.origin_fmri = f
-                                self.__origin_mfst = \
-				    self.image.get_manifest(f)
 
                 # Assume that origin actions are unique, but make sure that
                 # destination ones are.
@@ -277,7 +246,7 @@
                         raise
 
         def execute_update(self, src, dest):
-                """ handle action updates"""
+                """ handle action updates"""                                
                 try:
                         dest.install(self, src)
                 except Exception, e:
--- a/src/modules/client/publisher.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/publisher.py	Wed Nov 18 15:53:48 2009 -0800
@@ -711,6 +711,7 @@
         __prefix = None
         __selected_repository = None
         __repositories = []
+        __sticky = True
         transport = None
 
         # Used to store the id of the original object this one was copied
@@ -719,7 +720,7 @@
 
         def __init__(self, prefix, alias=None, client_uuid=None, disabled=False,
             meta_root=None, repositories=None, selected_repository=None,
-            transport=None):
+            transport=None, sticky=True):
                 """Initialize a new publisher object."""
 
                 if client_uuid is None:
@@ -738,6 +739,7 @@
                 self.prefix = prefix
                 self.transport = transport
                 self.meta_root = meta_root
+                self.sticky = sticky
 
                 if repositories:
                         for r in repositories:
@@ -771,7 +773,8 @@
                 pub = Publisher(self.__prefix, alias=self.__alias,
                     client_uuid=self.__client_uuid, disabled=self.__disabled,
                     meta_root=self.meta_root, repositories=repositories,
-                    selected_repository=selected, transport=self.transport)
+                    selected_repository=selected, transport=self.transport,
+                    sticky=self.__sticky)
                 pub._source_object_id = id(self)
                 return pub
 
@@ -921,6 +924,9 @@
         def __set_client_uuid(self, value):
                 self.__client_uuid = value
 
+        def __set_stickiness(self, value):
+                self.__sticky = bool(value)
+
         def __str__(self):
                 return self.prefix
 
@@ -1491,3 +1497,7 @@
         selected_repository = property(lambda self: self.__selected_repository,
             __set_selected_repository,
             doc="A reference to the selected repository object.")
+
+        sticky = property(lambda self: self.__sticky, __set_stickiness,
+            doc="Whether or not installed packages from this publisher are"
+                " always preferred to other publishers.")
--- a/src/modules/client/transport/transport.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/client/transport/transport.py	Wed Nov 18 15:53:48 2009 -0800
@@ -611,8 +611,12 @@
 
                 self.__lock.acquire()
                 try:
-                        self._prefetch_manifests(fetchlist, excludes,
-                            progtrack, ccancel)
+                        try:
+                                self._prefetch_manifests(fetchlist, excludes,
+                                    progtrack, ccancel)
+                        except (apx.PermissionsException, 
+                            apx.InvalidDepotResponseException):
+                                pass             
                 finally:
                         self.__lock.release()
 
--- a/src/modules/depotcontroller.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/depotcontroller.py	Wed Nov 18 15:53:48 2009 -0800
@@ -30,6 +30,7 @@
 import urllib2
 import httplib
 import pkg.pkgsubprocess as subprocess
+
 from pkg.misc import versioned_urlopen
 
 class DepotStateException(Exception):
@@ -63,6 +64,7 @@
                 self.__state = self.HALTED
                 self.__writable_root = None
                 self.__sort_file_max_size = None
+                self.__starttime = 0
                 return
 
         def set_depotd_path(self, path):
@@ -267,6 +269,7 @@
                     close_fds=True)
                 if self.__depot_handle == None:
                         raise DepotStateException("Could not start Depot")
+                self.__starttime = time.time()
                 
         def start(self):
                 self.__initial_start()
@@ -329,6 +332,12 @@
                 return self.__depot_handle.poll()
 
         def kill(self):
+                """Kill the depot process.  Letting it run for at least
+                a short minimum lifetime makes its shutdown reliable."""
+
+                lifetime = time.time() - self.__starttime 
+                if lifetime < 1.0:
+                        time.sleep(1.0 - lifetime)
 
                 if self.__depot_handle == None:
                         # XXX might want to remember and return saved
@@ -336,14 +345,12 @@
                         return 0
 
                 status = -1
-                #
-                # With sleeptime doubling every loop iter, and capped at
-                # 10.0 secs, the cumulative time waited will be 10 secs.
-                #
+
+                wait_to_exit = 5.0
                 sleeptime = 0.05
                 firsttime = True
 
-                while sleeptime <= 10.0:
+                while wait_to_exit > 0:
                         status = self.__depot_handle.poll()
                         if status is not None:
                                 break
@@ -358,6 +365,7 @@
                                 firsttime = False
 
                         time.sleep(sleeptime)
+                        wait_to_exit -= sleeptime
                         sleeptime *= 2
                 else:
                         assert status is None
@@ -365,7 +373,8 @@
                             "Depot did not shut down, trying kill -9 %d" % \
                             self.__depot_handle.pid
                         os.kill(self.__depot_handle.pid, signal.SIGKILL)
-                        status = self.__depot_handle.wait()
+                        #self.__depot_handle.wait()
+                        status = self.__depot_handle.poll()
 
                 # XXX do something useful with status
                 self.__state = self.HALTED
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/facet.py	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,132 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+
+# basic facet support
+
+from pkg.misc import EmptyI
+import fnmatch 
+
+class Facets(dict):
+        # store information on facets; subclass dict 
+        # and maintain ordered list of keys sorted
+        # by length.
+
+        # subclass __getitem__ so that queries w/
+        # actual facets find a match
+
+        def __init__(self, init=EmptyI):
+                dict.__init__(self)
+                self.__keylist = []
+                for i in init:
+                        self[i] = init[i]
+
+        def __repr__(self):
+                s =  "<"
+                s += ", ".join(["%s:%s" % (k, dict.__getitem__(self, k)) for k in self.__keylist])
+                s += ">"
+
+                return s
+                
+        def __setitem__(self, item, value):                
+                if not item.startswith("facet."):
+                        raise KeyError, 'key must start with "facet".'
+
+                if not (value == True or value == False):
+                        raise ValueError, "value must be boolean"
+
+                if item not in self: 
+                        self.__keylist.append(item)
+                        self.__keylist.sort(cmp=lambda x, y: len(y) - len(x))
+                dict.__setitem__(self, item, value)
+
+        def __getitem__(self, item):
+                """implement facet lookup algorithm here"""
+                if not item.startswith("facet."):
+                        raise KeyError, "key must start w/ facet."
+
+                if item in self:
+                        return dict.__getitem__(self, item)
+                for k in self.__keylist:
+                        if fnmatch.fnmatch(item, k):
+                                return dict.__getitem__(self, k)
+
+                return True # be inclusive
+
+        def __delitem__(self, item):
+                dict.__delitem__(self, item)
+                self.__keylist.remove(item)
+
+        def pop(self, item, default=None):
+                self.__keylist.remove(item)
+                return dict.pop(self, item, default) 
+
+        def popitem(self):
+                popped = dict.popitem(self)
+                self.__keylist.remove(popped[0])
+                return popped
+
+        def setdefault(self, item, default=None):
+                if item not in self:
+                        self[item] = default
+                return self[item]
+
+        def update(self, d):
+                for k, v in d.iteritems():
+                        self[k] = v
+
+        def keys(self):
+                return self.__keylist[:]
+
+        def values(self):
+                return [self[k] for k in self.__keylist]
+
+        def items(self):
+                return [a for a in self.iteritems()]
+
+        def iteritems(self): # return in sorted order for display
+                for k in self.__keylist:
+                        yield k, self[k]
+
+        def copy(self):
+                return Facets(self)
+
+        def clear(self):
+                self.__keylist = []
+                dict.clear(self)
+
+        def allow_action(self, action):
+                """ determine if facets permit this action; if any facets
+                allow it, return True; also return True if no facets are present"""
+                facets = [k for k in action.attrs.keys() if k.startswith("facet.")]
+                
+                ret = True
+
+                for f in facets:
+                        if self[f]:
+                                return True
+                        else:
+                                ret = False
+
+                return ret
--- a/src/modules/fmri.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/fmri.py	Wed Nov 18 15:53:48 2009 -0800
@@ -333,13 +333,18 @@
                 if not other:
                         return 1
 
-                if self.pkg_name == other.pkg_name:
-                        return cmp(self.version, other.version)
+                c = cmp(self.publisher, other.publisher)
+
+                if c != 0:
+                        return c
 
-                if self.pkg_name > other.pkg_name:
-                        return 1
+                c = cmp(self.pkg_name, other.pkg_name)
+ 
+                if c != 0:
+                        return c
 
-                return -1
+                return cmp(self.version, other.version)
+
 
         def get_link_path(self, stemonly = False):
                 """Return the escaped link (or file) path fragment for this
--- a/src/modules/manifest.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/manifest.py	Wed Nov 18 15:53:48 2009 -0800
@@ -923,7 +923,7 @@
                 # origin has been removed.  This is an optimization for
                 # uninstall.
                 return ([], [],
-                    [(a, None) for a in origin.gen_actions(self_exclude)])
+                    [(a, None) for a in origin.gen_actions(origin_exclude)])
 
         @staticmethod
         def get_directories(excludes):
--- a/src/modules/misc.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/misc.py	Wed Nov 18 15:53:48 2009 -0800
@@ -525,7 +525,7 @@
                         t = cmp(f1.pkg_name, f2.pkg_name)
                         if t != 0:
                                 return t
-                        t = cmp(f2, f1)
+                        t = cmp(f2.version, f1.version)
                         if t != 0:
                                 return t
                         if f1.get_publisher() == ppub:
@@ -657,6 +657,74 @@
         def __oops(self):
                 raise TypeError, "Item assignment to ImmutableDict"
 
+# A way to have a dictionary be a property
+
+class DictProperty(object):
+        class __InternalProxy(object):
+                def __init__(self, obj, fget, fset, fdel, iteritems, keys, values, iterator):
+                        self.__obj = obj
+                        self.__fget = fget
+                        self.__fset = fset
+                        self.__fdel = fdel
+                        self.__iteritems = iteritems
+                        self.__keys = keys
+                        self.__values = values
+                        self.__iter = iterator
+
+                def __getitem__(self, key):
+                        if self.__fget is None:
+                                raise AttributeError, "unreadable attribute"
+
+                        return self.__fget(self.__obj, key)
+
+                def __setitem__(self, key, value):
+                        if self.__fset is None:
+                                raise AttributeError, "can't set attribute"
+                        self.__fset(self.__obj, key, value)
+
+                def __delitem__(self, key):
+                        if self.__fdel is None:
+                                raise AttributeError, "can't delete attribute"
+                        self.__fdel(self.__obj, key)
+
+                def iteritems(self):
+                        if self.__iteritems is None:
+                                raise AttributeError, "can't iterate over items"
+                        return self.__iteritems(self.__obj)
+
+                def keys(self):
+                        if self.__keys is None:
+                                raise AttributeError, "can't iterate over keys"
+                        return self.__keys(self.__obj)
+
+                def values(self):
+                        if self.__values is None:
+                                raise AttributeError, "can't iterate over values"
+                        return self.__values(self.__obj)
+
+                def __iter__(self):
+                        if self.__iter is None:
+                                raise AttributeError, "can't iterate"
+                        return self.__iter(self.__obj)
+
+        def __init__(self, fget=None, fset=None, fdel=None, iteritems=None, 
+            keys=None, values=None, iterator=None, doc=None):
+                self.__fget = fget
+                self.__fset = fset
+                self.__fdel = fdel
+                self.__iteritems = iteritems
+                self.__doc__ = doc
+                self.__keys = keys
+                self.__values = values
+                self.__iter = iterator
+
+        def __get__(self, obj, objtype=None):
+                if obj is None:
+                        return self
+                return self.__InternalProxy(obj, self.__fget, self.__fset, 
+                    self.__fdel, self.__iteritems, self.__keys, self.__values, self.__iter)
+
+        
 def get_sorted_publishers(pubs, preferred=None):
         spubs = []
         for p in sorted(pubs, key=operator.attrgetter("prefix")):
@@ -732,6 +800,20 @@
 
         return cert
 
+class Singleton(type):
+        """Set __metaclass__ to Singleton to create a singleton.
+        See http://en.wikipedia.org/wiki/Singleton_pattern """
+
+        def __init__(self, name, bases, dictionary):
+                super(Singleton, self).__init__(name, bases, dictionary)
+                self.instance = None
+ 
+        def __call__(self, *args, **kw):
+                if self.instance is None:
+                        self.instance = super(Singleton, self).__call__(*args, **kw)
+ 
+                return self.instance
+
 EmptyDict = ImmutableDict()
 
 # Setting the python file buffer size to 128k gives substantial performance
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/LICENSE	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,20 @@
+MiniSat -- Copyright (c) 2005, Niklas Sorensson
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/Makefile.standalone	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,61 @@
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+# Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+#
+
+PYTHON_VERSION=2.4
+PYTHON = /usr/bin/python$(PYTHON_VERSION)
+
+MINISATMODULE = minisat.so
+
+MINISATSRCS = py_solver.c solver.c
+MINISATOBJS = $(MINISATSRCS:%.c=%.o)
+
+PYTHONINC = /usr/include/python$(PYTHON_VERSION)
+PYTHON_VENDOR = /usr/lib/python$(PYTHON_VERSION)/vendor-packages
+
+CFLAGS += -I$(PYTHONINC) -DWITH_DOC_STRINGS=1 -I. -xc99 -DNDEBUG -O3
+LDFLAGS += -zdefs -ztext -lc -lm -lpython$(PYTHON_VERSION)
+
+.KEEP_STATE:
+
+all: $(MINISATMODULE)
+
+install: $(PYTHON_VENDOR)/minisat.so
+
+$(PYTHON_VENDOR)/minisat.so: minisat.so
+	ginstall -m 0755 -o root -g bin minisat.so $(PYTHON_VENDOR)
+
+lint:
+	$(LINT) -c  -errhdr=%none $(CFLAGS) $(MINISATSRCS)
+
+$(MINISATMODULE): $(MINISATOBJS)
+	$(CC) -G $(MINISATOBJS) -o $@ $(LDFLAGS)
+
+%.o : %.c
+	$(CC) -c -Kpic $(CFLAGS) $<
+
+clobber: clean
+	rm -f minisat.so
+
+clean:
+	rm -f $(MINISATOBJS)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/README	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,15 @@
+MiniSat-C v1.14.1
+========================================
+
+* Fixed some serious bugs. 
+* Tweaked to be Visual Studio friendly (by Alan Mishchenko).
+  This disabled reading of gzipped DIMACS files and signal handling, but none
+  of these features are essential (and easy to re-enable, if wanted).
+
+MiniSat-C v1.14
+========================================
+
+Ok, we get it. You hate C++. You hate templates. We agree; C++ is a
+seriously messed up language. Although we are more pragmatic about the
+quirks and maldesigns in C++, we sympathize with you. So here is a
+pure C version of MiniSat, put together by Niklas Sörensson.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/py_solver.c	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,560 @@
+/*
+ * CDDL HEADER START
+ *
+ * The contents of this file are subject to the terms of the
+ * Common Development and Distribution License (the "License").
+ * You may not use this file except in compliance with the License.
+ *
+ * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+ * or http://www.opensolaris.org/os/licensing.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ *
+ * When distributing Covered Code, include this CDDL HEADER in each
+ * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+ * If applicable, add the following below this CDDL HEADER, with the
+ * fields enclosed by brackets "[]" replaced with your own identifying
+ * information: Portions Copyright [yyyy] [name of copyright owner]
+ *
+ * CDDL HEADER END
+ */
+/*
+ * Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+ * Use is subject to license terms.
+ */
+
+#include <Python.h>
+
+#include <sys/systeminfo.h>
+#include <sys/types.h>
+#include <stdlib.h>
+
+#include "solver.h"
+
+typedef void
+confunc_t(void *ptr, void *userarg);
+
+typedef struct
+{
+	int capacity;
+	int cnt;
+	void **buffer;
+} container_t;
+
+/*
+ * create a container
+ */
+
+static inline container_t *
+con_alloc(int initial_capacity)
+{
+	container_t *ptr = (container_t *) malloc(sizeof (container_t));
+	ptr->capacity = initial_capacity;
+	ptr->buffer = malloc(sizeof (void *) * ptr->capacity);
+	ptr->cnt = 0;
+	return (ptr);
+}
+
+/*
+ * add a pointer to a container
+ */
+
+static inline void
+con_addptr(container_t *container, void *ptr)
+{
+	if (container->cnt == container->capacity)
+		container->buffer = realloc(container->buffer,
+		    sizeof (void *) * (container->capacity += 1000));
+	container->buffer[container->cnt++] = ptr;
+}
+
+/*
+ * iterate over the void pointers in a container
+ */
+
+static inline void
+con_iterptrs(container_t *container, void *usr_arg, confunc_t *func)
+{
+	int i;
+	for (i = 0; i < container->cnt; i++)
+		func(container->buffer[i], usr_arg);
+}
+
+/*
+ * delete a container
+ */
+
+static inline void
+con_delete(container_t *container)
+{
+	free(container->buffer);
+	free((void *) container);
+}
+
+/* 
+ * allocate a ref-cnted pointer to a chunk of memory of specified size
+ * returns w/ refcnt set to 1.  Be able to retrieve size.
+ */
+
+static inline void *
+alloc_refcntptr(size_t size)
+{
+	long *ptr = malloc(size + sizeof (long) *2);
+	*ptr++ = size;
+	*ptr++ = 1;
+	return ((void *) ptr);
+}
+
+/*
+ * increment reference count on refcnted pointer
+ */
+
+static inline void *
+inc_refcntptr(void *ptr)
+{
+	long *lptr = (long *) ptr;
+
+	lptr[-1]++;
+
+	return (ptr);
+}
+
+/*
+ * decrement (and free if needed) refcnted pointer
+ */
+
+static inline void
+dec_refcntptr(void *ptr)
+{
+	long *lptr = (long *) ptr;
+
+	if (--(lptr[-1]) == 0)
+		free((void*) (lptr - 2));
+}
+
+static inline long
+size_refcntptr(void *ptr)
+{
+	long *lptr = (long *) ptr;
+	return (lptr[-2]);
+}
+
+
+/*
+ * routines dealing explicitly w/ containers of refcnted pointers
+ */
+
+/*
+ * duplicate a container of refcnted pointers
+ */
+
+static inline void
+cpyptr(void *ptr, void *usr)
+{
+	con_addptr((container_t *) usr, inc_refcntptr(ptr));
+}
+
+/*ARGSUSED*/
+static inline void
+decptr(void *ptr, void *usr)
+{
+	dec_refcntptr(ptr);
+}
+
+static inline container_t *
+refcntcon_dup(container_t *old)
+{
+	container_t *new = con_alloc(old->capacity);
+	con_iterptrs(old, new, cpyptr);
+	return (new);
+}
+
+static inline void
+refcntcon_del(container_t *old)
+{
+	if (old != NULL) {
+		con_iterptrs(old, NULL, decptr);
+		con_delete(old);
+	}
+}
+
+#define RETURN_NEEDS_RESET BAILOUT(PyExc_RuntimeError, "msat_solver failed; reset needed")
+#define RETURN_NEEDS_INTLIST BAILOUT(PyExc_TypeError, "List of integers expected")
+#define RETURN_NOT_SOLVER BAILOUT(PyExc_TypeError, "msat_solver expected")
+
+#define BAILOUT(exception, string) {PyErr_SetString(exception, string); return (NULL);}
+
+typedef struct
+{
+	PyObject_HEAD
+	solver *msat_instance;
+	int msat_needs_reset;
+	container_t *msat_clauses;
+} msat_solver;
+
+
+static void msat_dealloc(msat_solver *self);
+static PyMethodDef msat_methods[];
+static int msat_init(msat_solver * self, PyObject *args, PyObject *kwds);
+static PyObject *
+msat_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+
+static PyTypeObject minisat_solvertype = {
+	PyObject_HEAD_INIT(NULL)
+	0, /*ob_size*/
+	"solver.msat_solver", /*tp_name*/
+	sizeof (msat_solver), /*tp_basicsize*/
+	0, /*tp_itemsize*/
+	(destructor) msat_dealloc, /*tp_dealloc*/
+	0, /*tp_print*/
+	0, /*tp_getattr*/
+	0, /*tp_setattr*/
+	0, /*tp_compare*/
+	0, /*tp_repr*/
+	0, /*tp_as_number*/
+	0, /*tp_as_sequence*/
+	0, /*tp_as_mapping*/
+	0, /*tp_hash */
+	0, /*tp_call*/
+	0, /*tp_str*/
+	0, /*tp_getattro*/
+	0, /*tp_setattro*/
+	0, /*tp_as_buffer*/
+	Py_TPFLAGS_DEFAULT, /*tp_flags*/
+	"msat_solver object", /*tp_doc*/
+	0, /*tp_traverse*/
+	0, /*tp_clear*/
+	0, /*tp_richcompare*/
+	0, /*tp_weaklistoffset*/
+	0, /*tp_iter*/
+	0, /*tp_iternext*/
+	msat_methods, /*tp_methods*/
+	0, /*tp_members*/
+	0, /*tp_getset*/
+	0, /*tp_base*/
+	0, /*tp_dict*/
+	0, /*tp_descr_get*/
+	0, /*tp_descr_set*/
+	0, /*tp_dictoffset*/
+	(initproc) msat_init, /*tp_init*/
+	0, /*tp_alloc*/
+	msat_new /*tp_new*/
+};
+
+/*ARGSUSED*/
+static void
+msat_dealloc(msat_solver *self)
+{
+	refcntcon_del(self->msat_clauses);
+	if (self->msat_instance != NULL) 
+		solver_delete(self->msat_instance);
+	self->ob_type->tp_free((PyObject*) self);
+}
+
+static void
+add_clauses(void *ptr, void *arg)
+{
+	msat_solver *self = (msat_solver *) arg;
+	lbool ret = solver_addclause(self->msat_instance,
+	    (lit*) ptr, (lit*) ((char *) ptr + size_refcntptr(ptr)));
+
+	if (ret == l_False)
+		self->msat_needs_reset = 1;
+}
+
+/*ARGSUSED*/
+static PyObject *
+msat_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+	msat_solver *self;
+	msat_solver *prototype_solver;
+	int arg_count = PyTuple_Size(args);
+
+	if ((self = (msat_solver *) type->tp_alloc(type, 0)) == NULL)
+		return (NULL);
+	
+	/*
+	 * we optionally allow another solver instance
+	 * to be passed in to initialize the new solver
+	 */
+	switch (arg_count) {
+	case 0:
+		if ((self->msat_instance = solver_new()) == NULL) {
+			Py_DECREF(self);
+			return (NULL);
+		}
+		self->msat_instance->verbosity = 0;
+		self->msat_needs_reset = 0;
+		self->msat_clauses = con_alloc(1000);
+		return (PyObject *) self;
+	case 1:
+		prototype_solver = (msat_solver *) PyTuple_GetItem(args, 0);
+		if (prototype_solver == NULL) {
+			Py_DECREF(self);
+			return (NULL);
+		}
+		if (!PyObject_TypeCheck((PyObject *) prototype_solver,
+		    &minisat_solvertype)) {
+			Py_DECREF(self);
+			RETURN_NOT_SOLVER;
+		}
+		if (prototype_solver->msat_needs_reset != 0) {
+			Py_DECREF(self);
+			RETURN_NEEDS_RESET;
+		}
+		self->msat_instance = solver_new();
+		self->msat_instance->verbosity =
+		    prototype_solver->msat_instance->verbosity;
+		self->msat_clauses =
+		    refcntcon_dup(prototype_solver->msat_clauses);
+		self->msat_needs_reset = 0;
+		con_iterptrs(self->msat_clauses, self, add_clauses);
+		return (PyObject *) self;
+	default:
+		RETURN_NOT_SOLVER;
+	}
+
+}
+
+/*ARGSUSED*/
+static int
+msat_init(msat_solver * self, PyObject *args, PyObject *kwds)
+{
+	return (0);
+}
+
+/*ARGSUSED*/
+static PyObject *
+msat_reset(msat_solver *self, PyObject *args)
+{
+	int v = self->msat_instance->verbosity;
+	solver_delete(self->msat_instance);
+	self->msat_instance = solver_new();
+	self->msat_needs_reset = 0;
+	self->msat_instance->verbosity = v;
+	Py_RETURN_NONE;
+}
+
+static PyObject *
+msat_set_verbosity(msat_solver *self, PyObject *args)
+{
+	int index;
+
+	if (!PyArg_ParseTuple(args, "i", &index))
+		return (NULL);
+
+	self->msat_instance->verbosity = index;
+
+	Py_RETURN_NONE;
+}
+
+static PyObject *
+msat_adjust(msat_solver *self, PyObject *args)
+{
+	int index;
+
+	if (!PyArg_ParseTuple(args, "i", &index))
+		return (NULL);
+	solver_setnvars(self->msat_instance, index);
+
+	Py_RETURN_NONE;
+}
+
+/*ARGSUSED*/
+static PyObject *
+msat_get_variables(msat_solver *self, PyObject *args)
+{
+	if (self->msat_needs_reset)
+		RETURN_NEEDS_RESET;
+
+	return (Py_BuildValue("i", solver_nvars(self->msat_instance)));
+}
+
+/*ARGSUSED*/
+static PyObject *
+msat_get_added_clauses(msat_solver *self, PyObject *args)
+{
+	return (Py_BuildValue("i", solver_nclauses(self->msat_instance)));
+}
+
+static int *
+msat_unpack_integers(PyObject *list, int *nout)
+{
+	int i;
+	int n;
+	int *is;
+
+	if (!PyList_Check(list))
+		RETURN_NEEDS_INTLIST;
+
+	n = PyList_Size(list);
+
+	if ((is = (int *) alloc_refcntptr(n * sizeof (int))) == NULL) {
+		PyErr_NoMemory();
+		return (NULL);
+	}
+
+	/* each iteration: minisat_add(int) */
+	for (i = 0; i < n; i++) {
+		int l;
+		int v;
+
+		if ((l = PyInt_AsLong(PyList_GetItem(list, i))) == -1
+		&& PyErr_Occurred()) {
+			dec_refcntptr(is);
+			RETURN_NEEDS_INTLIST;
+		}
+
+		v = abs(l) - 1;
+		is[i] = (l > 0) ? toLit(v) : lit_neg(toLit(v));
+	}
+
+	*nout = n;
+	return (is);
+}
+
+/*ARGSUSED*/
+static PyObject *
+msat_add_clause(msat_solver *self, PyObject *args)
+{
+	int *is;
+	int n;
+	lbool ret;
+	PyObject *list;
+
+	if (self->msat_needs_reset)
+		RETURN_NEEDS_RESET;
+
+	if (!PyArg_ParseTuple(args, "O", &list))
+		return (NULL);
+
+	if ((is = msat_unpack_integers(list, &n)) == NULL)
+		return (NULL);
+
+	con_addptr(self->msat_clauses, is);
+
+	if (n == 0) {
+		dec_refcntptr(is);
+		RETURN_NEEDS_INTLIST;
+	}
+
+	ret = solver_addclause(self->msat_instance, is, &(is[n]));
+
+	if (ret == l_True)
+		Py_RETURN_TRUE;
+	else if (ret == l_False) {
+		self->msat_needs_reset = 1;
+		Py_RETURN_FALSE;
+	}
+
+	Py_RETURN_NONE;
+}
+
+static PyObject *
+msat_solve(msat_solver *self, PyObject *args, PyObject *keywds)
+{
+	int *as;
+	int *as_top;
+	int n;
+	PyObject *assume;
+	lbool ret;
+	int limit;
+
+	static char *kwlist[] = {"assume", "limit", NULL};
+
+	if (self->msat_needs_reset)
+		RETURN_NEEDS_RESET;
+
+	if (!PyArg_ParseTupleAndKeywords(args, keywds, "|Oi", kwlist,
+	&assume, &limit))
+		return (NULL);
+
+	if ((as = msat_unpack_integers(assume, &n)) == NULL)
+		return (NULL);
+
+	if (n > 0) {
+		as_top = &(as[n]);
+	} else {
+		dec_refcntptr(as);
+		as = NULL;
+		as_top = NULL;
+	}
+
+	ret = solver_solve(self->msat_instance, as, as_top);
+
+	if (as != NULL)
+		dec_refcntptr(as);
+
+	if (ret)
+		Py_RETURN_TRUE;
+	else {
+		self->msat_needs_reset = 1;
+		Py_RETURN_FALSE;
+	}
+}
+
+static PyObject *
+msat_dereference(msat_solver *self, PyObject *args)
+{
+	int literal;
+
+	if (self->msat_needs_reset)
+		RETURN_NEEDS_RESET;
+
+	if (!PyArg_ParseTuple(args, "i", &literal))
+		return (NULL);
+
+	if (self->msat_instance->model.ptr[literal] == l_True)
+		Py_RETURN_TRUE;
+
+	Py_RETURN_FALSE;
+}
+
+/*
+ * Should we provide enough Python to allow the use of a higher level function
+ * to build clauses, or should we just leave that to the caller?
+ */
+
+static PyMethodDef msat_methods[] = {
+	{ "reset", (PyCFunction) msat_reset,
+		METH_VARARGS,
+		"Reset solver after solution failure"},
+	{ "set_verbose", (PyCFunction) msat_set_verbosity,
+		METH_VARARGS,
+		"specify level of debugging output"},
+	{ "hint_variables", (PyCFunction) msat_adjust,
+		METH_VARARGS, NULL},
+	{ "get_variables", (PyCFunction) msat_get_variables,
+		METH_VARARGS, NULL},
+	{ "get_added_clauses", (PyCFunction) msat_get_added_clauses,
+		METH_VARARGS, NULL},
+	{ "add_clause", (PyCFunction) msat_add_clause,
+		METH_VARARGS,
+		"Add another clause (as list of integers) to solution space"},
+	{ "solve", (PyCFunction) msat_solve,
+		METH_VARARGS | METH_KEYWORDS,
+		"Attempt to satisfy current clauses and assumptions."},
+	{ "dereference", (PyCFunction) msat_dereference,
+		METH_VARARGS,
+		"Retrieve literal value in solution, if available after solve "
+		"attempt."},
+	{ NULL, NULL, 0, NULL}
+};
+
+
+static PyMethodDef no_module_methods[] = {
+	{NULL} /* Sentinel */
+};
+
+void
+initsolver()
+{
+	PyObject *m;
+
+	if (PyType_Ready(&minisat_solvertype) < 0)
+		return;
+	m = Py_InitModule3("solver", no_module_methods,
+	    "MINISAT SAT solver module");
+	Py_INCREF(&minisat_solvertype);
+	PyModule_AddObject(m, "msat_solver", (PyObject*) &minisat_solvertype);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/solver.c	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,1289 @@
+/**************************************************************************************************
+MiniSat -- Copyright (c) 2005, Niklas Sorensson
+http://www.cs.chalmers.se/Cs/Research/FormalMethods/MiniSat/
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
+NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
+OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+**************************************************************************************************/
+// Modified to compile with MS Visual Studio 6.0 by Alan Mishchenko
+
+#include <stdio.h>
+#include <assert.h>
+#include <math.h>
+
+#include "solver.h"
+
+//=================================================================================================
+// Debug:
+
+//#define VERBOSEDEBUG
+
+// For derivation output (verbosity level 2)
+#define L_IND    "%-*d"
+#define L_ind    solver_dlevel(s)*3+3,solver_dlevel(s)
+#define L_LIT    "%sx%d"
+#define L_lit(p) lit_sign(p)?"~":"", (lit_var(p))
+
+// Just like 'assert()' but expression will be evaluated in the release version as well.
+static inline void check(int expr) { assert(expr); }
+
+static void printlits(lit* begin, lit* end)
+{
+    int i;
+    for (i = 0; i < end - begin; i++)
+        printf(L_LIT" ",L_lit(begin[i]));
+}
+
+//=================================================================================================
+// Random numbers:
+
+
+// Returns a random float 0 <= x < 1. Seed must never be 0.
+static inline double drand(double* seed) {
+    int q;
+    *seed *= 1389796;
+    q = (int)(*seed / 2147483647);
+    *seed -= (double)q * 2147483647;
+    return *seed / 2147483647; }
+
+
+// Returns a random integer 0 <= x < size. Seed must never be 0.
+static inline int irand(double* seed, int size) {
+    return (int)(drand(seed) * size); }
+
+
+//=================================================================================================
+// Predeclarations:
+
+void sort(void** array, int size, int(*comp)(const void *, const void *));
+
+//=================================================================================================
+// Clause datatype + minor functions:
+
+struct clause_t
+{
+    int size_learnt;
+    lit lits[];
+};
+
+static inline int   clause_size       (clause* c)          { return c->size_learnt >> 1; }
+static inline lit*  clause_begin      (clause* c)          { return c->lits; }
+static inline int   clause_learnt     (clause* c)          { return c->size_learnt & 1; }
+static inline float clause_activity   (clause* c)          { return *((float*)&c->lits[c->size_learnt>>1]); }
+static inline void  clause_setactivity(clause* c, float a) { *((float*)&c->lits[c->size_learnt>>1]) = a; }
+static inline int   clause_bytesize   (clause *c)
+{
+  return clause_size(c) * sizeof (lit) + 
+    clause_learnt(c) * sizeof (float) + 
+       sizeof (clause);
+}
+
+
+//=================================================================================================
+// Encode literals in clause pointers:
+
+clause* clause_from_lit (lit l)     { return (clause*)((unsigned long)l + (unsigned long)l + 1);  }
+bool    clause_is_lit   (clause* c) { return ((unsigned long)c & 1);                              }
+lit     clause_read_lit (clause* c) { return (lit)((unsigned long)c >> 1);                        }
+
+//=================================================================================================
+// Simple helpers:
+
+static inline int     solver_dlevel(solver* s)    { return veci_size(&s->trail_lim); }
+static inline vecp*   solver_read_wlist     (solver* s, lit l){ return &s->wlists[l]; }
+static inline void    vecp_remove(vecp* v, void* e)
+{
+    void** ws = vecp_begin(v);
+    int    j  = 0;
+
+    for (; ws[j] != e  ; j++);
+    assert(j < vecp_size(v));
+    for (; j < vecp_size(v)-1; j++) ws[j] = ws[j+1];
+    vecp_resize(v,vecp_size(v)-1);
+}
+
+#ifdef SOLVER_COPY
+static inline void    vecp_replace(vecp* v, void *old, void *new)
+{
+    void** ws = vecp_begin(v);
+    int    j  = 0;
+
+    for (j = 0;j < vecp_size(v); j++)
+      if (ws[j] == old)
+	ws[j] = new;
+}
+#endif
+      
+//=================================================================================================
+// Variable order functions:
+
+static inline void order_update(solver* s, int v) // updateorder
+{
+    int*    orderpos = s->orderpos;
+    double* activity = s->activity;
+    int*    heap     = veci_begin(&s->order);
+    int     i        = orderpos[v];
+    int     x        = heap[i];
+    int     parent   = (i - 1) / 2;
+
+    assert(s->orderpos[v] != -1);
+
+    while (i != 0 && activity[x] > activity[heap[parent]]){
+        heap[i]           = heap[parent];
+        orderpos[heap[i]] = i;
+        i                 = parent;
+        parent            = (i - 1) / 2;
+    }
+    heap[i]     = x;
+    orderpos[x] = i;
+}
+
+static inline void order_assigned(solver* s, int v) 
+{
+}
+
+static inline void order_unassigned(solver* s, int v) // undoorder
+{
+    int* orderpos = s->orderpos;
+    if (orderpos[v] == -1){
+        orderpos[v] = veci_size(&s->order);
+        veci_push(&s->order,v);
+        order_update(s,v);
+    }
+}
+
+static int  order_select(solver* s, float random_var_freq) // selectvar
+{
+    int*    heap;
+    double* activity;
+    int*    orderpos;
+
+    lbool* values = s->assigns;
+
+    // Random decision:
+    if (drand(&s->random_seed) < random_var_freq){
+        int next = irand(&s->random_seed,s->size);
+        assert(next >= 0 && next < s->size);
+        if (values[next] == l_Undef)
+            return next;
+    }
+
+    // Activity based decision:
+
+    heap     = veci_begin(&s->order);
+    activity = s->activity;
+    orderpos = s->orderpos;
+
+
+    while (veci_size(&s->order) > 0){
+        int    next  = heap[0];
+        int    size  = veci_size(&s->order)-1;
+        int    x     = heap[size];
+
+        veci_resize(&s->order,size);
+
+        orderpos[next] = -1;
+
+        if (size > 0){
+            double act   = activity[x];
+
+            int    i     = 0;
+            int    child = 1;
+
+
+            while (child < size){
+                if (child+1 < size && activity[heap[child]] < activity[heap[child+1]])
+                    child++;
+
+                assert(child < size);
+
+                if (act >= activity[heap[child]])
+                    break;
+
+                heap[i]           = heap[child];
+                orderpos[heap[i]] = i;
+                i                 = child;
+                child             = 2 * child + 1;
+            }
+            heap[i]           = x;
+            orderpos[heap[i]] = i;
+        }
+
+        if (values[next] == l_Undef)
+            return next;
+    }
+
+    return var_Undef;
+}
+
+//=================================================================================================
+// Activity functions:
+
+static inline void act_var_rescale(solver* s) {
+    double* activity = s->activity;
+    int i;
+    for (i = 0; i < s->size; i++)
+        activity[i] *= 1e-100;
+    s->var_inc *= 1e-100;
+}
+
+static inline void act_var_bump(solver* s, int v) {
+    double* activity = s->activity;
+    if ((activity[v] += s->var_inc) > 1e100)
+        act_var_rescale(s);
+
+    //printf("bump %d %f\n", v-1, activity[v]);
+
+    if (s->orderpos[v] != -1)
+        order_update(s,v);
+
+}
+
+static inline void act_var_decay(solver* s) { s->var_inc *= s->var_decay; }
+
+static inline void act_clause_rescale(solver* s) {
+    clause** cs = (clause**)vecp_begin(&s->learnts);
+    int i;
+    for (i = 0; i < vecp_size(&s->learnts); i++){
+        float a = clause_activity(cs[i]);
+        clause_setactivity(cs[i], a * (float)1e-20);
+    }
+    s->cla_inc *= (float)1e-20;
+}
+
+
+static inline void act_clause_bump(solver* s, clause *c) {
+    float a = clause_activity(c) + s->cla_inc;
+    clause_setactivity(c,a);
+    if (a > 1e20) act_clause_rescale(s);
+}
+
+static inline void act_clause_decay(solver* s) { s->cla_inc *= s->cla_decay; }
+
+
+//=================================================================================================
+// Clause functions:
+
+/* pre: size > 1 && no variable occurs twice
+ */
+static clause* clause_new(solver* s, lit* begin, lit* end, int learnt)
+{
+    int size;
+    clause* c;
+    int i;
+
+    assert(end - begin > 1);
+    assert(learnt >= 0 && learnt < 2);
+    size           = end - begin;
+    c              = (clause*)malloc(sizeof(clause) + sizeof(lit) * size + learnt * sizeof(float));
+    c->size_learnt = (size << 1) | learnt;
+    assert(((unsigned int)c & 1) == 0);
+
+    for (i = 0; i < size; i++)
+        c->lits[i] = begin[i];
+
+    if (learnt)
+        *((float*)&c->lits[size]) = 0.0;
+
+    assert(begin[0] >= 0);
+    assert(begin[0] < s->size*2);
+    assert(begin[1] >= 0);
+    assert(begin[1] < s->size*2);
+
+    assert(lit_neg(begin[0]) < s->size*2);
+    assert(lit_neg(begin[1]) < s->size*2);
+
+    //vecp_push(solver_read_wlist(s,lit_neg(begin[0])),(void*)c);
+    //vecp_push(solver_read_wlist(s,lit_neg(begin[1])),(void*)c);
+
+    vecp_push(solver_read_wlist(s,lit_neg(begin[0])),(void*)(size > 2 ? c : clause_from_lit(begin[1])));
+    vecp_push(solver_read_wlist(s,lit_neg(begin[1])),(void*)(size > 2 ? c : clause_from_lit(begin[0])));
+
+    return c;
+}
+
+static void clause_remove(solver* s, clause* c)
+{
+    lit* lits = clause_begin(c);
+    assert(lit_neg(lits[0]) < s->size*2);
+    assert(lit_neg(lits[1]) < s->size*2);
+
+    //vecp_remove(solver_read_wlist(s,lit_neg(lits[0])),(void*)c);
+    //vecp_remove(solver_read_wlist(s,lit_neg(lits[1])),(void*)c);
+
+    assert(lits[0] < s->size*2);
+    vecp_remove(solver_read_wlist(s,lit_neg(lits[0])),(void*)(clause_size(c) > 2 ? c : clause_from_lit(lits[1])));
+    vecp_remove(solver_read_wlist(s,lit_neg(lits[1])),(void*)(clause_size(c) > 2 ? c : clause_from_lit(lits[0])));
+
+    if (clause_learnt(c)){
+        s->stats.learnts--;
+        s->stats.learnts_literals -= clause_size(c);
+    }else{
+        s->stats.clauses--;
+        s->stats.clauses_literals -= clause_size(c);
+    }
+
+    free(c);
+}
+
+
+static lbool clause_simplify(solver* s, clause* c)
+{
+    lit*   lits   = clause_begin(c);
+    lbool* values = s->assigns;
+    int i;
+
+    assert(solver_dlevel(s) == 0);
+
+    for (i = 0; i < clause_size(c); i++){
+        lbool sig = !lit_sign(lits[i]); sig += sig - 1;
+        if (values[lit_var(lits[i])] == sig)
+            return l_True;
+    }
+    return l_False;
+}
+
+//=================================================================================================
+// Minor (solver) functions:
+
+void solver_setnvars(solver* s,int n)
+{
+    int var;
+
+    if (s->cap < n){
+
+        while (s->cap < n) s->cap = s->cap*2+1;
+
+        s->wlists    = (vecp*)   realloc(s->wlists,   sizeof(vecp)*s->cap*2);
+        s->activity  = (double*) realloc(s->activity, sizeof(double)*s->cap);
+        s->assigns   = (lbool*)  realloc(s->assigns,  sizeof(lbool)*s->cap);
+        s->orderpos  = (int*)    realloc(s->orderpos, sizeof(int)*s->cap);
+        s->reasons   = (clause**)realloc(s->reasons,  sizeof(clause*)*s->cap);
+        s->levels    = (int*)    realloc(s->levels,   sizeof(int)*s->cap);
+        s->tags      = (lbool*)  realloc(s->tags,     sizeof(lbool)*s->cap);
+        s->trail     = (lit*)    realloc(s->trail,    sizeof(lit)*s->cap);
+    }
+
+    for (var = s->size; var < n; var++){
+        vecp_new(&s->wlists[2*var]);
+        vecp_new(&s->wlists[2*var+1]);
+        s->activity [var] = 0;
+        s->assigns  [var] = l_Undef;
+        s->orderpos [var] = veci_size(&s->order);
+        s->reasons  [var] = (clause*)0;
+        s->levels   [var] = 0;
+        s->tags     [var] = l_Undef;
+        
+        /* does not hold because variables enqueued at top level will not be reinserted in the heap
+           assert(veci_size(&s->order) == var); 
+         */
+        veci_push(&s->order,var);
+        order_update(s, var);
+    }
+
+    s->size = n > s->size ? n : s->size;
+}
+
+
+static inline bool enqueue(solver* s, lit l, clause* from)
+{
+    lbool* values = s->assigns;
+    int    v      = lit_var(l);
+    lbool  val    = values[v];
+#ifdef VERBOSEDEBUG
+    printf(L_IND"enqueue("L_LIT")\n", L_ind, L_lit(l));
+#endif
+
+    lbool sig = !lit_sign(l); sig += sig - 1;
+    if (val != l_Undef){
+        return val == sig;
+    }else{
+        // New fact -- store it.
+#ifdef VERBOSEDEBUG
+        printf(L_IND"bind("L_LIT")\n", L_ind, L_lit(l));
+#endif
+        int*     levels  = s->levels;
+        clause** reasons = s->reasons;
+
+        values [v] = sig;
+        levels [v] = solver_dlevel(s);
+        reasons[v] = from;
+        s->trail[s->qtail++] = l;
+
+        order_assigned(s, v);
+        return true;
+    }
+}
+
+
+static inline void assume(solver* s, lit l){
+    assert(s->qtail == s->qhead);
+    assert(s->assigns[lit_var(l)] == l_Undef);
+#ifdef VERBOSEDEBUG
+    printf(L_IND"assume("L_LIT")\n", L_ind, L_lit(l));
+#endif
+    veci_push(&s->trail_lim,s->qtail);
+    enqueue(s,l,(clause*)0);
+}
+
+
+static inline void solver_canceluntil(solver* s, int level) {
+    lit*     trail;   
+    lbool*   values;  
+    clause** reasons; 
+    int      bound;
+    int      c;
+    
+    if (solver_dlevel(s) <= level)
+        return;
+
+    trail   = s->trail;
+    values  = s->assigns;
+    reasons = s->reasons;
+    bound   = (veci_begin(&s->trail_lim))[level];
+
+    for (c = s->qtail-1; c >= bound; c--) {
+        int     x  = lit_var(trail[c]);
+        values [x] = l_Undef;
+        reasons[x] = (clause*)0;
+    }
+
+    for (c = s->qhead-1; c >= bound; c--)
+        order_unassigned(s,lit_var(trail[c]));
+
+    s->qhead = s->qtail = bound;
+    veci_resize(&s->trail_lim,level);
+}
+
+static void solver_record(solver* s, veci* cls)
+{
+    lit*    begin = veci_begin(cls);
+    lit*    end   = begin + veci_size(cls);
+    clause* c     = (veci_size(cls) > 1) ? clause_new(s,begin,end,1) : (clause*)0;
+    enqueue(s,*begin,c);
+
+    assert(veci_size(cls) > 0);
+
+    if (c != 0) {
+        vecp_push(&s->learnts,c);
+        act_clause_bump(s,c);
+        s->stats.learnts++;
+        s->stats.learnts_literals += veci_size(cls);
+    }
+}
+
+
+static double solver_progress(solver* s)
+{
+    lbool*  values = s->assigns;
+    int*    levels = s->levels;
+    int     i;
+
+    double  progress = 0;
+    double  F        = 1.0 / s->size;
+    for (i = 0; i < s->size; i++)
+        if (values[i] != l_Undef)
+            progress += pow(F, levels[i]);
+    return progress / s->size;
+}
+
+//=================================================================================================
+// Major methods:
+
+/* Conflict-clause minimization helper: returns true when literal l is
+ * redundant, i.e. implied by the other literals of the learnt clause.
+ * 'minl' is a bitmask of (level & 31) values occurring in the learnt
+ * clause; a variable whose level is not in the mask can never be
+ * removable, which prunes the search early.  Performs an iterative DFS
+ * over reason clauses using s->stack, marking visited variables in
+ * s->tags / s->tagged.  On failure, tags added past 'top' are undone. */
+static bool solver_lit_removable(solver* s, lit l, int minl)
+{
+    lbool*   tags    = s->tags;
+    clause** reasons = s->reasons;
+    int*     levels  = s->levels;
+    int      top     = veci_size(&s->tagged);
+
+    assert(lit_var(l) >= 0 && lit_var(l) < s->size);
+    assert(reasons[lit_var(l)] != 0);
+    veci_resize(&s->stack,0);
+    veci_push(&s->stack,lit_var(l));
+
+    while (veci_size(&s->stack) > 0){
+        clause* c;
+        int v = veci_begin(&s->stack)[veci_size(&s->stack)-1];
+        assert(v >= 0 && v < s->size);
+        veci_resize(&s->stack,veci_size(&s->stack)-1);
+        assert(reasons[v] != 0);
+        c    = reasons[v];
+
+        if (clause_is_lit(c)){
+            // Binary reason stored inline as a single literal.
+            int v = lit_var(clause_read_lit(c));
+            if (tags[v] == l_Undef && levels[v] != 0){
+                if (reasons[v] != 0 && ((1 << (levels[v] & 31)) & minl)){
+                    veci_push(&s->stack,v);
+                    tags[v] = l_True;
+                    veci_push(&s->tagged,v);
+                }else{
+                    // Not removable: roll back tags added by this call.
+                    int* tagged = veci_begin(&s->tagged);
+                    int j;
+                    for (j = top; j < veci_size(&s->tagged); j++)
+                        tags[tagged[j]] = l_Undef;
+                    veci_resize(&s->tagged,top);
+                    return false;
+                }
+            }
+        }else{
+            lit*    lits = clause_begin(c);
+            int     i, j;
+
+            // Skip lits[0]: it is the implied literal itself.
+            for (i = 1; i < clause_size(c); i++){
+                int v = lit_var(lits[i]);
+                if (tags[v] == l_Undef && levels[v] != 0){
+                    if (reasons[v] != 0 && ((1 << (levels[v] & 31)) & minl)){
+
+                        veci_push(&s->stack,lit_var(lits[i]));
+                        tags[v] = l_True;
+                        veci_push(&s->tagged,v);
+                    }else{
+                        // Not removable: roll back tags added by this call.
+                        int* tagged = veci_begin(&s->tagged);
+                        for (j = top; j < veci_size(&s->tagged); j++)
+                            tags[tagged[j]] = l_Undef;
+                        veci_resize(&s->tagged,top);
+                        return false;
+                    }
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+/* First-UIP conflict analysis: starting from conflicting clause c, walk
+ * the implication graph backwards along the trail until only one literal
+ * of the current decision level remains (the first unique implication
+ * point).  The derived clause is written into 'learnt' with the asserting
+ * literal in slot 0; it is then minimized via solver_lit_removable, and
+ * the remaining literal with the highest decision level is swapped into
+ * slot 1 so it can serve as the second watch after backtracking. */
+static void solver_analyze(solver* s, clause* c, veci* learnt)
+{
+    lit*     trail   = s->trail;
+    lbool*   tags    = s->tags;
+    clause** reasons = s->reasons;
+    int*     levels  = s->levels;
+    int      cnt     = 0;        // literals of current level still to resolve
+    lit      p       = lit_Undef;
+    int      ind     = s->qtail-1;
+    lit*     lits;
+    int      i, j, minl;
+    int*     tagged;
+
+    // Reserve slot 0 for the asserting literal (filled in below).
+    veci_push(learnt,lit_Undef);
+
+    do{
+        assert(c != 0);
+
+        if (clause_is_lit(c)){
+            // Binary reason stored inline as a single literal.
+            lit q = clause_read_lit(c);
+            assert(lit_var(q) >= 0 && lit_var(q) < s->size);
+            if (tags[lit_var(q)] == l_Undef && levels[lit_var(q)] > 0){
+                tags[lit_var(q)] = l_True;
+                veci_push(&s->tagged,lit_var(q));
+                act_var_bump(s,lit_var(q));
+                if (levels[lit_var(q)] == solver_dlevel(s))
+                    cnt++;
+                else
+                    veci_push(learnt,q);
+            }
+        }else{
+
+            if (clause_learnt(c))
+                act_clause_bump(s,c);
+
+            lits = clause_begin(c);
+            //printlits(lits,lits+clause_size(c)); printf("\n");
+            for (j = (p == lit_Undef ? 0 : 1); j < clause_size(c); j++){
+                lit q = lits[j];
+                assert(lit_var(q) >= 0 && lit_var(q) < s->size);
+                if (tags[lit_var(q)] == l_Undef && levels[lit_var(q)] > 0){
+                    tags[lit_var(q)] = l_True;
+                    veci_push(&s->tagged,lit_var(q));
+                    act_var_bump(s,lit_var(q));
+                    if (levels[lit_var(q)] == solver_dlevel(s))
+                        cnt++;
+                    else
+                        veci_push(learnt,q);
+                }
+            }
+        }
+
+        // Find the next tagged literal on the trail to resolve on.
+        while (tags[lit_var(trail[ind--])] == l_Undef);
+
+        p = trail[ind+1];
+        c = reasons[lit_var(p)];
+        cnt--;
+
+    }while (cnt > 0);
+
+    // p is the first UIP; its negation is the asserting literal.
+    *veci_begin(learnt) = lit_neg(p);
+
+    // Build a bitmask of decision levels present in the clause for
+    // solver_lit_removable's early-out test.
+    lits = veci_begin(learnt);
+    minl = 0;
+    for (i = 1; i < veci_size(learnt); i++){
+        int lev = levels[lit_var(lits[i])];
+        minl    |= 1 << (lev & 31);
+    }
+
+    // simplify (full)
+    for (i = j = 1; i < veci_size(learnt); i++){
+        if (reasons[lit_var(lits[i])] == 0 || !solver_lit_removable(s,lits[i],minl))
+            lits[j++] = lits[i];
+    }
+
+    // update size of learnt + statistics
+    s->stats.max_literals += veci_size(learnt);
+    veci_resize(learnt,j);
+    s->stats.tot_literals += j;
+
+    // clear tags
+    tagged = veci_begin(&s->tagged);
+    for (i = 0; i < veci_size(&s->tagged); i++)
+        tags[tagged[i]] = l_Undef;
+    veci_resize(&s->tagged,0);
+
+#ifdef DEBUG
+    for (i = 0; i < s->size; i++)
+        assert(tags[i] == l_Undef);
+#endif
+
+#ifdef VERBOSEDEBUG
+    printf(L_IND"Learnt {", L_ind);
+    for (i = 0; i < veci_size(learnt); i++) printf(" "L_LIT, L_lit(lits[i]));
+#endif
+    // Move the highest-level remaining literal to slot 1 (backtrack watch).
+    if (veci_size(learnt) > 1){
+        int max_i = 1;
+        int max   = levels[lit_var(lits[1])];
+        lit tmp;
+
+        for (i = 2; i < veci_size(learnt); i++)
+            if (levels[lit_var(lits[i])] > max){
+                max   = levels[lit_var(lits[i])];
+                max_i = i;
+            }
+
+        tmp         = lits[1];
+        lits[1]     = lits[max_i];
+        lits[max_i] = tmp;
+    }
+#ifdef VERBOSEDEBUG
+    {
+        int lev = veci_size(learnt) > 1 ? levels[lit_var(lits[1])] : 0;
+        printf(" } at level %d\n", lev);
+    }
+#endif
+}
+
+
+/* Propagate all enqueued facts using two-watched-literal lists.
+ * Returns the conflicting clause on conflict, or NULL if the queue is
+ * fully propagated.  Binary clauses are stored inline in the watch
+ * lists as encoded literals (clause_is_lit) rather than clause pointers. */
+clause* solver_propagate(solver* s)
+{
+    lbool*  values = s->assigns;
+    clause* confl  = (clause*)0;
+    lit*    lits;
+
+    //printf("solver_propagate\n");
+    while (confl == 0 && s->qtail - s->qhead > 0){
+        lit  p  = s->trail[s->qhead++];
+        vecp* ws = solver_read_wlist(s,p);
+        clause **begin = (clause**)vecp_begin(ws);
+        clause **end   = begin + vecp_size(ws);
+        clause **i, **j;
+
+        s->stats.propagations++;
+        s->simpdb_props--;
+
+        //printf("checking lit %d: "L_LIT"\n", veci_size(ws), L_lit(p));
+        for (i = j = begin; i < end; ){
+            if (clause_is_lit(*i)){
+                // Inline binary clause: the other literal must hold.
+                *j++ = *i;
+                if (!enqueue(s,clause_read_lit(*i),clause_from_lit(p))){
+                    // Materialize the conflict in the scratch binary clause.
+                    confl = s->binary;
+                    (clause_begin(confl))[1] = lit_neg(p);
+                    (clause_begin(confl))[0] = clause_read_lit(*i++);
+
+                    // Copy the remaining watches:
+                    while (i < end)
+                        *j++ = *i++;
+                }
+            }else{
+                lit false_lit;
+                lbool sig;
+
+                lits = clause_begin(*i);
+
+                // Make sure the false literal is data[1]:
+                false_lit = lit_neg(p);
+                if (lits[0] == false_lit){
+                    lits[0] = lits[1];
+                    lits[1] = false_lit;
+                }
+                assert(lits[1] == false_lit);
+                //printf("checking clause: "); printlits(lits, lits+clause_size(*i)); printf("\n");
+
+                // If 0th watch is true, then clause is already satisfied.
+                sig = !lit_sign(lits[0]); sig += sig - 1;
+                if (values[lit_var(lits[0])] == sig){
+                    *j++ = *i;
+                }else{
+                    // Look for new watch:
+                    lit* stop = lits + clause_size(*i);
+                    lit* k;
+                    for (k = lits + 2; k < stop; k++){
+                        lbool sig = lit_sign(*k); sig += sig - 1;
+                        if (values[lit_var(*k)] != sig){
+                            lits[1] = *k;
+                            *k = false_lit;
+                            vecp_push(solver_read_wlist(s,lit_neg(lits[1])),*i);
+                            goto next; }
+                    }
+
+                    *j++ = *i;
+                    // Clause is unit under assignment:
+                    if (!enqueue(s,lits[0], *i)){
+                        confl = *i++;
+                        // Copy the remaining watches:
+                        while (i < end)
+                            *j++ = *i++;
+                    }
+                }
+            }
+        next:
+            i++;
+        }
+
+        // Compact the watch list: keep only entries written through j.
+        s->stats.inspects += j - (clause**)vecp_begin(ws);
+        vecp_resize(ws,j - (clause**)vecp_begin(ws));
+    }
+
+    return confl;
+}
+
+/* Sort comparator for reducedb: non-binary clauses with lower activity
+ * sort first (i.e. are the preferred removal candidates); binary clauses
+ * always sort last. */
+static inline int clause_cmp (const void* x, const void* y) {
+    return clause_size((clause*)x) > 2 && (clause_size((clause*)y) == 2 || clause_activity((clause*)x) < clause_activity((clause*)y)) ? -1 : 1; }
+
+/* Reduce the learnt-clause database: sort by activity, unconditionally
+ * delete the less-active half (except binaries and clauses currently
+ * acting as a reason), then delete low-activity clauses from the rest. */
+void solver_reducedb(solver* s)
+{
+    int      i, j;
+    double   extra_lim = s->cla_inc / vecp_size(&s->learnts); // Remove any clause below this activity
+    clause** learnts = (clause**)vecp_begin(&s->learnts);
+    clause** reasons = s->reasons;
+
+    sort(vecp_begin(&s->learnts), vecp_size(&s->learnts), &clause_cmp);
+
+    // First half (lowest activity): remove unless binary or locked as reason.
+    for (i = j = 0; i < vecp_size(&s->learnts) / 2; i++){
+        if (clause_size(learnts[i]) > 2 && reasons[lit_var(*clause_begin(learnts[i]))] != learnts[i])
+            clause_remove(s,learnts[i]);
+        else
+            learnts[j++] = learnts[i];
+    }
+    // Second half: additionally require activity below extra_lim.
+    for (; i < vecp_size(&s->learnts); i++){
+        if (clause_size(learnts[i]) > 2 && reasons[lit_var(*clause_begin(learnts[i]))] != learnts[i] && clause_activity(learnts[i]) < extra_lim)
+            clause_remove(s,learnts[i]);
+        else
+            learnts[j++] = learnts[i];
+    }
+
+    //printf("reducedb deleted %d\n", vecp_size(&s->learnts) - j);
+
+
+    vecp_resize(&s->learnts,j);
+}
+
+/* Core CDCL search loop.  Runs until a model is found (l_True), the
+ * problem is proven unsatisfiable under current assumptions (l_False),
+ * or 'nof_conflicts' conflicts have occurred (l_Undef, caller restarts).
+ * 'nof_learnts' bounds the learnt database before reduction kicks in. */
+static lbool solver_search(solver* s, int nof_conflicts, int nof_learnts)
+{
+    int*    levels          = s->levels;
+    double  var_decay       = 0.95;
+    double  clause_decay    = 0.999;
+    double  random_var_freq = 0.02;  // chance of a random (vs activity-based) decision
+
+    int     conflictC       = 0;
+    veci    learnt_clause;
+
+    assert(s->root_level == solver_dlevel(s));
+
+    s->stats.starts++;
+    s->var_decay = (float)(1 / var_decay   );
+    s->cla_decay = (float)(1 / clause_decay);
+    veci_resize(&s->model,0);
+    veci_new(&learnt_clause);
+
+    for (;;){
+        clause* confl = solver_propagate(s);
+        if (confl != 0){
+            // CONFLICT
+            int blevel;
+
+#ifdef VERBOSEDEBUG
+            printf(L_IND"**CONFLICT**\n", L_ind);
+#endif
+            s->stats.conflicts++; conflictC++;
+            if (solver_dlevel(s) == s->root_level){
+                // Conflict at root level: unsatisfiable.
+                veci_delete(&learnt_clause);
+                return l_False;
+            }
+
+            // Analyze, backjump to the asserting level, record the clause.
+            veci_resize(&learnt_clause,0);
+            solver_analyze(s, confl, &learnt_clause);
+            blevel = veci_size(&learnt_clause) > 1 ? levels[lit_var(veci_begin(&learnt_clause)[1])] : s->root_level;
+            blevel = s->root_level > blevel ? s->root_level : blevel;
+            solver_canceluntil(s,blevel);
+            solver_record(s,&learnt_clause);
+            act_var_decay(s);
+            act_clause_decay(s);
+
+        }else{
+            // NO CONFLICT
+            int next;
+
+            if (nof_conflicts >= 0 && conflictC >= nof_conflicts){
+                // Reached bound on number of conflicts:
+                s->progress_estimate = solver_progress(s);
+                solver_canceluntil(s,s->root_level);
+                veci_delete(&learnt_clause);
+                return l_Undef; }
+
+            if (solver_dlevel(s) == 0)
+                // Simplify the set of problem clauses:
+                solver_simplify(s);
+
+            if (nof_learnts >= 0 && vecp_size(&s->learnts) - s->qtail >= nof_learnts)
+                // Reduce the set of learnt clauses:
+                solver_reducedb(s);
+
+            // New variable decision:
+            s->stats.decisions++;
+            next = order_select(s,(float)random_var_freq);
+
+            if (next == var_Undef){
+                // Model found:
+                lbool* values = s->assigns;
+                int i;
+                for (i = 0; i < s->size; i++) veci_push(&s->model,(int)values[i]);
+                solver_canceluntil(s,s->root_level);
+                veci_delete(&learnt_clause);
+
+                /*
+                veci apa; veci_new(&apa);
+                for (i = 0; i < s->size; i++) 
+                    veci_push(&apa,(int)(s->model.ptr[i] == l_True ? toLit(i) : lit_neg(toLit(i))));
+                printf("model: "); printlits((lit*)apa.ptr, (lit*)apa.ptr + veci_size(&apa)); printf("\n");
+                veci_delete(&apa);
+                */
+
+                return l_True;
+            }
+
+            // Decide the chosen variable negatively first (MiniSat polarity).
+            assume(s,lit_neg(toLit(next)));
+        }
+    }
+
+    return l_Undef; // cannot happen
+}
+
+//=================================================================================================
+// External solver functions:
+
+/* Allocate and initialize a fresh solver with zero variables.
+ * Arrays are left NULL until solver_setnvars grows them; 's->binary'
+ * is a scratch two-literal clause used to report binary conflicts. */
+solver* solver_new(void)
+{
+    solver* s = (solver*)malloc(sizeof(solver));
+
+    // initialize vectors
+    vecp_new(&s->clauses);
+    vecp_new(&s->learnts);
+    veci_new(&s->order);
+    veci_new(&s->trail_lim);
+    veci_new(&s->tagged);
+    veci_new(&s->stack);
+    veci_new(&s->model);
+
+    // initialize arrays
+    s->wlists    = 0;
+    s->activity  = 0;
+    s->assigns   = 0;
+    s->orderpos  = 0;
+    s->reasons   = 0;
+    s->levels    = 0;
+    s->tags      = 0;
+    s->trail     = 0;
+
+
+    // initialize other vars
+    s->size                   = 0;
+    s->cap                    = 0;
+    s->qhead                  = 0;
+    s->qtail                  = 0;
+    s->cla_inc                = 1;
+    s->cla_decay              = 1;
+    s->var_inc                = 1;
+    s->var_decay              = 1;
+    s->root_level             = 0;
+    s->simpdb_assigns         = 0;
+    s->simpdb_props           = 0;
+    s->random_seed            = 91648253;
+    s->progress_estimate      = 0;
+    s->binary                 = (clause*)malloc(sizeof(clause) + sizeof(lit)*2);
+    s->binary->size_learnt    = (2 << 1);  // size 2, learnt bit clear
+    s->verbosity              = 0;
+
+    s->stats.starts           = 0;
+    s->stats.decisions        = 0;
+    s->stats.propagations     = 0;
+    s->stats.inspects         = 0;
+    s->stats.conflicts        = 0;
+    s->stats.clauses          = 0;
+    s->stats.clauses_literals = 0;
+    s->stats.learnts          = 0;
+    s->stats.learnts_literals = 0;
+    s->stats.max_literals     = 0;
+    s->stats.tot_literals     = 0;
+
+    return s;
+}
+
+#ifdef SOLVER_COPY
+/*
+ * Copy an existing solver instance; useful to
+ * try alternatives w/o expense of reloading
+ */
+/* Return a freshly malloc'd copy of the s bytes at ptr. */
+void *dupalloc(void *ptr, size_t s)
+{
+    return (memcpy(malloc(s), ptr, s));
+}
+
+/* Duplicate solver s so alternative assumptions can be explored without
+ * the expense of reloading the problem.  Returns the new instance.
+ *
+ * NOTE(review): dupalloc copies only the vecp headers in ns->wlists, so
+ * their internal buffers are still shared with s; likewise vecp_replace
+ * below patches s's watch lists, not ns's — confirm this sharing is
+ * intended before using both solvers concurrently. */
+solver *solver_copy(solver* s)
+{
+    /*
+     * duplicate base solver structure
+     */
+    solver *ns;
+    int i;
+
+    ns = (solver*)malloc(sizeof(solver));
+    *ns = *s;
+    /*
+     * duplicate first level structures
+     */
+    ns->binary    = (clause*) dupalloc(s->binary, sizeof(clause) + sizeof(lit)*2);    
+
+    if (s->wlists != NULL) {
+      ns->wlists    = (vecp*)   dupalloc(s->wlists,   sizeof(vecp)*s->cap*2);
+      ns->activity  = (double*) dupalloc(s->activity, sizeof(double)*s->cap);
+      ns->assigns   = (lbool*)  dupalloc(s->assigns,  sizeof(lbool)*s->cap);
+      ns->orderpos  = (int*)    dupalloc(s->orderpos, sizeof(int)*s->cap);
+      ns->reasons   = (clause**)dupalloc(s->reasons,  sizeof(clause*)*s->cap);
+      ns->levels    = (int*)    dupalloc(s->levels,   sizeof(int)*s->cap);
+      ns->tags      = (lbool*)  dupalloc(s->tags,     sizeof(lbool)*s->cap);
+      ns->trail     = (lit*)    dupalloc(s->trail,    sizeof(lit)*s->cap);
+    }
+
+
+    veci_dup(&ns->order,     &s->order);
+    veci_dup(&ns->trail_lim, &s->trail_lim);
+    veci_dup(&ns->tagged,    &s->tagged);
+    veci_dup(&ns->stack,     &s->stack);
+    veci_dup(&ns->model,     &s->model);
+
+    /*
+     * duplicate clause vectors
+     */
+
+    /*    XXX need to replace clause vectors in wlists
+	  note that not all wlist entries are real clauses
+	  iteration over clause vector required; non-clauses
+	  can stay as they're not pointers 
+
+
+    vecp_push(solver_read_wlist(s,lit_neg(begin[0])),(void*)(size > 2 ? c : clause_from_lit(begin[1])));
+    vecp_push(solver_read_wlist(s,lit_neg(begin[1])),(void*)(size > 2 ? c : clause_from_lit(begin[0])));
+
+
+
+    */
+    vecp_dup(&ns->clauses, &s->clauses);
+
+    for (i = 0; i < vecp_size(&s->clauses); i++) {
+      clause *old = (clause *)vecp_begin(&s->clauses)[i];
+      clause *new = (clause *)dupalloc(old, clause_bytesize(old));
+
+      vecp_replace(solver_read_wlist(s, lit_neg(old->lits[0])), old, new);
+      vecp_begin(&ns->clauses)[i] = new;
+    }
+
+    vecp_dup(&ns->learnts, &s->learnts);
+
+    for (i = 0; i < vecp_size(&s->learnts); i++) {
+      clause *old = (clause *)vecp_begin(&s->learnts)[i];
+      clause *new = (clause *)dupalloc(old, clause_bytesize(old));
+
+      vecp_replace(solver_read_wlist(s, lit_neg(old->lits[0])), old, new);
+      vecp_begin(&ns->learnts)[i] = new;
+    }
+
+    /* BUG FIX: the function is declared to return solver* but previously
+     * fell off the end without a return statement, leaving callers with
+     * an indeterminate value (undefined behavior). */
+    return ns;
+}
+#endif
+
+/* Free the solver and everything it owns: all problem and learnt
+ * clauses, the internal vectors, the scratch binary clause, and the
+ * per-variable arrays (allocated together, so one NULL check suffices). */
+void solver_delete(solver* s)
+{
+    int i;
+    for (i = 0; i < vecp_size(&s->clauses); i++)
+        free(vecp_begin(&s->clauses)[i]);
+
+    for (i = 0; i < vecp_size(&s->learnts); i++)
+        free(vecp_begin(&s->learnts)[i]);
+
+    // delete vectors
+    vecp_delete(&s->clauses);
+    vecp_delete(&s->learnts);
+    veci_delete(&s->order);
+    veci_delete(&s->trail_lim);
+    veci_delete(&s->tagged);
+    veci_delete(&s->stack);
+    veci_delete(&s->model);
+    free(s->binary);
+
+    // delete arrays
+    if (s->wlists != 0){
+        int i;
+        for (i = 0; i < s->size*2; i++)
+            vecp_delete(&s->wlists[i]);
+
+        // if one is different from null, all are
+        free(s->wlists);
+        free(s->activity );
+        free(s->assigns  );
+        free(s->orderpos );
+        free(s->reasons  );
+        free(s->levels   );
+        free(s->trail    );
+        free(s->tags     );
+    }
+
+    free(s);
+}
+
+
+/* Add the clause [begin, end) to the solver.  Sorts the literals, grows
+ * the variable arrays as needed, strips duplicates and already-false/
+ * satisfied literals, and detects tautologies.  Returns false if the
+ * clause is empty (or reduces to empty / a falsified unit), true
+ * otherwise.  Note: a tautological clause returns true without being
+ * stored. */
+bool solver_addclause(solver* s, lit* begin, lit* end)
+{
+    lit *i,*j;
+    int maxvar;
+    lbool* values;
+    lit last;
+
+    if (begin == end) return false;
+
+    //printlits(begin,end); printf("\n");
+    // insertion sort
+    maxvar = lit_var(*begin);
+    for (i = begin + 1; i < end; i++){
+        lit l = *i;
+        maxvar = lit_var(l) > maxvar ? lit_var(l) : maxvar;
+        for (j = i; j > begin && *(j-1) > l; j--)
+            *j = *(j-1);
+        *j = l;
+    }
+    solver_setnvars(s,maxvar+1);
+
+    //printlits(begin,end); printf("\n");
+    values = s->assigns;
+
+    // delete duplicates
+    last = lit_Undef;
+    for (i = j = begin; i < end; i++){
+        //printf("lit: "L_LIT", value = %d\n", L_lit(*i), (lit_sign(*i) ? -values[lit_var(*i)] : values[lit_var(*i)]));
+        lbool sig = !lit_sign(*i); sig += sig - 1;
+        if (*i == lit_neg(last) || sig == values[lit_var(*i)])
+            return true;   // tautology (or literal already true)
+        else if (*i != last && values[lit_var(*i)] == l_Undef)
+            last = *j++ = *i;
+    }
+
+    //printf("final: "); printlits(begin,j); printf("\n");
+
+    if (j == begin)          // empty clause
+        return false;
+    else if (j - begin == 1) // unit clause
+        return enqueue(s,*begin,(clause*)0);
+
+    // create new clause
+    vecp_push(&s->clauses,clause_new(s,begin,j,0));
+
+
+    s->stats.clauses++;
+    s->stats.clauses_literals += j - begin;
+
+    return true;
+}
+
+
+/* Top-level simplification: propagate pending facts, then remove
+ * clauses satisfied by root-level assignments (skipping clauses locked
+ * as reasons).  Returns false iff a root-level conflict is found.
+ * Work is skipped until enough propagations have happened since the
+ * last call (simpdb_assigns / simpdb_props heuristic). */
+bool   solver_simplify(solver* s)
+{
+    clause** reasons;
+    int type;
+
+    assert(solver_dlevel(s) == 0);
+
+    if (solver_propagate(s) != 0)
+        return false;
+
+    if (s->qhead == s->simpdb_assigns || s->simpdb_props > 0)
+        return true;
+
+    reasons = s->reasons;
+    // Pass 0 over problem clauses, pass 1 over learnt clauses.
+    for (type = 0; type < 2; type++){
+        vecp*    cs  = type ? &s->learnts : &s->clauses;
+        clause** cls = (clause**)vecp_begin(cs);
+
+        int i, j;
+        for (j = i = 0; i < vecp_size(cs); i++){
+            if (reasons[lit_var(*clause_begin(cls[i]))] != cls[i] &&
+                clause_simplify(s,cls[i]) == l_True)
+                clause_remove(s,cls[i]);
+            else
+                cls[j++] = cls[i];
+        }
+        vecp_resize(cs,j);
+    }
+
+    s->simpdb_assigns = s->qhead;
+    // (shouldn't depend on 'stats' really, but it will do for now)
+    s->simpdb_props   = (int)(s->stats.clauses_literals + s->stats.learnts_literals);
+
+    return true;
+}
+
+
+/* Solve under the assumption literals [begin, end).  Each assumption is
+ * asserted and propagated; a falsified assumption returns false
+ * immediately.  Then runs restarted CDCL search with geometrically
+ * growing conflict/learnt limits.  Returns true iff satisfiable under
+ * the assumptions; the model (if any) is left in s->model. */
+bool   solver_solve(solver* s, lit* begin, lit* end)
+{
+    double  nof_conflicts = 100;
+    double  nof_learnts   = solver_nclauses(s) / 3;
+    lbool   status        = l_Undef;
+    lbool*  values        = s->assigns;
+    lit*    i;
+    
+    //printf("solve: "); printlits(begin, end); printf("\n");
+    for (i = begin; i < end; i++){
+        switch (lit_sign(*i) ? -values[lit_var(*i)] : values[lit_var(*i)]){
+        case 1: /* l_True: already satisfied */
+            break;
+        case 0: /* l_Undef */
+            assume(s, *i);
+            if (solver_propagate(s) == NULL)
+                break;
+            // fall through: propagation found a conflict
+        case -1: /* l_False */
+            solver_canceluntil(s, 0);
+            return false;
+        }
+    }
+
+    s->root_level = solver_dlevel(s);
+
+    if (s->verbosity >= 1){
+        printf("==================================[MINISAT]===================================\n");
+        printf("| Conflicts |     ORIGINAL     |              LEARNT              | Progress |\n");
+        printf("|           | Clauses Literals |   Limit Clauses Literals  Lit/Cl |          |\n");
+        printf("==============================================================================\n");
+    }
+
+    // Restart loop: each round allows 1.5x more conflicts, 1.1x more learnts.
+    while (status == l_Undef){
+        double Ratio = (s->stats.learnts == 0)? 0.0 :
+            s->stats.learnts_literals / (double)s->stats.learnts;
+
+        if (s->verbosity >= 1){
+            printf("| %9.0f | %7.0f %8.0f | %7.0f %7.0f %8.0f %7.1f | %6.3f %% |\n", 
+                (double)s->stats.conflicts,
+                (double)s->stats.clauses, 
+                (double)s->stats.clauses_literals,
+                (double)nof_learnts, 
+                (double)s->stats.learnts, 
+                (double)s->stats.learnts_literals,
+                Ratio,
+                s->progress_estimate*100);
+            fflush(stdout);
+        }
+        status = solver_search(s,(int)nof_conflicts, (int)nof_learnts);
+        nof_conflicts *= 1.5;
+        nof_learnts   *= 1.1;
+    }
+    if (s->verbosity >= 1)
+        printf("==============================================================================\n");
+
+    solver_canceluntil(s,0);
+    return status != l_False;
+}
+
+
+/* Number of variables currently in the solver. */
+int solver_nvars(solver* s)
+{
+    return s->size;
+}
+
+
+/* Number of problem (non-learnt) clauses. */
+int solver_nclauses(solver* s)
+{
+    return vecp_size(&s->clauses);
+}
+
+
+/* Total number of conflicts encountered so far. */
+int solver_nconflicts(solver* s)
+{
+    return (int)s->stats.conflicts;
+}
+
+//=================================================================================================
+// Sorting functions (sigh):
+
+/* O(n^2) selection sort; used by sortrnd as the base case for small arrays. */
+static inline void selectionsort(void** array, int size, int(*comp)(const void *, const void *))
+{
+    int     i, j, best_i;
+    void*   tmp;
+
+    for (i = 0; i < size-1; i++){
+        best_i = i;
+        for (j = i+1; j < size; j++){
+            if (comp(array[j], array[best_i]) < 0)
+                best_i = j;
+        }
+        tmp = array[i]; array[i] = array[best_i]; array[best_i] = tmp;
+    }
+}
+
+
+/* Randomized quicksort with a random pivot (drawn via irand from 'seed')
+ * and Hoare-style partitioning; falls back to selectionsort below 16
+ * elements. */
+static void sortrnd(void** array, int size, int(*comp)(const void *, const void *), double* seed)
+{
+    if (size <= 15)
+        selectionsort(array, size, comp);
+
+    else{
+        void*       pivot = array[irand(seed, size)];
+        void*       tmp;
+        int         i = -1;
+        int         j = size;
+
+        for(;;){
+            do i++; while(comp(array[i], pivot)<0);
+            do j--; while(comp(pivot, array[j])<0);
+
+            if (i >= j) break;
+
+            tmp = array[i]; array[i] = array[j]; array[j] = tmp;
+        }
+
+        // Recurse on the two partitions.
+        sortrnd(array    , i     , comp, seed);
+        sortrnd(&array[i], size-i, comp, seed);
+    }
+}
+
+/* Sort 'array' with a fixed seed so results are deterministic across runs. */
+void sort(void** array, int size, int(*comp)(const void *, const void *))
+{
+    double seed = 91648253;
+    sortrnd(array,size,comp,&seed);
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/solver.h	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,142 @@
+/**************************************************************************************************
+MiniSat -- Copyright (c) 2005, Niklas Sorensson
+http://www.cs.chalmers.se/Cs/Research/FormalMethods/MiniSat/
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
+NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
+OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+**************************************************************************************************/
+// Modified to compile with MS Visual Studio 6.0 by Alan Mishchenko
+
+#ifndef solver_h
+#define solver_h
+
+#ifdef _WIN32
+#define inline __inline // compatible with MS VS 6.0
+#endif
+
+#include "vec.h"
+
+//=================================================================================================
+// Simple types:
+
+// does not work for c++
+typedef int  bool;
+static const bool  true      = 1;
+static const bool  false     = 0;
+
+typedef int                lit;     // literal encoded as 2*var + sign bit
+typedef char               lbool;   // lifted boolean: l_Undef / l_True / l_False
+
+#ifdef _WIN32
+typedef signed __int64     uint64;   // compatible with MS VS 6.0
+#else
+typedef unsigned long long uint64;
+#endif
+
+static const int   var_Undef = -1;
+static const lit   lit_Undef = -2;
+
+static const lbool l_Undef   =  0;
+static const lbool l_True    =  1;
+static const lbool l_False   = -1;
+
+static inline lit  toLit   (int v) { return v + v; }       // var -> positive literal (2v)
+static inline lit  lit_neg (lit l) { return l ^ 1; }       // flip the sign bit
+static inline int  lit_var (lit l) { return l >> 1; }      // literal -> variable index
+static inline int  lit_sign(lit l) { return (l & 1); }     // 1 if negated, 0 otherwise
+
+
+//=================================================================================================
+// Public interface:
+
+struct solver_t;
+typedef struct solver_t solver;
+
+extern solver* solver_new(void);
+
+#ifdef SOLVER_COPY
+extern solver* solver_copy(solver* s);
+#endif
+
+extern void    solver_delete(solver* s);
+
+extern bool    solver_addclause(solver* s, lit* begin, lit* end);
+extern bool    solver_simplify(solver* s);
+extern bool    solver_solve(solver* s, lit* begin, lit* end);
+
+extern int     solver_nvars(solver* s);
+extern int     solver_nclauses(solver* s);
+extern int     solver_nconflicts(solver* s);
+
+extern void    solver_setnvars(solver* s,int n);
+
+/* Cumulative search statistics; all counters are monotonically
+ * incremented by the solver and reported via solver_solve's progress
+ * output and solver_nconflicts. */
+struct stats_t
+{
+    uint64   starts, decisions, propagations, inspects, conflicts;
+    uint64   clauses, clauses_literals, learnts, learnts_literals, max_literals, tot_literals;
+};
+typedef struct stats_t stats;
+
+//=================================================================================================
+// Solver representation:
+
+struct clause_t;
+typedef struct clause_t clause;
+
+struct solver_t
+{
+    int      size;          // nof variables
+    int      cap;           // size of varmaps
+    int      qhead;         // Head index of queue.
+    int      qtail;         // Tail index of queue.
+
+    // clauses
+    vecp     clauses;       // List of problem constraints. (contains: clause*)
+    vecp     learnts;       // List of learnt clauses. (contains: clause*)
+
+    // activities
+    double   var_inc;       // Amount to bump next variable with.
+    double   var_decay;     // INVERSE decay factor for variable activity: stores 1/decay. 
+    float    cla_inc;       // Amount to bump next clause with.
+    float    cla_decay;     // INVERSE decay factor for clause activity: stores 1/decay.
+
+    vecp*    wlists;        // Watcher lists, one per literal (2 per variable).
+    double*  activity;      // A heuristic measurement of the activity of a variable.
+    lbool*   assigns;       // Current values of variables.
+    int*     orderpos;      // Index in variable order.
+    clause** reasons;       // Per-variable antecedent clause (0 for decisions/unassigned).
+    int*     levels;        // Decision level at which each variable was assigned.
+    lit*     trail;         // Assignment stack (queue of asserted literals).
+
+    clause*  binary;        // A temporary binary clause
+    lbool*   tags;          // Per-variable scratch marks used during conflict analysis.
+    veci     tagged;        // (contains: var)
+    veci     stack;         // (contains: var)
+
+    veci     order;         // Variable order. (heap) (contains: var)
+    veci     trail_lim;     // Separator indices for different decision levels in 'trail'. (contains: int)
+    veci     model;         // If problem is solved, this vector contains the model (contains: lbool).
+
+    int      root_level;    // Level of first proper decision.
+    int      simpdb_assigns;// Number of top-level assignments at last 'simplifyDB()'.
+    int      simpdb_props;  // Number of propagations before next 'simplifyDB()'.
+    double   random_seed;
+    double   progress_estimate;
+    int      verbosity;     // Verbosity level. 0=silent, 1=some progress report, 2=everything
+
+    stats    stats;
+};
+
+#endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/solver/vec.h	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,93 @@
+/**************************************************************************************************
+MiniSat -- Copyright (c) 2005, Niklas Sorensson
+http://www.cs.chalmers.se/Cs/Research/FormalMethods/MiniSat/
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
+associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
+NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
+OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+**************************************************************************************************/
+// Modified to compile with MS Visual Studio 6.0 by Alan Mishchenko
+
+#ifndef vec_h
+#define vec_h
+
+#include <stdlib.h>
+#include <string.h>
+
+// vector of 32-bit integers (added for 64-bit portability)
+struct veci_t {
+    int    size;
+    int    cap;
+    int*   ptr;
+};
+typedef struct veci_t veci;
+
+static inline void veci_new (veci* v) {
+    v->size = 0;
+    v->cap  = 4;
+    v->ptr  = (int*)malloc(sizeof(int)*v->cap);
+}
+
+static inline void   veci_delete (veci* v)          { free(v->ptr);   }
+static inline int*   veci_begin  (veci* v)          { return v->ptr;  }
+static inline int    veci_size   (veci* v)          { return v->size; }
+static inline void   veci_resize (veci* v, int k)   { v->size = k;    } // only safe to shrink !!
+static inline void   veci_push   (veci* v, int e)
+{
+    if (v->size == v->cap) {
+        int newsize = v->cap * 2+1;
+        v->ptr = (int*)realloc(v->ptr,sizeof(int)*newsize);
+        v->cap = newsize; }
+    v->ptr[v->size++] = e;
+}
+static inline void veci_dup(veci *new, veci *old)
+{
+  *new = *old;
+  new->ptr = (int*)memcpy(malloc(sizeof(int)*new->cap), old->ptr, sizeof(int)*new->cap);
+}
+
+// vector of 32- or 64-bit pointers
+struct vecp_t {
+    int    size;
+    int    cap;
+    void** ptr;
+};
+typedef struct vecp_t vecp;
+
+static inline void vecp_new (vecp* v) {
+    v->size = 0;
+    v->cap  = 4;
+    v->ptr  = (void**)malloc(sizeof(void*)*v->cap);
+}
+
+static inline void   vecp_delete (vecp* v)          { free(v->ptr);   }
+static inline void** vecp_begin  (vecp* v)          { return v->ptr;  }
+static inline int    vecp_size   (vecp* v)          { return v->size; }
+static inline void   vecp_resize (vecp* v, int   k) { v->size = k;    } // only safe to shrink !!
+static inline void   vecp_push   (vecp* v, void* e)
+{
+    if (v->size == v->cap) {
+        int newsize = v->cap * 2+1;
+        v->ptr = (void**)realloc(v->ptr,sizeof(void*)*newsize);
+        v->cap = newsize; }
+    v->ptr[v->size++] = e;
+}
+static inline void vecp_dup(vecp *new, vecp *old)
+{
+  *new = *old;
+  new->ptr = (void**)memcpy(malloc(sizeof(void*)*new->cap), old->ptr, sizeof(void*)*new->cap);
+}
+
+
+#endif
--- a/src/modules/variant.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/modules/variant.py	Wed Nov 18 15:53:48 2009 -0800
@@ -73,8 +73,21 @@
         # Methods which are unique to variants
         def allow_action(self, action):
                 """ determine if variants permit this action """
-                for a in set(action.attrs.keys()) & \
-                    self.__keyset:
+                aset = set([k for k in action.attrs.keys() if k.startswith("variant.")])
+
+                unknown_variants = aset - self.__keyset
+
+                # handle variant.debug
+
+                for u in unknown_variants:
+                        # install only unknown variant.debug
+                        # actions tagged w/ "false"
+                        if u.startswith("variant.debug.") and \
+                            action.attrs[u] != "false":
+                                return False
+                        # could assert here for other
+                        # unknown variants... best course TBD
+                for a in aset & self.__keyset:
                         if self[a] != action.attrs[a]:
                                 return False
                 return True
--- a/src/pkgdefs/SUNWipkg/copyright	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/pkgdefs/SUNWipkg/copyright	Wed Nov 18 15:53:48 2009 -0800
@@ -45,3 +45,27 @@
 #
 # CDDL HEADER END
 #
+
+--------------------------------------------------------------------
+
+
+MiniSat -- Copyright (c) 2005, Niklas Sorensson
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--- a/src/pkgdefs/SUNWipkg/prototype	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/pkgdefs/SUNWipkg/prototype	Wed Nov 18 15:53:48 2009 -0800
@@ -86,12 +86,8 @@
 f none usr/lib/python2.4/vendor-packages/pkg/client/api_errors.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/bootenv.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/bootenv.pyc 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/constraint.py 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/constraint.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/debugvalues.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/debugvalues.pyc 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/filter.py 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/filter.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/history.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/history.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/image.py 444 root bin
@@ -100,12 +96,12 @@
 f none usr/lib/python2.4/vendor-packages/pkg/client/imageconfig.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/imageplan.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/imageplan.pyc 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/imagestate.py 444 root bin
-f none usr/lib/python2.4/vendor-packages/pkg/client/imagestate.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/imagetypes.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/imagetypes.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/indexer.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/indexer.pyc 444 root bin
+f none usr/lib/python2.4/vendor-packages/pkg/client/pkg_solver.py 444 root bin
+f none usr/lib/python2.4/vendor-packages/pkg/client/pkg_solver.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/pkgplan.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/pkgplan.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/client/progress.py 444 root bin
@@ -143,6 +139,8 @@
 f none usr/lib/python2.4/vendor-packages/pkg/file_layout/layout.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/file_layout/file_manager.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/file_layout/file_manager.pyc 444 root bin
+f none usr/lib/python2.4/vendor-packages/pkg/facet.py 444 root bin
+f none usr/lib/python2.4/vendor-packages/pkg/facet.pyc 444 root bin
 d none usr/lib/python2.4/vendor-packages/pkg/flavor 755 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/flavor/__init__.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/flavor/__init__.pyc 444 root bin
@@ -232,6 +230,7 @@
 f none usr/lib/python2.4/vendor-packages/pkg/server/transaction.pyc 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/smf.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/smf.pyc 444 root bin
+f none usr/lib/python2.4/vendor-packages/pkg/solver.so 755 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/sysvpkg.py 444 root bin
 f none usr/lib/python2.4/vendor-packages/pkg/sysvpkg.pyc 444 root bin
 d none usr/lib/python2.4/vendor-packages/pkg/um 755 root bin
--- a/src/pkgdep.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/pkgdep.py	Wed Nov 18 15:53:48 2009 -0800
@@ -42,7 +42,7 @@
 import pkg.publish.dependencies as dependencies
 from pkg.misc import msg, emsg, PipeError
 
-CLIENT_API_VERSION = 23
+CLIENT_API_VERSION = 24
 PKG_CLIENT_NAME = "pkgdep"
 
 DEFAULT_SUFFIX = ".res"
--- a/src/setup.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/setup.py	Wed Nov 18 15:53:48 2009 -0800
@@ -306,6 +306,10 @@
 _actions_srcs = [
         'modules/actions/_actions.c'
         ]
+solver_srcs = [
+        'modules/solver/solver.c', 
+        'modules/solver/py_solver.c'
+        ]
 include_dirs = [ 'modules' ]
 lint_flags = [ '-u', '-axms', '-erroff=E_NAME_DEF_NOT_USED2' ]
 
@@ -871,6 +875,18 @@
                             extra_link_args = link_args,
                             define_macros = [('_FILE_OFFSET_BITS', '64')]
                             ),
+                    Extension(
+                             'solver',
+                             solver_srcs,
+                             include_dirs = include_dirs + ["."],
+                             extra_compile_args = compile_args,
+                             extra_link_args = [
+                                 "-ztext",
+                                 "-lm",
+                                 "-lc"
+                                 ],
+                             define_macros = [('_FILE_OFFSET_BITS', '64')]
+                            )
                     ]
         else:
             elf_libraries += [ 'ssl' ]
--- a/src/tests/api/t_filter.py	Tue Nov 17 17:06:35 2009 -0600
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,228 +0,0 @@
-#!/usr/bin/python2.4
-#
-# CDDL HEADER START
-#
-# The contents of this file are subject to the terms of the
-# Common Development and Distribution License (the "License").
-# You may not use this file except in compliance with the License.
-#
-# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
-# or http://www.opensolaris.org/os/licensing.
-# See the License for the specific language governing permissions
-# and limitations under the License.
-#
-# When distributing Covered Code, include this CDDL HEADER in each
-# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
-# If applicable, add the following below this CDDL HEADER, with the
-# fields enclosed by brackets "[]" replaced with your own identifying
-# information: Portions Copyright [yyyy] [name of copyright owner]
-#
-# CDDL HEADER END
-#
-
-# Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
-# Use is subject to license terms.
-
-import unittest
-import pkg.client.filter as filter
-import pkg.actions as actions
-
-import sys
-import os
-
-# Set the path so that modules above can be found
-path_to_parent = os.path.join(os.path.dirname(__file__), "..")
-sys.path.insert(0, path_to_parent)
-import pkg5unittest
-
-class TestFilter(pkg5unittest.Pkg5TestCase):
-        def setUp(self):
-                self.actionstr = """\
-                file path=/usr/bin/ls arch=i386 debug=true
-                file path=/usr/bin/ls arch=i386 debug=false
-                file path=/usr/bin/ls arch=sparc debug=true
-                file path=/usr/bin/ls arch=sparc debug=false
-                file path=/usr/bin/hostname arch=386 version=0.9
-                file path=/usr/bin/hostname arch=sparc version=9
-                file path=/usr/bin/hostid arch=386 version=0.9.9
-                file path=/usr/bin/hostid arch=sparc version=9.9
-                file path=/usr/sbin/6to4relay arch=386 version=0.a6.b5.c4.d3.e2.f1
-                file path=/usr/sbin/6to4relay arch=sparcv9 version=0.6.5.4.3.2.1
-                file path=/usr/bin/i386 386=true 0.i.3.8.6=cpuarch
-                file path=/usr/bin/sparc 386=false 0.9.9=cpuarch
-                file path=/var/svc/manifest/intrd.xml opensolaris.zone=global
-                file path=/path/to/french/text doc=true locale=fr
-                file path=/path/to/swedish/text doc=true locale=sv
-                file path=/path/to/english/text doc=true locale=en
-                file path=/path/to/us-english/text doc=true locale=en_US"""
-
-                self.actions = [
-                    actions.fromstr(s.strip())
-                    for s in self.actionstr.splitlines()
-                ]
-
-        def doFilter(self, in_filters):
-                filters = []
-                match = 0
-                nomatch = 0
-                for f_entry in in_filters:
-                        expr, comp_expr = filter.compile_filter(f_entry)
-                        filters.append((expr, comp_expr))
-
-                for a_entry in self.actions:
-                        res = filter.apply_filters(a_entry, filters)
-                        if res:
-                                match += 1
-                        else:
-                                nomatch += 1
-                return match
-
-        def doFilterStr(self, in_filters):
-                filters = []
-                outstr = ""
-
-                for f_entry in in_filters:
-                        expr, comp_expr = filter.compile_filter(f_entry)
-                        filters.append((expr, comp_expr))
-
-                for a_entry in self.actions:
-                        d_attrs = a_entry.attrs
-                        res = filter.apply_filters(a_entry, filters)
-                        outstr += "%-5s %s" % (res, str(d_attrs))
-
-                return outstr
-
-        def test_01_debug_i386(self):
-                """ ASSERT: arch=i386 & debug=true filters work """
-                self.assertEqual(
-                    self.doFilter([ "arch=i386 & debug=true" ]), 8)
-
-        def test_02_nondebug_i386(self):
-                """ ASSERT: arch=i386 & debug=false filters work """
-                self.assertEqual(
-                    self.doFilter([ "arch=i386 & debug=false" ]), 8)
-
-        def test_03_i386(self):
-                """ ASSERT: arch=i386 filters work """
-                self.assertEqual(
-                    self.doFilter([ "arch=i386" ]), 9)
-
-        def test_04_sparc(self):
-                """ ASSERT: arch=sparc filters work """
-                self.assertEqual(
-                    self.doFilter([ "arch=sparc" ]), 11)
-
-        def test_05_doc(self):
-                """ ASSERT: doc=true filters work """
-                self.assertEqual(
-                    self.doFilter([ "doc=true" ]), 17)
-
-        def test_06_doc(self):
-                """ ASSERT: doc=false filters work """
-                self.assertEqual(
-                    self.doFilter([ "doc=false" ]), 13)
-
-        def test_07_or(self):
-                """ ASSERT: OR filters work """
-                self.assertEqual(
-                    self.doFilter([ "locale=sv | locale=fr" ]), 15)
-
-        def test_08_and_or(self):
-                """ ASSERT: complex filters work """
-                self.assertEqual(
-                    self.doFilter([
-                        "arch=sparc & debug=false & (locale=sv | locale=fr)"
-                        ]), 8)
-
-        def test_09_multiple(self):
-                """ ASSERT: a list of multiple filters is possible """
-                self.assertNotEqual(
-                    self.doFilter([ "arch=i386", "debug=false" ]),
-                    self.doFilter([ "arch=i386" ]))
-
-        def test_10_multiple(self):
-                """ ASSERT: multiple filters is the same as ANDing """
-
-                self.assertEqual(
-                    self.doFilterStr([ "arch=i386", "debug=false" ]),
-                    self.doFilterStr([ "arch=i386 & debug=false" ]))
-
-        def test_11_numval(self):
-                """ ASSERT: filters with numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "arch=386" ]), 10)
-
-        def test_12_fracval(self):
-                """ ASSERT: filters with fractional numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.9" ]), 12)
-
-        def test_13_fracval_and_numval(self):
-                """ ASSERT: filters with fractional numeric values ANDed with filters with numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.9 & arch=386" ]), 8)
-
-        def test_14_fracval_and_textval_(self):
-                """ ASSERT: filters with fractional numeric values ANDed with filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.9 & arch=sparc" ]), 9)
-
-        def test_15_numval_and_textval(self):
-                """ ASSERT: filters with numeric ANDed with filters with text value work """
-                self.assertEqual(
-                    self.doFilter([ "version=9 & arch=sparc" ]), 10)
-
-        def test_16_multinumval(self):
-                """ ASSERT: filters with multi-component numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.9.9" ]), 12)
-
-        def test_17_multinumval_and_numval(self):
-                """ ASSERT: filters with multi-component numeric values ANDed with filters with numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.6.5.4.3.2.1 & arch=386" ]), 7)
-
-        def test_18_multinumval_and_textval(self):
-                """ ASSERT: filters with multi-component numeric values ANDed with filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.9.9 & arch=sparc" ]), 9)
-
-        def test_19_multinumtextval(self):
-                """ ASSERT: filters with multi-component numeric and text values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.a6.b5.c4.d3.e2.f1" ]), 12)
-
-        def test_20_multinumtextval_and_numval(self):
-                """ ASSERT: filters with multi-component numeric and text values ANDed with filters with numeric values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.a6.b5.c4.d3.e2.f1 & arch=386" ]), 8)
-
-        def test_21_multinumtextval_and_textval(self):
-                """ ASSERT: filters with multi-component numeric and text values ANDed with filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "version=0.a6.b5.c4.d3.e2.f1 & arch=sparcv9" ]), 7)
-
-        def test_22_numfilter(self):
-                """ ASSERT: numeric filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "386=true" ]), 16)
-
-        def test_23_multinumfilter(self):
-                """ ASSERT: multi-component numeric filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "0.9.9=foobar" ]), 16)
-
-        def test_24_multinumtextfilter(self):
-                """ ASSERT: multi-component numeric and text filters with text values work """
-                self.assertEqual(
-                    self.doFilter([ "0.i.3.8.6=foobar" ]), 16)
-
-        def test_25_multinumfilter_complex(self):
-                """ ASSERT: multi-component numeric complex filters work """
-                self.assertEqual(
-                    self.doFilter([
-                        "version=0.9 & (0.i.3.8.6=cpuarch | 0.9.9=foobar) | version=0.a6.b5.c4.d3.e2.f1"
-                        ]), 13)
-
-if __name__ == "__main__":
-        unittest.main()
--- a/src/tests/api/t_manifest.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/api/t_manifest.py	Wed Nov 18 15:53:48 2009 -0800
@@ -35,7 +35,6 @@
 import pkg.manifest as manifest
 import pkg.actions as actions
 import pkg.fmri as fmri
-import pkg.client.filter as filter
 
 # Set the path so that modules above can be found
 path_to_parent = os.path.join(os.path.dirname(__file__), "..")
--- a/src/tests/baseline.txt	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/baseline.txt	Wed Nov 18 15:53:48 2009 -0800
@@ -278,6 +278,7 @@
 cli.t_api_search.py TestApiSearchBasicsRestartingDepot.test_local_image_update|pass
 cli.t_api_search.py TestApiSearchMulti.test_bug_2955|pass
 cli.t_api_search.py TestApiSearchMulti.test_bug_8318|pass
+cli.t_change_facet.py TestPkgChangeFacet.test_1|pass
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_01_none_1|pass
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_01_none_2|pass
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_01_none_3|pass
@@ -302,6 +303,7 @@
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_10_arch_and_zone_2|pass
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_11_arch_and_zone_1|pass
 cli.t_change_variant.py TestPkgChangeVariant.test_cv_11_arch_and_zone_2|pass
+cli.t_colliding_links.py TestPkgCollidingLinks.test_1|pass
 cli.t_fix.py TestFix.test_fix1|pass
 cli.t_fix.py TestFix.test_fix2|pass
 cli.t_pkg_R_option.py TestROption.test_1|pass
@@ -320,7 +322,7 @@
 cli.t_pkg_api_install.py TestPkgApiInstall.test_bug_4109|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_catalog_v0|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_image_upgrade|pass
-cli.t_pkg_api_install.py TestPkgApiInstall.test_install_matching|error
+cli.t_pkg_api_install.py TestPkgApiInstall.test_install_matching|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_nonrecursive_dependent_uninstall|pass
 cli.t_pkg_api_install.py TestPkgApiInstall.test_recursive_uninstall|pass
 cli.t_pkg_contents.py TestPkgContentsBasics.test_contents_1|pass
@@ -378,8 +380,10 @@
 cli.t_pkg_info.py TestPkgInfoBasics.test_info_empty_image|pass
 cli.t_pkg_info.py TestPkgInfoBasics.test_info_local_remote|pass
 cli.t_pkg_info.py TestPkgInfoBasics.test_pkg_info_bad_fmri|pass
+cli.t_pkg_install.py TestDependencies.test_exclude_dependencies|pass
+cli.t_pkg_install.py TestDependencies.test_incorporation_dependencies|pass
+cli.t_pkg_install.py TestDependencies.test_require_dependencies|pass
 cli.t_pkg_install.py TestDependencies.test_optional_dependencies|pass
-cli.t_pkg_install.py TestDependencies.test_require_optional|pass
 cli.t_pkg_install.py TestImageCreateCorruptImage.test_empty_ospkg|pass
 cli.t_pkg_install.py TestImageCreateCorruptImage.test_empty_var_pkg|pass
 cli.t_pkg_install.py TestImageCreateCorruptImage.test_ospkg_left_alone|pass
@@ -414,6 +418,9 @@
 cli.t_pkg_install.py TestMultipleDepots.test_11_uninstall_after_preferred_publisher_change|pass
 cli.t_pkg_install.py TestMultipleDepots.test_12_uninstall_after_publisher_removal|pass
 cli.t_pkg_install.py TestMultipleDepots.test_13_non_preferred_multimatch|pass
+cli.t_pkg_install.py TestMultipleDepots.test_14_nonsticky_publisher|pass
+cli.t_pkg_install.py TestMultipleDepots.test_15_nonsticky_update|pass
+cli.t_pkg_install.py TestMultipleDepots.test_16_disabled_nonsticky|pass
 cli.t_pkg_install.py TestPkgInstallActions.test_bad_hardlinks|pass
 cli.t_pkg_install.py TestPkgInstallActions.test_bad_links|pass
 cli.t_pkg_install.py TestPkgInstallActions.test_basics_0|pass
@@ -436,16 +443,18 @@
 cli.t_pkg_install.py TestPkgInstallBasics.test_basics_3|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_basics_4|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_basics_5|pass
+cli.t_pkg_install.py TestPkgInstallBasics.test_basics_6|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_1338|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_1338_2|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_1338_3|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_1338_4|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_2795|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_3770|pass
+cli.t_pkg_install.py TestPkgInstallBasics.test_bug_6018|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_bug_9929|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_cli|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_image_upgrade|pass
-cli.t_pkg_install.py TestPkgInstallBasics.test_install_matching|fail
+cli.t_pkg_install.py TestPkgInstallBasics.test_install_matching|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_nonrecursive_dependent_uninstall|pass
 cli.t_pkg_install.py TestPkgInstallBasics.test_recursive_uninstall|pass
 cli.t_pkg_install.py TestPkgInstallCircularDependencies.test_anchored_circular_dependencies|pass
@@ -507,6 +516,7 @@
 cli.t_pkg_publisher.py TestPkgPublisherBasics.test_publisher_validation|pass
 cli.t_pkg_publisher.py TestPkgPublisherMany.test_enable_disable|pass
 cli.t_pkg_publisher.py TestPkgPublisherMany.test_set_mirrors_origins|pass
+cli.t_pkg_publisher.py TestPkgPublisherMany.test_search_order|pass
 cli.t_pkg_rebuild_index.py TestPkgRebuildIndex.test_rebuild_index_bad_opts|pass
 cli.t_pkg_rebuild_index.py TestPkgRebuildIndex.test_rebuild_index_bad_perms|pass
 cli.t_pkg_refresh.py TestPkgRefreshMulti.test_catalog_v1|pass
@@ -570,6 +580,8 @@
 cli.t_util_merge.py TestUtilMerge.test_0_merge|pass
 cli.t_util_update_file_layout.py TestFileManager.test_1|pass
 cli.t_util_update_file_layout.py TestFileManager.test_opts|pass
+cli.t_solver.py TestSolver.test_no_solution|pass
+cli.t_solver.py TestSolver.test_solution|pass
 cli.t_variants.py TestPkgVariants.test_old_zones_pkgs|pass
 cli.t_variants.py TestPkgVariants.test_variant_1|pass
 gui.t_pm_addrepo.py TestPkgGuiAddRepoBasics.testAddRepository|pass
--- a/src/tests/cli/t_api.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_api.py	Wed Nov 18 15:53:48 2009 -0800
@@ -40,7 +40,7 @@
 import time
 import unittest
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestPkgApi(testutils.SingleDepotTestCase):
@@ -121,7 +121,7 @@
         def __try_bad_installs(self, api_obj):
 
                 self.assertRaises(api_errors.PlanExistsException,
-                    api_obj.plan_install,["foo"], [])
+                    api_obj.plan_install,["foo"])
 
                 self.assertRaises(api_errors.PlanExistsException,
                     api_obj.plan_uninstall,["foo"], False)
@@ -167,7 +167,7 @@
                 self.assertRaises(api_errors.PlanMissingException,
                     api_obj.prepare)
 
-                api_obj.plan_install(["foo"], [])
+                api_obj.plan_install(["foo"])
                 self.__try_bad_combinations_and_complete(api_obj)
                 api_obj.reset()
 
@@ -207,18 +207,17 @@
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
 
                 recursive_removal = False
-                filters = []
 
-                api_obj.plan_install(["foo"], filters)
+                api_obj.plan_install(["foo"])
                 self.assert_(api_obj.describe() is not None)
                 api_obj.reset()
                 self.assert_(api_obj.describe() is None)
-                api_obj.plan_install(["foo"], filters)
+                api_obj.plan_install(["foo"])
                 self.assert_(api_obj.describe() is not None)
                 api_obj.prepare()
                 api_obj.reset()
                 self.assert_(api_obj.describe() is None)
-                api_obj.plan_install(["foo"], filters)
+                api_obj.plan_install(["foo"])
                 self.assert_(api_obj.describe() is not None)
                 api_obj.prepare()
                 api_obj.execute_plan()
--- a/src/tests/cli/t_api_info.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_api_info.py	Wed Nov 18 15:53:48 2009 -0800
@@ -40,7 +40,7 @@
 import pkg.client.api_errors as api_errors
 import pkg.client.progress as progress
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestApiInfo(testutils.SingleDepotTestCase):
@@ -128,8 +128,6 @@
 
                 self.image_create(durl)
 
-                filters = []
-
                 local = True
                 get_license = False
 
@@ -141,7 +139,7 @@
                 self.assert_(not ret[api.ImageInterface.INFO_FOUND])
                 self.assert_(len(ret[api.ImageInterface.INFO_MISSING]) == 1)
                 
-                api_obj.plan_install(["jade"], filters)
+                api_obj.plan_install(["jade"])
                 api_obj.prepare()
                 api_obj.execute_plan()
 
--- a/src/tests/cli/t_api_search.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_api_search.py	Wed Nov 18 15:53:48 2009 -0800
@@ -47,7 +47,7 @@
 import pkg.portable as portable
 import pkg.search_storage as ss
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestApiSearchBasics(testutils.SingleDepotTestCase):
@@ -547,12 +547,23 @@
                 if remote:
                         search_func = lambda x: api_obj.remote_search(x,
                             servers=servers)
-                res = search_func(query)
-                if return_actions:
-                        res = self._extract_action_from_res(res)
-                else:
-                        res = self._extract_package_from_res(res)
-                res = set(res)
+                init_time = time.time()
+
+                # servers may not be ready immediately - retry search
+                # operation for 5 seconds
+
+                while (time.time() - init_time) < 5:
+                        try:
+                                res = search_func(query)
+                                if return_actions:
+                                        res = self._extract_action_from_res(res)
+                                else:
+                                        res = self._extract_package_from_res(res)
+                                res = set(res)
+                                break
+                        except api_errors.ProblematicSearchServers, e:
+                                pass
+
                 self._check(set(res), test_value)
 
         def _search_op_slow(self, api_obj, remote, token, test_value,
@@ -1130,7 +1141,7 @@
 
         @staticmethod
         def _do_install(api_obj, pkg_list, **kwargs):
-                api_obj.plan_install(pkg_list, [], **kwargs)
+                api_obj.plan_install(pkg_list, **kwargs)
                 TestApiSearchBasics._do_finish(api_obj)
 
         @staticmethod
@@ -1806,6 +1817,7 @@
                 """Test for known bug 983."""
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.bug_983_manifest)
+                time.sleep(2)
                 self.image_create(durl)
                 progresstracker = progress.NullProgressTracker()
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_change_facet.py	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,155 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+
+import os
+import re
+import time
+import errno
+import unittest
+import shutil
+import sys
+from stat import *
+
+
+
+class TestPkgChangeFacet(testutils.SingleDepotTestCase):
+        # Only start/stop the depot once (instead of for every test)
+        persistent_depot = True
+
+        pkg_A = """
+        open [email protected],5.11-0
+        add file /tmp/facets_0 mode=0555 owner=root group=bin path=0         
+        add file /tmp/facets_1 mode=0555 owner=root group=bin path=1 facet.locale.fr=True
+        add file /tmp/facets_2 mode=0555 owner=root group=bin path=2 facet.locale.fr_FR=True
+        add file /tmp/facets_3 mode=0555 owner=root group=bin path=3 facet.locale.fr_CA=True
+        add file /tmp/facets_4 mode=0555 owner=root group=bin path=4 facet.locale.fr_CA=True facet.locale.nl_ZA=True
+        add file /tmp/facets_5 mode=0555 owner=root group=bin path=5 facet.locale.nl=True
+        add file /tmp/facets_6 mode=0555 owner=root group=bin path=6 facet.locale.nl_NA=True
+        add file /tmp/facets_7 mode=0555 owner=root group=bin path=7 facet.locale.nl_ZA=True
+        close"""
+
+        misc_files = [p for p in pkg_A.split() if p.startswith("/")]
+
+        def setUp(self):
+                testutils.SingleDepotTestCase.setUp(self)
+
+
+                for p in self.misc_files:
+                        path = os.path.dirname(p)
+                        if not os.path.exists(path):
+                                os.makedirs(path)
+                        f = open(p, "w")
+                        # write the name of the file into the file, so that
+                        # all files have differing contents
+                        f.write(p)
+                        f.close()
+                        self.debug("wrote %s" % p)
+
+                depot = self.dc.get_depot_url()
+                self.pkgsend_bulk(depot, self.pkg_A)
+
+        def tearDown(self):
+                testutils.SingleDepotTestCase.tearDown(self)
+                for p in self.misc_files:
+                        os.remove(p)
+
+        def assert_file_is_there(self, path, negate=False):
+                """Verify that the specified path exists. If negate is true, 
+                then make sure the path doesn't exist"""
+
+                file_path = os.path.join(self.get_img_path(), path)
+
+                try:
+                        f = file(file_path)
+                except IOError, e:
+                        if e.errno == errno.ENOENT and negate:
+                                return
+                        self.assert_(False, "File %s is not there" % path)
+                # file is there
+                if negate:
+                        self.assert_(False, "File %s is there" % path)
+                return
+
+        def test_1(self):
+                # create an image w/ locales set
+                ic_args = "";
+                ic_args += " --facet 'facet.locale*=False' " 
+                ic_args += " --facet 'facet.locale.fr*=True' " 
+                ic_args += " --facet 'facet.locale.fr_CA=False' " 
+
+                depot = self.dc.get_depot_url()
+                self.image_create(depot, additional_args=ic_args)
+                self.pkg("facet")
+                self.pkg("facet -H 'facet.locale*' | egrep False")
+                # install a package and verify
+
+                self.pkg("install pkg_A")
+                self.pkg("verify")
+                self.pkg("facet")
+
+                # make sure it delivers its files as appropriate
+                self.assert_file_is_there("0")
+                self.assert_file_is_there("1")
+                self.assert_file_is_there("2")
+                self.assert_file_is_there("3", negate=True)
+                self.assert_file_is_there("4", negate=True)
+                self.assert_file_is_there("5", negate=True)
+                self.assert_file_is_there("6", negate=True)
+                self.assert_file_is_there("7", negate=True)
+
+                # change to pick up another file w/ two tags
+                self.pkg("change-facet -v facet.locale.nl_ZA=True")
+                self.pkg("verify")
+                self.pkg("facet")
+
+                self.assert_file_is_there("0")
+                self.assert_file_is_there("1")
+                self.assert_file_is_there("2")
+                self.assert_file_is_there("3", negate=True)
+                self.assert_file_is_there("4")
+                self.assert_file_is_there("5", negate=True)
+                self.assert_file_is_there("6", negate=True)
+                self.assert_file_is_there("7")
+ 
+                # remove all the facets
+                self.pkg("change-facet -v facet.locale*=None 'facet.locale.fr*'=None facet.locale.fr_CA=None")
+                self.pkg("verify")
+
+                for i in range(8):
+                        self.assert_file_is_there("%d" % i)
+                
+                # zap all the locales
+                self.pkg("change-facet -v facet.locale*=False facet.locale.nl_ZA=None")
+                self.pkg("verify")
+                self.pkg("facet")
+                
+                for i in range(8):
+                        self.assert_file_is_there("%d" % i, negate=(i != 0))
+
+ 
--- a/src/tests/cli/t_change_variant.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_change_variant.py	Wed Nov 18 15:53:48 2009 -0800
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python
 #
 # CDDL HEADER START
 #
@@ -33,6 +33,7 @@
 import errno
 import unittest
 import shutil
+import sys
 from stat import *
 
 # needed to get variant settings
@@ -157,6 +158,7 @@
                 it doesn't contain the specified token."""
 
                 file_path = os.path.join(self.get_img_path(), path)
+
                 try:
                         f = file(file_path)
                 except IOError, e:
@@ -180,11 +182,11 @@
                 f.close()
 
                 if not negate and not found:
-                        self.assert_(False, "File %s does not contain %s" %
-                            (path, token))
+                        self.assert_(False, "File %s (%s) does not contain %s" %
+                            (path, file_path, token))
                 if negate and found:
-                        self.assert_(False, "File %s contain %s" %
-                            (path, token))
+                        self.assert_(False, "File %s (%s) contains %s" %
+                            (path, file_path, token))
 
         def p_verify(self, p=None, v_arch=None, v_zone=None, negate=False):
                 """Given a specific architecture and zone variant, verify
@@ -268,13 +270,13 @@
 
                 if "variant.arch" not in ic.variants:
                         self.assert_(False,
-                            "unable to determine inmage arch variant")
+                            "unable to determine image arch variant")
                 if ic.variants["variant.arch"] != v_arch:
                         self.assert_(False, "unexpected arch variant")
 
                 if "variant.opensolaris.zone" not in ic.variants:
                         self.assert_(False,
-                            "unable to determine inmage zone variant")
+                            "unable to determine image zone variant")
                 if ic.variants["variant.opensolaris.zone"] != v_zone:
                         self.assert_(False, "unexpected zone variant")
 
@@ -324,27 +326,30 @@
                 ic_args += " --variant variant.opensolaris.zone=%s " % v_zone
                 depot = self.dc.get_depot_url()
                 self.image_create(depot, additional_args=ic_args)
+                self.pkg("variant -H| egrep %s" % ("'variant.arch[ ]*%s'" % v_arch))
+                self.pkg("variant -H| egrep %s" % ("'variant.opensolaris.zone[ ]*%s'" % v_zone))
 
                 # install the specified packages into the image
                 ii_args = "";
                 for p in pl:
                         ii_args += " %s " % p
-                self.pkg("install %s" % ii_args)
-
+                self.pkg("install %s" % ii_args)                
+                
                 # if we're paranoid, then verify the image we just installed
                 if self.verify_install:
                         self.i_verify(v_arch, v_zone, pl)
-
                 # change the specified variant
                 cv_args = "";
                 cv_args += " -v";
                 cv_args += " variant.arch=%s" % v_arch2
                 cv_args += " variant.opensolaris.zone=%s" % v_zone2
-                self.pkg("change-variant" + cv_args)
-
+                self.pkg("change-variant -v" + cv_args)
                 # verify the updated image
                 self.i_verify(v_arch2, v_zone2, pl2)
 
+                self.pkg("variant -H| egrep %s" % ("'variant.arch[ ]*%s'" % v_arch2))
+                self.pkg("variant -H| egrep %s" % ("'variant.opensolaris.zone[ ]*%s'" % v_zone2))
+
                 self.image_destroy()
 
         def test_cv_01_none_1(self):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_colliding_links.py	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+
+import os
+import re
+import time
+import errno
+import unittest
+import shutil
+import sys
+from stat import *
+
+class TestPkgCollidingLinks(testutils.SingleDepotTestCase):
+        # Only start/stop the depot once (instead of for every test)
+        persistent_depot = True
+
+        pkg_A = """
+        open [email protected],5.11-0
+        add file /tmp/link_target_0 mode=0555 owner=root group=bin path=link_target_0
+        add file /tmp/link_target_1 mode=0555 owner=root group=bin path=link_target_1
+        add file /tmp/link_target_2 mode=0555 owner=root group=bin path=link_target_2
+        close"""
+
+        pkg_B = """
+        open [email protected],5.11-0
+        add link path=0 target=./link_target_0
+        add link path=1 target=./link_target_1
+        add link path=2 target=./link_target_2
+        close"""
+
+        pkg_C = """
+        open [email protected],5.11-0
+        add link path=0 target=./link_target_0
+        add link path=1 target=./link_target_1
+        add link path=/2 target=./link_target_2
+        close"""
+
+
+        misc_files = [p for p in pkg_A.split() if p.startswith("/")]
+
+
+        def setUp(self):
+                testutils.SingleDepotTestCase.setUp(self)
+
+                for p in self.misc_files:
+                        path = os.path.dirname(p)
+                        if not os.path.exists(path):
+                                os.makedirs(path)
+                        f = open(p, "w")
+                        # write the name of the file into the file, so that
+                        # all files have differing contents
+                        f.write(p)
+                        f.close()
+                        self.debug("wrote %s" % p)
+
+                depot = self.dc.get_depot_url()
+                self.pkgsend_bulk(depot, self.pkg_A)
+                self.pkgsend_bulk(depot, self.pkg_B)
+                self.pkgsend_bulk(depot, self.pkg_C)
+
+        def tearDown(self):
+                testutils.SingleDepotTestCase.tearDown(self)
+                for p in self.misc_files:
+                        os.remove(p)
+
+
+        def test_1(self):
+                """Verify symlinks are correctly reference-counted
+                during installation & removal of packages"""
+                # create an image w/ locales set
+                depot = self.dc.get_depot_url()
+                self.image_create(depot)
+                # install packages and verify
+              
+                self.pkg("install pkg_A pkg_B")
+                self.pkg("verify")
+
+                # add a pkg w/ duplicate links
+                self.pkg("install pkg_C")
+                self.pkg("verify")
+
+                # remove the pkg w/ duplicate links; pkg_B's links must survive
+                self.pkg("uninstall pkg_C")
+                self.pkg("verify")
+
+                # re-add a pkg w/ duplicate links
+                self.pkg("install pkg_C")
+                self.pkg("verify")
+
+                self.pkg("uninstall pkg_B pkg_C")
+                self.pkg("verify")
--- a/src/tests/cli/t_fix.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_fix.py	Wed Nov 18 15:53:48 2009 -0800
@@ -90,6 +90,7 @@
                 size2 = self.file_size(victim)
                 self.assertEqual(size1, size2)
 
+                # check that we didn't reindex
                 new_mtime = os.stat(index_file).st_mtime
                 self.assertEqual(orig_mtime, new_mtime)
 
--- a/src/tests/cli/t_pkg_R_option.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_R_option.py	Wed Nov 18 15:53:48 2009 -0800
@@ -71,7 +71,7 @@
                 self.pkg("-R %s image-update" % imgpath)
 
                 self.pkg("-R %s uninstall foo" % badpath, exit=1)
-                self.pkg("-R %s install foo" % imgpath)
+                self.pkg("-R %s install foo" % imgpath, exit=4)
 
                 self.pkg("-R %s info foo" % badpath, exit=1)
                 self.pkg("-R %s info foo" % imgpath)
--- a/src/tests/cli/t_pkg_api_install.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_api_install.py	Wed Nov 18 15:53:48 2009 -0800
@@ -38,7 +38,7 @@
 import pkg.client.api_errors as api_errors
 import pkg.client.progress as progress
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestPkgApiInstall(testutils.SingleDepotTestCase):
@@ -142,16 +142,15 @@
                         self.debug("wrote %s" % p)
 
         def tearDown(self):
+                self.debug("In teardown")
                 testutils.SingleDepotTestCase.tearDown(self)
                 for p in self.misc_files:
                         os.remove(p)
 
         @staticmethod
-        def __do_install(api_obj, fmris, filters=None):
-                if not filters:
-                        filters = []
+        def __do_install(api_obj, fmris):
                 api_obj.reset()
-                api_obj.plan_install(fmris, filters)
+                api_obj.plan_install(fmris)
                 api_obj.prepare()
                 api_obj.execute_plan()
 
@@ -386,7 +385,9 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
 
-                self.assertRaises(api_errors.InventoryException,
+                self.pkg("list -a")
+
+                self.assertRaises(api_errors.PlanCreationException,
                     self.__do_install, api_obj, ["[email protected]"])
 
         def test_bug_1338_2(self):
@@ -401,7 +402,7 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
 
-                self.assertRaises(api_errors.InventoryException,
+                self.assertRaises(api_errors.PlanCreationException,
                     self.__do_install, api_obj, ["[email protected]", "[email protected]"])
 
         def test_bug_1338_3(self):
@@ -416,7 +417,7 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
 
-                self.assertRaises(api_errors.InventoryException,
+                self.assertRaises(api_errors.PlanCreationException,
                     self.__do_install, api_obj, ["[email protected]"])
 
         def test_bug_1338_4(self):
@@ -430,7 +431,7 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
 
-                self.assertRaises(api_errors.InventoryException,
+                self.assertRaises(api_errors.PlanCreationException,
                     self.__do_install, api_obj, ["[email protected]"])
 
         def test_bug_2795(self):
@@ -444,14 +445,16 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
 
-                self.__do_install(api_obj, ["[email protected]", "[email protected]"])
-                self.pkg("list [email protected]", exit = 1)
-                self.pkg("list [email protected]")
-                self.__do_uninstall(api_obj, ["foo"])
+                self.assertRaises(api_errors.PlanCreationException,
+                    self.__do_install, api_obj, ["[email protected]", "[email protected]"])
+
+                self.pkg("list foo", exit = 1)
 
-                self.__do_install(api_obj, ["[email protected]", "[email protected]"])
-                self.pkg("list [email protected]", exit = 1)
-                self.pkg("list [email protected]")
+                self.assertRaises(api_errors.PlanCreationException,
+                    self.__do_install, api_obj, ["[email protected]", "[email protected]"])
+
+
+                self.pkg("list foo", exit = 1)
 
         def test_install_matching(self):
                 """ Try to [un]install packages matching a pattern """
@@ -491,31 +494,32 @@
 
                 def check_missing(e):
                         return e.missing_matches
-                
+
+
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_unfound, api_obj.plan_install, ["foo"], [])
+                    check_unfound, api_obj.plan_install, ["foo"])
 
                 api_obj.reset()
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_unfound, api_obj.plan_uninstall, ["foo"], [])
+                    check_missing, api_obj.plan_uninstall, ["foo"], False)
 
                 api_obj.reset()
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_illegal, api_obj.plan_install, ["@/foo"], [])
+                    check_illegal, api_obj.plan_install, ["@/foo"])
                 
                 api_obj.reset()
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_illegal, api_obj.plan_uninstall, ["/foo"], [])
+                    check_illegal, api_obj.plan_uninstall, ["/foo"], False)
 
                 self.pkgsend_bulk(durl, self.foo10)
 
                 api_obj.refresh(False)
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_unfound, api_obj.plan_uninstall, ["foo"], [])
+                    check_missing, api_obj.plan_uninstall, ["foo"], False)
 
                 api_obj.reset()
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_illegal, api_obj.plan_uninstall, ["/foo"], [])
+                    check_illegal, api_obj.plan_uninstall, ["/foo"], False)
 
                 api_obj.reset()
                 api_obj.refresh(True)
@@ -524,7 +528,7 @@
 
                 api_obj.reset()                
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_missing, api_obj.plan_uninstall, ["foo"], [])
+                    check_missing, api_obj.plan_uninstall, ["foo"], False)
 
         def test_bug_4109(self):
                 durl = self.dc.get_depot_url()
@@ -538,7 +542,7 @@
 
                 api_obj.reset()
                 testutils.eval_assert_raises(api_errors.PlanCreationException,
-                    check_illegal, api_obj.plan_install, ["/foo"], [])
+                    check_illegal, api_obj.plan_install, ["/foo"])
 
         def test_catalog_v0(self):
                 """Test install from a publisher's repository that only supports
--- a/src/tests/cli/t_pkg_depotd.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_depotd.py	Wed Nov 18 15:53:48 2009 -0800
@@ -242,7 +242,7 @@
                 self.dc.refresh()
                 self.pkg("refresh")
 
-                self.pkg("install bar")
+                self.pkg("install bar", exit=4) # nothing to do
                 self.pkg("verify")
 
         def test_face_root(self):
--- a/src/tests/cli/t_pkg_history.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_history.py	Wed Nov 18 15:53:48 2009 -0800
@@ -89,15 +89,15 @@
 
                 durl2 = self.dcs[2].get_depot_url()
                 commands = [
-                    "install foo",
-                    "uninstall foo",
-                    "image-update",
-                    "set-publisher -O " + durl2 + " test2",
-                    "set-publisher -P test1",
-                    "set-publisher -m " + durl2 + " test1",
-                    "set-publisher -M " + durl2 + " test1",
-                    "unset-publisher test2",
-                    "rebuild-index"
+                    ("install foo", 0),
+                    ("uninstall foo", 0),
+                    ("image-update", 4), 
+                    ("set-publisher -O " + durl2 + " test2", 0),
+                    ("set-publisher -P test1", 0), 
+                    ("set-publisher -m " + durl2 + " test1", 0),
+                    ("set-publisher -M " + durl2 + " test1", 0),
+                    ("unset-publisher test2", 0),
+                    ("rebuild-index", 0)
                 ]
 
                 operations = [
@@ -111,8 +111,8 @@
                     "rebuild-index"
                 ]
 
-                for cmd in commands:
-                        self.pkg(cmd)
+                for cmd, exit in commands:
+                        self.pkg(cmd, exit=exit)
 
                 self.pkg("history -H")
                 o = self.output
@@ -129,7 +129,7 @@
                 # The actual commands are only found in long format.
                 self.pkg("history -l")
                 o = self.output
-                for cmd in commands:
+                for cmd, exit in commands:
                         # Verify that each of the commands was recorded.
                         if o.find(" %s" % cmd) == -1:
                                 raise RuntimeError("Command: %s wasn't recorded,"
--- a/src/tests/cli/t_pkg_image_update.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_image_update.py	Wed Nov 18 15:53:48 2009 -0800
@@ -181,15 +181,15 @@
                 self.image_create(durl1, prefix="test1")
 
                 # First, verify that the preferred status of a publisher will
-                # choose which source is used for image-update when two
+                # not affect which source is used for image-update when two
                 # publishers offer the same package and the package publisher
                 # was preferred at the time of install.
                 self.pkg("set-publisher -P -O %s test2" % durl2)
                 self.pkg("install [email protected]")
                 self.pkg("info [email protected] | grep test2")
                 self.pkg("set-publisher -P test1")
-                self.pkg("image-update -v")
-                self.pkg("info [email protected] | grep test1")
+                self.pkg("image-update -v", exit=4)
+                self.pkg("info [email protected] | grep test1", exit=1)
                 self.pkg("uninstall foo")
 
                 # Next, verify that the preferred status of a publisher will
@@ -199,28 +199,11 @@
                 # to install the package.
                 self.pkg("install [email protected]")
                 self.pkg("info [email protected] | grep test2")
-                self.pkg("image-update -v")
+                self.pkg("image-update -v", exit=4)
                 self.pkg("info [email protected] | grep test2")
-                self.pkg("uninstall baz")
-
-                # Next, verify that if two non-preferred publishers offer
-                # the same package, that the publisher it was installed from
-                # will be chosen for an update and the update will succeed. In
-                # addition, its dependencies should be selected from the same
-                # publisher used for the update if that publisher has them and
-                # the remaining dependencies selected from the first available.
-                self.pkg("set-publisher -P -O %s test3" % durl3)
-                self.pkg("install pkg://test1/[email protected]")
-                self.pkg("info [email protected] | grep test1")
-                self.pkg("info [email protected] | grep test1")
-                self.pkg("info [email protected] | grep test2")
-                self.pkg("image-update -v")
-                self.pkg("info [email protected] | grep test1")
-                self.pkg("info [email protected] | grep test1")
-                self.pkg("info [email protected] | grep test1")
 
                 # Finally, cleanup and verify no packages are installed.
-                self.pkg("uninstall -vr corge")
+                self.pkg("uninstall '*'")
                 self.pkg("list", exit=1)
 
 
--- a/src/tests/cli/t_pkg_info.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_info.py	Wed Nov 18 15:53:48 2009 -0800
@@ -211,7 +211,6 @@
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, pkg1)
                 self.image_create(durl)
-
                 self.pkg("info --license -r bronze")
                 self.pkg("info --license -r silver", exit=1)
                 self.pkg("info --license -r bronze silver", exit=1)
--- a/src/tests/cli/t_pkg_install.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_install.py	Wed Nov 18 15:53:48 2009 -0800
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python
 #
 # CDDL HEADER START
 #
@@ -65,6 +65,14 @@
             open a/[email protected],5.11-0
             close """
 
+        boring10 = """
+            open [email protected],5.11-0
+            close """
+
+        boring11 = """
+            open [email protected],5.11-0
+            close """
+
         bar10 = """
             open [email protected],5.11-0
             add depend type=require fmri=pkg:/[email protected]
@@ -133,6 +141,17 @@
             add file /tmp/cat mode=0555 owner=root group=bin path=/bin/cat
             close """
 
+        a6018_1 = """
+            open [email protected],5.11-0
+            close """
+        a6018_2 = """
+            open [email protected],5.11-0
+            close """
+        b6018_1 = """
+            open [email protected],5.11-0
+            add depend type=optional fmri=a6018@1
+            close """
+
         misc_files = [ "/tmp/libc.so.1", "/tmp/cat", "/tmp/baz" ]
 
         def setUp(self):
@@ -201,8 +220,8 @@
 
                 self.pkg("search -l /lib/libc.so.1")
                 self.pkg("search -r /lib/libc.so.1")
-                self.pkg("search -l blah", exit = 1)
-                self.pkg("search -r blah", exit = 1)
+                self.pkg("search -l blah", exit=1)
+                self.pkg("search -r blah", exit=1)
 
                 # check to make sure timestamp was set to correct value
 
@@ -232,11 +251,11 @@
 
                 self.pkg("install [email protected]")
                 self.pkg("list [email protected]")
-                self.pkg("list [email protected]", exit = 1)
+                self.pkg("list [email protected]", exit=1)
 
                 self.pkg("install [email protected]")
                 self.pkg("list [email protected]")
-                self.pkg("list [email protected]", exit = 1)
+                self.pkg("list [email protected]", exit=1)
                 self.pkg("list foo@1")
                 self.pkg("verify")
 
@@ -260,10 +279,40 @@
                 self.pkg("uninstall -v bar foo")
 
                 # foo and bar should not be installed at this point
-                self.pkg("list bar", exit = 1)
-                self.pkg("list foo", exit = 1)
+                self.pkg("list bar", exit=1)
+                self.pkg("list foo", exit=1)
                 self.pkg("verify")
 
+        def test_basics_5(self):
+                """ Add [email protected], install [email protected]. """
+                durl = self.dc.get_depot_url()
+                self.pkgsend_bulk(durl, self.xbar11)
+                self.image_create(durl)
+                self.pkg("install [email protected]", exit=1)
+
+        def test_basics_6(self):
+                """ Install [email protected], upgrade to [email protected].
+                Boring should be left alone, while
+                foo gets upgraded as needed"""
+
+                durl = self.dc.get_depot_url()
+                self.pkgsend_bulk(durl, self.bar10)
+                self.pkgsend_bulk(durl, self.bar11)
+                self.pkgsend_bulk(durl, self.foo10)
+                self.pkgsend_bulk(durl, self.foo11)
+                self.pkgsend_bulk(durl, self.foo12)
+                self.pkgsend_bulk(durl, self.boring10)
+                self.pkgsend_bulk(durl, self.boring11)
+
+                self.image_create(durl)
+
+                self.pkg("install [email protected] [email protected] [email protected]")
+                self.pkg("list")
+                self.pkg("list [email protected] [email protected] [email protected]")
+                self.pkg("install -v [email protected]") # upgrade bar
+                self.pkg("list")
+                self.pkg("list [email protected] [email protected] [email protected]")
+
         def test_image_upgrade(self):
                 """ Send package [email protected], dependent on [email protected].  Install [email protected].
                     List all packages.  Upgrade image. """
@@ -311,8 +360,8 @@
                 # bar depends on foo.  foo and bar should both
                 # be removed by this action.
                 self.pkg("uninstall -vr foo")
-                self.pkg("list bar", exit = 1)
-                self.pkg("list foo", exit = 1)
+                self.pkg("list bar", exit=1)
+                self.pkg("list foo", exit=1)
 
         def test_nonrecursive_dependent_uninstall(self):
                 """Trying to remove a package that's a dependency of another
@@ -325,27 +374,20 @@
 
                 self.pkg("install [email protected]")
 
-                self.pkg("uninstall -v foo", exit = 1)
+                self.pkg("uninstall -v foo", exit=1)
                 self.pkg("list bar")
                 self.pkg("list foo")
 
-        def test_basics_5(self):
-                """ Add [email protected], install [email protected]. """
-
-                durl = self.dc.get_depot_url()
-                self.pkgsend_bulk(durl, self.xbar11)
-                self.image_create(durl)
-
-                self.pkg("install [email protected]", exit = 1)
-
+        
         def test_bug_1338(self):
                 """ Add [email protected], dependent on [email protected], install [email protected]. """
 
                 durl = self.dc.get_depot_url()
+                self.pkg("list -a")
                 self.pkgsend_bulk(durl, self.bar11)
                 self.image_create(durl)
 
-                self.pkg("install [email protected]", exit = 1)
+                self.pkg("install [email protected]", exit=1)
 
         def test_bug_1338_2(self):
                 """ Add [email protected], dependent on [email protected], and [email protected], dependent
@@ -355,8 +397,8 @@
                 self.pkgsend_bulk(durl, self.bar11)
                 self.pkgsend_bulk(durl, self.baz10)
                 self.image_create(durl)
-
-                self.pkg("install [email protected] [email protected]", exit = 1)
+                self.pkg("list -a")
+                self.pkg("install [email protected] [email protected]")
 
         def test_bug_1338_3(self):
                 """ Add [email protected], [email protected]. [email protected] depends on [email protected] which
@@ -367,7 +409,7 @@
                 self.pkgsend_bulk(durl, self.xdeep10)
                 self.image_create(durl)
 
-                self.pkg("install [email protected]", exit = 1)
+                self.pkg("install [email protected]", exit=1)
 
         def test_bug_1338_4(self):
                 """ Add [email protected]. [email protected] depends on [email protected] which depends
@@ -377,7 +419,7 @@
                 self.pkgsend_bulk(durl, self.ydeep10)
                 self.image_create(durl)
 
-                self.pkg("install [email protected]", exit = 1)
+                self.pkg("install [email protected]", exit=1)
 
         def test_bug_2795(self):
                 """ Try to install two versions of the same package """
@@ -387,14 +429,34 @@
                 self.pkgsend_bulk(durl, self.foo12)
                 self.image_create(durl)
 
-                self.pkg("install [email protected] [email protected]")
-                self.pkg("list [email protected]", exit = 1)
-                self.pkg("list [email protected]")
-                self.pkg("uninstall foo")
-
-                self.pkg("install [email protected] [email protected]")
-                self.pkg("list [email protected]", exit = 1)
-                self.pkg("list [email protected]")
+                self.pkg("install [email protected] [email protected]", exit=1)
+
+        def test_bug_6018(self):
+                """  From original comment in bug report:
+
+                Consider a repository that contains:
+
+                a@1 and a@2
+
+                b@1 that contains an optional dependency on package a@1
+
+                If a@1 and b@1 are installed in an image, the "pkg image-update" command
+                produces the following output:
+
+                $ pkg image-update
+                No updates available for this image.
+
+                However, "pkg install a@2" works. 
+                """
+
+                durl = self.dc.get_depot_url()
+                self.pkgsend_bulk(durl, self.a6018_1)
+                self.pkgsend_bulk(durl, self.a6018_2)                
+                self.pkgsend_bulk(durl, self.b6018_1)                
+                self.image_create(durl)
+                self.pkg("install b6018@1 a6018@1")
+                self.pkg("image-update")
+                self.pkg("list b6018@1 a6018@2")
 
 
         def test_install_matching(self):
@@ -405,16 +467,18 @@
                 self.pkgsend_bulk(durl, self.bar10)
                 self.pkgsend_bulk(durl, self.baz10)
                 self.image_create(durl)
-
-                self.pkg("install 'ba*'")
-                self.pkg("list [email protected]", exit=0)
-                self.pkg("list [email protected]", exit=0)
-                self.pkg("list [email protected]", exit=0)
-
-                self.pkg("uninstall 'ba*'")
-                self.pkg("list [email protected]", exit=0)
-                self.pkg("list [email protected]", exit=1)
-                self.pkg("list [email protected]", exit=1)
+                # don't specify versions here; we have many
+                # different versions of foo, bar & baz in repo
+                # when entire class is run w/ one repo instance.
+                
+                # first case should fail since multiple patterns
+                # match the same package
+                self.pkg("install 'ba*' 'b*'", exit=1)
+                self.pkg("install 'ba*'", exit=0)
+                self.pkg("list foo", exit=0)
+                self.pkg("list bar", exit=0)
+                self.pkg("list baz", exit=0)
+                self.pkg("uninstall 'b*' 'f*'")
 
         def test_bug_3770(self):
                 """ Try to install two versions of the same package """
@@ -490,34 +554,15 @@
                 "multiple_matches" populated (on uninstall)."""
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, self.afoo10)
+                self.pkgsend_bulk(durl, self.bfoo10)
                 self.pkgsend_bulk(durl, self.bar10)
                 self.image_create(durl)
 
-                self.pkg("install foo")
-                foo_dir = os.path.join(self.img_path, "var", "pkg", "pkg", "a%2Ffoo")
-                old_ver = os.listdir(foo_dir)[0]
-                new_ver = old_ver[:-2]+ str((int(old_ver[-2]) + 1) % 10) + \
-                    old_ver[-1]
-                shutil.copytree(os.path.join(foo_dir, old_ver),
-                    os.path.join(foo_dir, new_ver))
-
-                # Load the 'installed' catalog and add an entry for the
-                # new package version.
-                istate_dir = os.path.join(self.img_path, "var", "pkg", "state",
-                    "installed")
-                cat = catalog.Catalog(meta_root=istate_dir)
-
-                # Value of PKG_STATE_INSTALLED in image.py is 2.
-                mdata = { "states": [2] }
-                cat.add_package(
-                    fmri.PkgFmri("a/foo@%s" % urllib.unquote(new_ver),
-                        publisher="test"), metadata=mdata)
-                cat.save()
-
-                known_dir = os.path.join(self.img_path, "var", "pkg", "state",
-                    "known")
-                shutil.rmtree(known_dir)
+                self.pkg("install foo", exit=1)
+                self.pkg("install a/foo b/foo", exit=0)
+                self.pkg("list")                
                 self.pkg("uninstall foo", exit=1)
+                self.pkg("uninstall a/foo b/foo", exit=0)
 
         def test_bug_6874(self):
                 """Don't stack trace when printing a PlanCreationException with
@@ -541,7 +586,7 @@
 
                 self.pkgsend_bulk(durl, self.anotherfoo11)
                 self.pkg("refresh")
-                self.pkg("image-update -v")
+                self.pkg("image-update -v", exit=4)
 
         def test_ambiguous_pattern_depend(self):
                 """A dependency on a package should pull in an exact name
@@ -562,9 +607,6 @@
                 self.pkg("install depender")
 
                 # Make sure that we didn't get other/foo from the dependency.
-                # Note that because inventory() sorts by package name and
-                # evaluate_fmri() only looks at the first value, this package
-                # name must sort before "foo" in order to cause a faillure here.
                 self.pkg("list another/foo", exit=1)
 
         def test_non_ambiguous_fragment(self):
@@ -964,12 +1006,11 @@
                 """Make sure we don't round up packages we specify on
                 install"""
                 durl = self.dc.get_depot_url()
-
+                first_bronze = self.pkgsend_bulk(durl, self.bronze20)
                 self.pkgsend_bulk(durl, self.incorp20)
                 self.pkgsend_bulk(durl, self.amber10)
                 self.pkgsend_bulk(durl, self.bronze10)
                 self.pkgsend_bulk(durl, self.amber20)
-                first_bronze = self.pkgsend_bulk(durl, self.bronze20)
                 self.pkgsend_bulk(durl, self.bronze20)
 
                 # create image 
@@ -1082,10 +1123,8 @@
 
                 # upgrade pkg that loses incorp. deps. in new version
                 self.pkg("install [email protected]")
-                # FIX ME; bronze doesn't get updated because it was part
-                # of previous incorporation
                 self.pkg("image-update")
-                self.pkg("list [email protected]", exit=1)
+                self.pkg("list [email protected]")
 
         def test_upgrade3(self):
                 """ test for editable files moving between packages or locations or both"""
@@ -1188,11 +1227,11 @@
 
                 self.pkg("--debug simulate_live_root=True install [email protected]")
                 self.pkg("verify -v")
-                self.pkg("--debug simulate_live_root=True install [email protected]", exit=1)
-                self.pkg("--debug simulate_live_root=True uninstall liveroot", exit=1)
+                self.pkg("--debug simulate_live_root=True install [email protected]", exit=5)
+                self.pkg("--debug simulate_live_root=True uninstall liveroot", exit=5)
                 # "break" liveroot@1
                 self.file_append("etc/liveroot", "this file has been changed")
-                self.pkg("--debug simulate_live_root=True fix liveroot", exit=1)
+                self.pkg("--debug simulate_live_root=True fix liveroot", exit=5)
 
         def test_upgrade_driver_conflicts(self):
                 """Test to make sure driver_aliases conflicts don't cause
@@ -1582,6 +1621,7 @@
                 self.pkg("contents -m usertest")
 
                 self.pkgsend_bulk(durl, self.usertest11)
+                self.pkg("refresh")
                 self.pkg("install usertest")
                 self.pkg("verify")
                 self.pkg("contents -m usertest")
@@ -1832,29 +1872,29 @@
 
                 name_pat = re.compile("^\s+open\s+(\S+)\@.*$")
 
-                def __manually_check_deps(name, install=True):
+                def __manually_check_deps(name, install=True, exit=0):
                         cmd = "install --no-refresh"
                         if not install:
                                 cmd = "uninstall"
                         if name == "only_depend" and not install:
-                                self.pkg("uninstall foo")
+                                self.pkg("uninstall foo", exit=exit)
                         elif name == "only_driver":
-                                self.pkg("%s devicebase" % cmd)
+                                self.pkg("%s devicebase" % cmd, exit=exit)
                         elif name == "only_group":
-                                self.pkg("%s basics" % cmd)
+                                self.pkg("%s basics" % cmd, exit=exit)
                         elif name == "only_hardlink":
-                                self.pkg("%s only_file" % cmd)
+                                self.pkg("%s only_file" % cmd, exit=exit)
                         elif name == "only_user":
                                 if install:
-                                        self.pkg("%s basics" % cmd)
-                                        self.pkg("%s only_group" % cmd)
+                                        self.pkg("%s basics" % cmd, exit=exit)
+                                        self.pkg("%s only_group" % cmd, exit=exit)
                                 else:
-                                        self.pkg("%s only_group" % cmd)
-                                        self.pkg("%s basics" % cmd)
+                                        self.pkg("%s only_group" % cmd, exit=exit)
+                                        self.pkg("%s basics" % cmd, exit=exit)
                 for p in pkg_list:
                         name_mat = name_pat.match(p.splitlines()[1])
                         pname = name_mat.group(1)
-                        __manually_check_deps(pname)
+                        __manually_check_deps(pname, exit=[0,4])
                         self.pkg("install --no-refresh %s" % pname,
                             su_wrap=True, exit=1)
                         self.pkg("install %s" % pname, su_wrap=True,
@@ -1868,7 +1908,7 @@
                 for p in pkg_list:
                         name_mat = name_pat.match(p.splitlines()[1])
                         pname = name_mat.group(1)
-                        __manually_check_deps(pname)
+                        __manually_check_deps(pname, exit=[0,4])
                         self.pkg("install --no-refresh %s" % pname)
 
                 for p in pkg_list:
@@ -1876,7 +1916,7 @@
                 self.pkgsend_bulk(durl, self.devicebase + self.basics0 + \
                     self.basics1)
 
-                self.pkg("image-update --no-refresh", su_wrap=True)
+                self.pkg("image-update --no-refresh", su_wrap=True, exit=4)
                 self.pkg("refresh")
                 self.pkg("image-update", su_wrap=True, exit=1)
                 # Should fail since user doesn't have permission to refresh
@@ -1975,6 +2015,133 @@
             close
         """
 
+        pkg30 = """
+            open [email protected],5.11-0
+            add depend type=require fmri=pkg:/[email protected]
+            close
+        """
+
+        pkg40 = """
+            open [email protected],5.11-0
+            add depend type=exclude fmri=pkg:/[email protected]
+            close
+        """
+
+        pkg50 = """
+            open [email protected],5.11-0
+            add depend type=exclude fmri=pkg:/[email protected]
+            add depend type=require fmri=pkg:/[email protected]
+            close
+        """
+ 
+        pkg505 = """
+            open [email protected],5.11-0
+            add depend type=exclude fmri=pkg:/[email protected]
+            add depend type=require fmri=pkg:/[email protected]
+            close
+        """
+        pkg51 = """
+            open [email protected],5.11-0
+            add depend type=exclude fmri=pkg:/[email protected]
+            add depend type=exclude fmri=pkg:/pkg2
+            add depend type=require fmri=pkg:/[email protected]
+            close
+        """
+        pkg60 = """
+            open [email protected],5.11-0
+            add depend type=exclude fmri=pkg:/[email protected]
+            close
+        """
+
+        pkg61 = """
+            open [email protected],5.11-0
+            close
+        """
+
+        leaf_template = """
+            open pkg%s%s@%s,5.11-0
+            add depend type=require fmri=pkg:/%s_incorp
+            close
+        """
+        leaf_expansion = [
+                ("A","_0", "1.0", "A"),
+                ("A","_1", "1.0", "A"),
+                ("A","_2", "1.0", "A"),
+                ("A","_3", "1.0", "A"),
+
+                ("B","_0", "1.0", "B"),
+                ("B","_1", "1.0", "B"),
+                ("B","_2", "1.0", "B"),
+                ("B","_3", "1.0", "B"),
+
+                ("A","_0", "1.1", "A"),
+                ("A","_1", "1.1", "A"),
+                ("A","_2", "1.1", "A"),
+                ("A","_3", "1.1", "A"),
+
+                ("B","_0", "1.1", "B"),
+                ("B","_1", "1.1", "B"),
+                ("B","_2", "1.1", "B"),
+                ("B","_3", "1.1", "B")
+                ]
+
+        incorps = [ """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """, 
+
+        """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """, 
+
+        """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """, 
+
+        """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """, 
+
+        """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """,
+
+        """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """]
+                    
+        bug_7394_incorp = """
+            open [email protected],5.11-0
+            add depend type=incorporate fmri=pkg:/[email protected]
+            close
+        """
+
+
         def setUp(self):
                 testutils.SingleDepotTestCase.setUp(self)
                 durl = self.dc.get_depot_url()
@@ -1982,11 +2149,73 @@
                 self.pkgsend_bulk(durl, self.pkg20)
                 self.pkgsend_bulk(durl, self.pkg11)
                 self.pkgsend_bulk(durl, self.pkg21)
+                self.pkgsend_bulk(durl, self.pkg30)
+                self.pkgsend_bulk(durl, self.pkg40)
+                self.pkgsend_bulk(durl, self.pkg50)
+                self.pkgsend_bulk(durl, self.pkg505)
+                self.pkgsend_bulk(durl, self.pkg51)
+                self.pkgsend_bulk(durl, self.pkg60)
+                self.pkgsend_bulk(durl, self.pkg61)
+                self.pkgsend_bulk(durl, self.bug_7394_incorp)
+
+                for t in self.leaf_expansion:
+                        self.pkgsend_bulk(durl, self.leaf_template % t)
+
+                for i in self.incorps:
+                        self.pkgsend_bulk(durl, i)
+
+        def test_require_dependencies(self):
+                """ exercise require dependencies"""
+                durl = self.dc.get_depot_url()
+                self.image_create(durl)
+                self.pkg("install [email protected]")
+                self.pkg("verify  [email protected]")
+                self.pkg("install [email protected]")
+                self.pkg("verify  [email protected] [email protected]")
+
+        def test_exclude_dependencies(self):
+                """ exercise exclude dependencies """
+                durl = self.dc.get_depot_url()
+                self.image_create(durl)
+                # install pkg w/ exclude dep.
+                self.pkg("install [email protected]")
+                self.pkg("verify  [email protected]")
+                # install pkg that is allowed by dep
+                self.pkg("install [email protected]")
+                self.pkg("verify  [email protected]")
+                # try to install disallowed pkg
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("uninstall '*'")
+                # install pkg 
+                self.pkg("install [email protected]")
+                # try to install pkg exclude dep on already
+                # installed pkg
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("uninstall '*'")
+                # install a package w/ both exclude
+                # and require dependencies
+                self.pkg("install pkg5")
+                self.pkg("verify [email protected] [email protected] ")
+                self.pkg("uninstall '*'")
+                # pick pkg to install that fits constraint
+                # of already installed pkg
+                self.pkg("install pkg2")
+                self.pkg("install pkg5")
+                self.pkg("verify [email protected] [email protected] pkg2")
+                self.pkg("uninstall '*'")
+                # install a package that requires updating
+                # existing package to avoid exclude 
+                # dependency
+                self.pkg("install [email protected]")
+                self.pkg("install [email protected]")
+                self.pkg("verify [email protected] [email protected]")
+                self.pkg("uninstall '*'")
+                # try to install two incompatible pkgs
+                self.pkg("install [email protected] [email protected]", exit=1)
 
         def test_optional_dependencies(self):
                 """ check to make sure that optional dependencies are enforced
                 """
-
                 durl = self.dc.get_depot_url()
                 self.image_create(durl)
                 self.pkg("install [email protected]")
@@ -2007,17 +2236,34 @@
                 self.pkg("list [email protected]")
                 self.pkg("install [email protected]", exit=1)
 
-        def test_require_optional(self):
-                """ check that the require optional policy is working
-                """
-
+        def test_incorporation_dependencies(self):
+                """ shake out incorporation dependencies """
                 durl = self.dc.get_depot_url()
                 self.image_create(durl)
-                self.pkg("set-property require-optional true")
-                self.pkg("install pkg1")
-                # the optional dependency should be installed because of the
-                # policy setting
-                self.pkg("list [email protected]")
+                self.pkg("list -a") # help w/ debugging
+
+                # simple pkg requiring controlling incorp
+                # should control pkgA_1 as well
+                self.pkg("install -v [email protected] pkgA_1")
+                self.pkg("list")
+                self.pkg("verify [email protected] [email protected] [email protected]")
+                self.pkg("image-update -v")
+                self.pkg("list [email protected] [email protected] [email protected]")
+                self.pkg("uninstall '*'")
+                # try nested incorporations
+                self.pkg("install -v [email protected] pkgA_0 pkgB_0")
+                self.pkg("list")
+                self.pkg("list [email protected] [email protected] [email protected] [email protected] [email protected]")
+                # try to break incorporation
+                self.pkg("install -v [email protected]", exit=1) # fixed by [email protected]
+                # try image update
+                self.pkg("image-update -v")
+                self.pkg("list [email protected] [email protected] [email protected] [email protected] [email protected]")
+                self.pkg("uninstall '*'")
+                # what happens when incorporation specified
+                # a package that isn't in the catalog
+                self.pkg("install bug_7394_incorp")
+                self.pkg("install pkg1", exit=1)
 
 class TestMultipleDepots(testutils.ManyDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
@@ -2166,44 +2412,49 @@
 
         def test_04_upgrade_preferred_to_non_preferred(self):
                 """Install a package from the preferred publisher, and then
-                upgrade it, implicitly switching to a non-preferred
-                publisher."""
+                upgrade it, failing to implicitly switch to a non-preferred
+                publisher and then managing it explicitly"""
                 self.pkg("list -a upgrade-p")
                 self.pkg("install [email protected]")
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("install pkg://test2/[email protected]")
                 self.pkg("uninstall upgrade-p")
 
         def test_05_upgrade_non_preferred_to_preferred(self):
                 """Install a package from a non-preferred publisher, and then
-                upgrade it, implicitly switching to the preferred publisher."""
+                try to upgrade it, failing to implicitly switch to the preferred
+                publisher and then succeed doing it explicitly."""
                 self.pkg("list -a upgrade-np")
                 self.pkg("install [email protected]")
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("install pkg://test1/[email protected]")
                 self.pkg("uninstall upgrade-np")
 
         def test_06_upgrade_preferred_to_non_preferred_incorporated(self):
                 """Install a package from the preferred publisher, and then
-                upgrade it, implicitly switching to a non-preferred
+                upgrade it, failing to implicitly switch to a non-preferred
                 publisher, when the package is constrained by an
-                incorporation."""
+                incorporation, and then succeed when doing so explicitly"""
+
                 self.pkg("list -a upgrade-p incorp-p")
                 self.pkg("install [email protected]")
                 self.pkg("install upgrade-p")
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("install [email protected] pkg://test2/[email protected]")
                 self.pkg("list [email protected]")
-                self.pkg("uninstall upgrade-p")
+                self.pkg("uninstall '*'")
+
 
         def test_07_upgrade_non_preferred_to_preferred_incorporated(self):
                 """Install a package from the preferred publisher, and then
                 upgrade it, implicitly switching to a non-preferred
                 publisher, when the package is constrained by an
                 incorporation."""
+                self.pkg("list", exit=1)
                 self.pkg("list -a upgrade-np incorp-np")
                 self.pkg("install [email protected]")
-                self.pkg("install upgrade-np")
-                self.pkg("install [email protected]")
-                self.pkg("list [email protected]")
-                self.pkg("uninstall upgrade-np incorp-np incorp-p")
+                self.pkg("install upgrade-np", exit=1)
+                self.pkg("uninstall '*'")
 
         def test_08_install_repository_access(self):
                 """Verify that packages can still be installed from a repository
@@ -2263,14 +2514,15 @@
                 self.pkg("install [email protected]")
                 self.pkg("set-publisher -P test2")
                 self.pkg("unset-publisher test1")
+                self.pkg("list")
 
                 # Attempting to install an already installed package should
                 # be a no-op even if the corresponding publisher no longer
                 # exists.
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=4)
 
                 # Image update should work if we don't see the optional dependency
-                self.pkg("image-update")
+                self.pkg("image-update", exit=4)
 
                 # Add back the installed package's publisher, but using a
                 # a repository with an empty catalog.  After that, attempt to
@@ -2278,7 +2530,7 @@
                 # the fmri is no longer in the publisher's catalog.
                 self.pkg("set-publisher -O %s test1" % \
                     self.dcs[3].get_depot_url())
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=4)
                 self.pkg("info [email protected]")
                 self.pkg("unset-publisher test1")
 
@@ -2293,7 +2545,7 @@
                     self.dcs[1].get_depot_url())
                 self.pkg("set-publisher -O %s test1" % \
                     self.dcs[3].get_depot_url())
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=4)
                 self.pkg("unset-publisher test1")
                 self.pkg("unset-publisher test3")
 
@@ -2303,7 +2555,7 @@
                 # different publisher's catalog.
                 self.pkg("set-publisher -O %s test3" % \
                     self.dcs[1].get_depot_url())
-                self.pkg("install [email protected]")
+                self.pkg("install [email protected]", exit=4)
                 self.pkg("unset-publisher test3")
 
                 # Change the image metadata back to where it was, in preparation
@@ -2385,10 +2637,15 @@
                 self.pkg("set-publisher -P -O %s test3" % \
                     self.dcs[3].get_depot_url())
 
+                # make sure we look here first; tests rely on that
+                self.pkg("set-publisher --search-before=test2 test1")
+                self.pkg("publisher")
                 # First, verify that installing a package from a non-preferred
                 # publisher will cause its dependencies to be installed from the
                 # same publisher if the preferred publisher does not offer them.
+                self.pkg("list -a")
                 self.pkg("install pkg://test1/baz")
+                self.pkg("list")
                 self.pkg("info baz | grep test1")
                 self.pkg("info corge | grep test1")
                 self.pkg("uninstall -r corge")
@@ -2406,7 +2663,77 @@
                 self.pkg("set-publisher -P test1")
                 self.pkg("unset-publisher test3")
 
-
+        def test_14_nonsticky_publisher(self):
+                """Test various aspects of the sticky/non-sticky
+                behavior of publishers"""
+
+                # For ease of debugging
+                self.pkg("list -a")
+                # install from non-preferred repo explicitly
+                self.pkg("install pkg://test2/[email protected]")
+                # Demonstrate that preferred publisher is not
+                # acceptable, since test2 is sticky by default
+                self.pkg("install [email protected]", exit=1) # not right repo
+                # Check that we can proceed once test2 is not sticky
+                self.pkg("set-publisher --non-sticky test2")
+                self.pkg("install [email protected]") # should work now
+                # Restore to pristine
+                self.pkg("set-publisher --sticky test2")
+                self.pkg("uninstall upgrade-np")
+                # Repeat the test w/ preferred
+                self.pkg("install upgrade-p")
+                self.pkg("set-publisher -P test2")
+                self.pkg("install [email protected]", exit=1) #orig pub is sticky
+                self.pkg("set-publisher --non-sticky test1")  #not anymore
+                self.pkg("install [email protected]")
+                self.pkg("set-publisher -P --sticky test1") # restore
+                self.pkg("uninstall '*'")
+                # Check  that search order can be overridden w/ explicit
+                # version specification...
+                self.pkg("install upgrade-p")
+                self.pkg("install [email protected]", exit=1)
+                self.pkg("set-publisher --non-sticky test1") 
+                self.pkg("install [email protected]") # find match later on 
+                self.pkg("set-publisher --sticky test1")
+                self.pkg("uninstall '*'")                
+
+        def test_15_nonsticky_update(self):
+                """Test to make sure image-update follows the same
+                publisher selection mechanisms as pkg install"""
+
+                # try image-update
+                self.pkg("install pkg://test2/[email protected]")
+                self.pkg("image-update", exit=4) 
+                self.pkg("list [email protected]")
+                self.pkg("set-publisher --non-sticky test2") 
+                self.pkg("publisher")
+                self.pkg("list -a upgrade-np")
+                self.pkg("image-update") 
+                self.pkg("list [email protected]")
+                self.pkg("set-publisher --sticky test2")  
+                self.pkg("uninstall '*'")                
+
+        def test_16_disabled_nonsticky(self):
+                """Test to make sure disabled publishers are 
+                automatically made non-sticky, and after 
+                being enabled keep their previous value
+                of stickiness"""
+
+                # For ease of debugging
+                self.pkg("list -a")
+                # install from non-preferred repo explicitly
+                self.pkg("install pkg://test2/[email protected]")
+                # Demonstrate that preferred publisher is not
+                # acceptable, since test2 is sticky by default
+                self.pkg("install [email protected]", exit=1) # not right repo
+                # Disable test2 and then we should be able to proceed
+                self.pkg("set-publisher --disable test2")
+                self.pkg("install [email protected]")
+                self.pkg("publisher")
+                self.pkg("set-publisher --enable test2")
+                self.pkg("publisher")
+                self.pkg("publisher | egrep sticky", exit=1 )
+                
 class TestImageCreateCorruptImage(testutils.SingleDepotTestCaseCorruptImage):
         """
         If a new essential directory is added to the format of an image it will
@@ -2820,11 +3147,15 @@
                     close
                 """
 
-                fred = """
+                fred1 = """
                     open fred@1
                     add depend type=require fmri=foo
                     close
                 """
+                fred2 = """
+                    open fred@2
+                    close
+                """
 
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, foo1)
@@ -2833,7 +3164,8 @@
                 self.pkgsend_bulk(durl, qbar)
                 self.pkgsend_bulk(durl, qux1)
                 self.pkgsend_bulk(durl, qux2)
-                self.pkgsend_bulk(durl, fred)
+                self.pkgsend_bulk(durl, fred1)
+
 
                 self.image_create(durl)
 
@@ -2847,7 +3179,7 @@
 
                 # Explicitly installing an obsolete package succeeds, but
                 # results in nothing on the system. (1)
-                self.pkg("install foo@2")
+                self.pkg("install foo@2", exit=4)
                 self.pkg("list foo", exit=1)
 
                 # Installing a package with a dependency on an obsolete package
@@ -2861,7 +3193,7 @@
                 self.pkg("list qbar")
                 self.pkg("list foo@1")
                 self.pkg("list qux | grep -- --r--")
-
+                self.pkg("uninstall '*'") #clean up for next test
                 # A simple rename test: First install the pre-renamed version of
                 # qux.  Then install the renamed version, and see that the new
                 # package is installed, and the renamed package is installed,
@@ -2870,12 +3202,18 @@
                 self.pkg("install qux")
                 self.pkg("list foo@1")
                 self.pkg("list qux | grep -- --r--")
+                self.pkg("uninstall '*'") #clean up for next test
 
                 # Install a package that's going to be obsoleted and a package
                 # that depends on it.  Update the package to its obsolete
                 # version and see that it fails.  (6, sorta)
-                self.pkg("install foo@1 fred")
-                self.pkg("install foo", exit=1)
+                self.pkg("install foo@1 fred@1")
+                self.pkg("install foo@2", exit=1)
+                # now add a version of fred that doesn't require foo, and
+                # show that update works
+                self.pkgsend_bulk(durl, fred2)
+                self.pkg("refresh")
+                self.pkg("install foo@2")
 
         def test_basic_7a(self):
                 """Upgrade a package to a version with a dependency on a renamed
@@ -3049,7 +3387,8 @@
                 self.pkgsend_bulk(durl, t6ap2_1)
 
                 self.pkg("refresh")
-                self.pkg("image-update", exit=1)
+                self.pkg("image-update", exit=4) # does nothing
+                self.pkg("list t6ap1")
 
         def test_basic_6b(self):
                 """Install a package with a dependency, and image-update after
@@ -3085,12 +3424,14 @@
                 self.image_create(durl)
 
                 self.pkg("install t6ap1")
+                self.pkg("list")
 
                 self.pkgsend_bulk(durl, t6ap1_2)
                 self.pkgsend_bulk(durl, t6ap2_2)
 
                 self.pkg("refresh")
-                self.pkg("image-update", exit=1)
+                self.pkg("image-update")
+                self.pkg("list t6ap1@2 t6ap2@1")
 
         def test_basic_8a(self):
                 """Upgrade a package to an obsolete leaf version when another
@@ -3123,7 +3464,8 @@
                 self.pkgsend_bulk(durl, t8ap1_2)
 
                 self.pkg("refresh")
-                self.pkg("image-update", exit=1)
+                self.pkg("image-update", exit=4) # does nothing
+                self.pkg("list  t8ap2@1")
 
         def test_basic_13a(self):
                 """Publish an package with a dependency, then publish both as
@@ -3192,8 +3534,9 @@
                 self.pkg("list pkg:/netbeans", exit=1)
 
         def test_basic_11a(self):
-                """Install a package using an ambiguous name where only one
-                match is non-renamed."""
+                """Install a package using an ambiguous name where
+                pkg is renamed to another package, but not the
+                conflicting one"""
 
                 t11p1 = """
                     open netbonze@1
@@ -3218,9 +3561,7 @@
                 self.pkgsend_bulk(durl, t11p3)
                 self.image_create(durl)
 
-                self.pkg("install netbonze")
-                self.pkg("list pkg:/developer/netbonze")
-                self.pkg("list pkg:/netbonze", exit=1)
+                self.pkg("install netbonze", exit=1)
 
         def test_basic_11b(self):
                 """Install a package using an ambiguous name where only one
@@ -3229,24 +3570,38 @@
 
                 t11p1 = """
                     open netbooze@1
+                    close
+                """
+
+                t11p2 = """
+                    open netbooze@2
                     add set name=pkg.renamed value=true
                     add depend type=require fmri=developer/netbooze
                     close
                 """
 
-                t11p2 = """
-                    open developer/netbooze@1
+                t11p3 = """
+                    open developer/netbooze@2
+                    close
+                """
+
+                t11p4 = """
+                    open developer/netbooze@3
+                    add depend type=require fmri=developer/missing
                     close
                 """
 
                 durl = self.dc.get_depot_url()
                 self.pkgsend_bulk(durl, t11p1)
                 self.pkgsend_bulk(durl, t11p2)
+                self.pkgsend_bulk(durl, t11p3)
+                self.pkgsend_bulk(durl, t11p4)
                 self.image_create(durl)
 
                 self.pkg("install netbooze")
                 self.pkg("list pkg:/developer/netbooze")
-                self.pkg("list pkg:/netbooze", exit=1)
+                self.pkg("list pkg:/netbooze")
+
 
         def test_basic_12(self):
                 """Upgrade a package across a rename to an ambiguous name."""
@@ -3352,7 +3707,7 @@
                 self.image_create(durl)
 
                 self.pkg("install inc1p1")
-                self.pkg("install inc1p2")
+                self.pkg("install inc1p2", exit=4)
 
                 self.pkg("list inc1p2", exit=1)
 
@@ -3421,8 +3776,7 @@
         def test_01(self):
                 """If an obsolete package is found in a preferred publisher and
                 a non-obsolete package of the same name is found in a
-                non-preferred publisher, then we should choose the package from
-                the preferred publisher, even though it's obsolete."""
+                non-preferred publisher, pick the preferred pub as usual """
 
                 durl1 = self.dcs[1].get_depot_url()
                 durl2 = self.dcs[2].get_depot_url()
@@ -3432,8 +3786,9 @@
 
                 self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
-
-                self.pkg("install stem")
+                self.pkg("list -a")
+
+                self.pkg("install stem", exit=4) # nothing to do since it's obsolete
                 # We should choose the obsolete package, which means nothing
                 # gets installed.
                 self.pkg("list", exit=1)
@@ -3461,8 +3816,7 @@
                 self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
 
-                self.pkg("install stem")
-                self.pkg("list pkg://test2/stem")
+                self.pkg("install stem", exit=1)
 
 
 if __name__ == "__main__":
--- a/src/tests/cli/t_pkg_intent.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_intent.py	Wed Nov 18 15:53:48 2009 -0800
@@ -39,7 +39,7 @@
 import pkg.client.api_errors as api_errors
 import pkg.client.progress as progress
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestPkgIntent(testutils.SingleDepotTestCase):
@@ -78,7 +78,7 @@
             open [email protected],5.11-0
             add depend type=require fmri=pkg:/[email protected]
             add dir mode=0755 owner=root group=bin path=/bin
-            add file /tmp/cat mode=0555 owner=root group=bin path=/bin/cat
+            add file /tmp/cat mode=0555 owner=root group=bin path=/bin/cat 
             close """
 
         baz10 = """
@@ -145,11 +145,9 @@
                 return False
 
         @staticmethod
-        def __do_install(api_obj, fmris, filters=None, noexecute=False):
-                if not filters:
-                        filters = []
+        def __do_install(api_obj, fmris, noexecute=False):
                 api_obj.reset()
-                api_obj.plan_install(fmris, filters, noexecute=noexecute)
+                api_obj.plan_install(fmris, noexecute=noexecute)
                 if not noexecute:
                         api_obj.prepare()
                         api_obj.execute_plan()
@@ -165,7 +163,7 @@
                         api_obj.execute_plan()
 
         def test_0_info(self):
-                """Verify that informational operations send the expected
+                """Verify that informational operations do not send
                 intent information."""
 
                 durl = self.dc.get_depot_url()
@@ -176,32 +174,19 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
 
-                info_needed = api.PackageInfo.ALL_OPTIONS - \
-                    frozenset([api.PackageInfo.LICENSES,
-                    api.PackageInfo.SIZE]) - \
-                    (api.PackageInfo.ACTION_OPTIONS - \
-                    frozenset([api.PackageInfo.DEPENDENCIES]))
-
-                api_obj.info(plist, False, info_needed)
+                api_obj.info(plist, False, frozenset([api.PackageInfo.IDENTITY,
+                    api.PackageInfo.STATE, api.PackageInfo.PREF_PUBLISHER]))
 
                 entries = self.get_intent_entries()
-                self.assertEqual(entries, [])
-
+                self.assert_(entries == [])
+                
                 api_obj.info(plist, False,
-                    info_needed | api.PackageInfo.ACTION_OPTIONS)
+                    frozenset([api.PackageInfo.DEPENDENCIES]))
 
                 entries = self.get_intent_entries()
-                # Verify that evaluation and processing entries are present
-                # for info.  This will only happen if the client actually
-                # has to contact the repository to get information not found
-                # in the catalog.
-                target = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "info",
-                    "reason": "info",
-                    "initial_target": target,
-                }))
+                # Verify that no entries are present
+                self.assert_(not entries)
+
 
         def test_1_install_uninstall(self):
                 """Verify that the install and uninstall of a single package
@@ -216,46 +201,32 @@
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: False, PKG_CLIENT_NAME)
                 self.__do_install(api_obj, ["foo"], noexecute=True)
-                self.__do_install(api_obj, ["foo"])
+                entries = self.get_intent_entries()
+                # no data should be there
+                self.assert_(not entries)
 
-                # Test uninstall.
-                self.__do_uninstall(api_obj, ["foo"], noexecute=True)
-                self.__do_uninstall(api_obj, ["foo"])
+                self.__do_install(api_obj, ["foo"])
 
                 entries = self.get_intent_entries()
-                # Verify that entries are present for install.
-                target = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target,
-                }))
+
+                foo = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
+
 
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "info",
-                    "initial_target": target,
+                    "new_fmri" : foo,
+                    "reference": "foo"
                 }))
 
-                # Verify that evaluation entries are not present for uninstall.
-                # Image operations that are for evaluation only and do not
-                # require retrieving manifest information will not send any
-                # intent information for efficiency.
-                target_ver = str(fmri.PkgFmri(target).version)
+                # Test uninstall.
+                self.__do_uninstall(api_obj, ["*"])
+
+                # Verify that processing entries are present for uninstall.
+                entries = self.get_intent_entries()
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "prior_version": target_ver,
-                    "reason": "evaluate",
-                    "initial_target": target,
-                }) == False)
-
-                # Verify that processing entries are present for uninstall.
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "uninstall",
-                    "prior_version": target_ver,
-                    "reason": "process",
-                    "initial_target": target,
+                    "old_fmri" :  foo,
+                    "reference": "*"
                 }))
 
         def test_2_upgrade(self):
@@ -268,66 +239,37 @@
                 self.image_create(durl)
                 progresstracker = progress.NullProgressTracker()
 
+                foo10 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
+                foo11 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True)
+
                 # Test install.
                 api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
                     progresstracker, lambda x: True, PKG_CLIENT_NAME)
-                self.__do_install(api_obj, ["[email protected]"], noexecute=True)
                 self.__do_install(api_obj, ["[email protected]"])
-                self.__do_install(api_obj, ["[email protected]"], noexecute=True)
                 self.__do_install(api_obj, ["[email protected]"])
 
                 # Test uninstall.
-                self.__do_uninstall(api_obj, ["foo"], noexecute=True)
                 self.__do_uninstall(api_obj, ["foo"])
 
                 entries = self.get_intent_entries()
-                # Verify entries are present for install.
-                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
+                # Verify that evaluation and processing entries are present
+                # for install.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "info",
-                    "initial_target": target0,
-                }))
-
-                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target1,
+                    "new_fmri" : foo10,
+                    "reference": "[email protected]"
                 }))
 
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "info",
-                    "initial_target": target0,
+                    "new_fmri" : foo11,
+                    "old_fmri" : foo10,
+                    "reference": "[email protected]"
                 }))
-
-                version0 = str(fmri.PkgFmri(target0).version)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target1,
-                }))
-
-                # Verify that evaluation entries are not present for uninstall.
-                # Image operations that are for evaluation only and do not
-                # require retrieving manifest information will not send any
-                # intent information for efficiency.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "reason": "evaluate",
-                    "prior_version": str(fmri.PkgFmri(plist[1]).version),
-                    "initial_target": target1,
-                }) == False)
-
-                version1 = str(fmri.PkgFmri(target1).version)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "uninstall",
-                    "reason": "process",
-                    "prior_version": version1,
-                    "initial_target": target1,
+                    "old_fmri" :  foo11,
+                    "reference": "foo"
                 }))
 
         def test_3_dependencies(self):
@@ -344,32 +286,35 @@
                 self.__do_install(api_obj, ["[email protected]"])
                 self.__do_uninstall(api_obj, ["bar", "foo"])
 
+
+                foo = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True)
+                bar = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True)
+
                 # Only testing for process; no need to re-test for evaluate.
                 entries = self.get_intent_entries()
-                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
-                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "info",
-                    "initial_target": target1,
+                    "new_fmri" : bar,
+                    "reference": "[email protected]"
                 }))
 
-                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "install",
-                    "reason": "info",
-                    "initial_target": target0,
+                    "new_fmri" : foo,
                 }))
 
         def test_4_image_upgrade(self):
                 """Verify that the correct intent information is sent during an
                 image upgrade."""
 
+                fmri_list = ["foo10", "foo11", "bar10", "foo12", "bar11"]
+                
                 plist = []
                 durl = self.dc.get_depot_url()
                 plist.extend(self.pkgsend_bulk(durl, self.foo10 + self.foo11 + \
                     self.bar10))
+
+
                 self.image_create(durl)
                 progresstracker = progress.NullProgressTracker()
 
@@ -379,6 +324,11 @@
 
                 plist.extend(self.pkgsend_bulk(durl, self.foo12 + self.bar11))
 
+                def print_fmri(a):
+                        return fmri.PkgFmri(a).get_fmri(anarchy=True)
+
+                fmris = dict(zip(fmri_list, [print_fmri(p) for p in plist]))
+
                 api_obj.refresh(immediate=True)
 
                 api_obj.reset()
@@ -386,154 +336,35 @@
                 api_obj.prepare()
                 api_obj.execute_plan()
 
-                # Only testing for process; no need to re-test for evaluate.
+                # uninstall foo & bar
+                self.__do_uninstall(api_obj, ["foo"], True)
+                                       
                 entries = self.get_intent_entries()
-                # Verify that foo10 was installed when upgrading to foo12.
-                version0 = str(fmri.PkgFmri(plist[0]).version)
-                target3 = fmri.PkgFmri(plist[3]).get_fmri(anarchy=True,
-                    include_scheme=False)
+                # Verify that foo11 was installed when upgrading to foo12.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "image-update",
-                    "reason": "info",
-                    "initial_target": target3,
+                    "new_fmri" : fmris["foo12"],
+                    "old_fmri" : fmris["foo11"]
                 }))
 
-                version2 = str(fmri.PkgFmri(plist[2]).version)
-                target4 = fmri.PkgFmri(plist[4]).get_fmri(anarchy=True,
-                    include_scheme=False)
                 # Verify that bar10 was installed when upgrading to bar11.
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "image-update",
-                    "reason": "info",
-                    "initial_target": target4,
+                    "new_fmri" : fmris["bar11"],
+                    "old_fmri" : fmris["bar10"]
                 }))
-
-        def test_5_recursive_uninstall(self):
-                """Verify that a recursive uninstall sends the expected intent
-                information."""
-
-                durl = self.dc.get_depot_url()
-                plist = self.pkgsend_bulk(durl, self.foo10 + self.foo11 + self.bar10)
-                self.image_create(durl)
-                progresstracker = progress.NullProgressTracker()
-                api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
-                    progresstracker, lambda x: True, PKG_CLIENT_NAME)
-                self.__do_install(api_obj, ["[email protected]"])
-
-                # Only testing for process; no need to re-test for evaluate.
-                self.__do_uninstall(api_obj, ["foo"], True)
-
-                entries = self.get_intent_entries()
-                # Verify that foo10 was uninstalled.
-                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                version0 = str(fmri.PkgFmri(target0).version)
+                # Verify that bar was uninstalled along w/ foo
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "reason": "process",
-                    "initial_target": target0,
-                    "prior_version": version0,
-                }))
-
-                # Verify that bar10 was uninstalled because of foo10.
-                target2 = fmri.PkgFmri(plist[2]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                version2 = str(fmri.PkgFmri(target2).version)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "uninstall",
-                    "needed_by": target0,
-                    "reason": "process",
-                    "initial_target": target0,
-                    "target": target2,
-                    "prior_version": version2,
+                    "old_fmri" : fmris["bar11"]
                 }))
 
-        def test_6_deep_dependencies(self):
-                """Verify that an install or uninstall of a package with a
-                dependency chain sends the expected intent information."""
-
-                durl = self.dc.get_depot_url()
-                plist = self.pkgsend_bulk(durl, self.foo10 + self.bar10 + \
-                    self.baz10)
-                self.image_create(durl)
-                progresstracker = progress.NullProgressTracker()
-
-                api_obj = api.ImageInterface(self.get_img_path(), API_VERSION,
-                    progresstracker, lambda x: True, PKG_CLIENT_NAME)
-                self.__do_install(api_obj, ["baz"])
-                self.__do_uninstall(api_obj, ["foo"], True)
-
-                # Only testing for process; no need to re-test for evaluate.
-                entries = self.get_intent_entries()
-
-                #
-                # Verify the install entries.
-                #
-
-                # Verify baz is logged.
-                target2 = fmri.PkgFmri(plist[2]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target2,
-                }))
-
-                # Verify bar is logged.
-                target1 = fmri.PkgFmri(plist[1]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target1,
-                }))
-
-                # Verify foo is logged.
-                target0 = fmri.PkgFmri(plist[0]).get_fmri(anarchy=True,
-                    include_scheme=False)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "install",
-                    "reason": "info",
-                    "initial_target": target0,
-                }))
-
-                #
-                # Verify the uninstall entries.
-                #
-
-                # Verify foo is the initial target.
-                version0 = str(fmri.PkgFmri(target0).version)
+                # Verify that bar was uninstalled along w/ foo
                 self.assert_(self.intent_entry_exists(entries, {
                     "operation": "uninstall",
-                    "reason": "process",
-                    "initial_target": target0,
-                    "prior_version": version0,
+                    "old_fmri" : fmris["foo12"],
+                    "reference": "foo"
                 }))
 
-                # Verify foo is the initial target, bar is needed_by foo, and
-                # foo is the target.
-                version1 = str(fmri.PkgFmri(target1).version)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "uninstall",
-                    "needed_by": target0,
-                    "reason": "process",
-                    "initial_target": target0,
-                    "target": target1,
-                    "prior_version": version1,
-                }))
-
-                # Verify foo is the initial target, baz is needed_by bar, and
-                # baz is the target.
-                version2 = str(fmri.PkgFmri(target2).version)
-                self.assert_(self.intent_entry_exists(entries, {
-                    "operation": "uninstall",
-                    "needed_by": target1,
-                    "reason": "process",
-                    "initial_target": target0,
-                    "target": target2,
-                    "prior_version": version2,
-                }))
-
-
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_list.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_list.py	Wed Nov 18 15:53:48 2009 -0800
@@ -128,7 +128,7 @@
                     "foo 1.0-0 known u----\n"
                 output = self.reduceSpaces(self.output)
                 self.assertEqualDiff(expected, output)
-
+                
         def test_list_02(self):
                 """List all "[email protected]", regardless of publisher, with "pkg:/"
                 prefix."""
@@ -249,6 +249,7 @@
                 durl2 = self.dcs[2].get_depot_url()
                 durl3 = self.dcs[3].get_depot_url()
 
+                self.pkg("list -a")
                 # Install a package from the second publisher.
                 self.pkg("install pkg://test2/[email protected]")
 
--- a/src/tests/cli/t_pkg_publisher.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkg_publisher.py	Wed Nov 18 15:53:48 2009 -0800
@@ -251,8 +251,12 @@
             open bar@1,5.11-0
             close """
 
+        baz1 = """
+            open baz@1,5.11-0
+            close """
+
         def setUp(self):
-                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                testutils.ManyDepotTestCase.setUp(self, ["test1", "test2", "test3", 
                     "test1", "test1"])
 
                 durl1 = self.dcs[1].get_depot_url()
@@ -261,8 +265,12 @@
                 durl2 = self.dcs[2].get_depot_url()
                 self.pkgsend_bulk(durl2, self.bar1)
 
+                durl3 = self.dcs[3].get_depot_url()
+                self.pkgsend_bulk(durl3, self.baz1)
+
                 self.image_create(durl1, prefix="test1")
                 self.pkg("set-publisher -O " + durl2 + " test2")
+                self.pkg("set-publisher -O " + durl3 + " test3")
 
         def tearDown(self):
                 testutils.ManyDepotTestCase.tearDown(self)
@@ -361,11 +369,14 @@
         def test_enable_disable(self):
                 """Test enable and disable."""
 
+                self.pkg("publisher")
                 self.pkg("publisher | grep test1")
                 self.pkg("publisher | grep test2")
 
                 self.pkg("set-publisher -d test2")
-                self.pkg("publisher | grep test2", exit=1)
+                self.pkg("publisher | grep test2") # always show
+                self.pkg("publisher -n | grep test2", exit=1) # unless -n
+
                 self.pkg("list -a bar", exit=1)
                 self.pkg("publisher -a | grep test2")
                 self.pkg("set-publisher -P test2", exit=1)
@@ -375,7 +386,8 @@
                 self.pkg("list -a bar")
 
                 self.pkg("set-publisher --disable test2")
-                self.pkg("publisher | grep test2", exit=1)
+                self.pkg("publisher | grep test2")
+                self.pkg("publisher -n | grep test2", exit=1)
                 self.pkg("list -a bar", exit=1)
                 self.pkg("publisher -a | grep test2")
                 self.pkg("set-publisher --enable test2")
@@ -386,5 +398,41 @@
                 self.pkg("set-publisher -d test1", exit=1)
                 self.pkg("set-publisher --disable test1", exit=1)
 
+        def test_search_order(self):
+                """Test moving search order around"""
+                # following should be order from above test
+                self.pkg("publisher") # ease debugging
+                self.pkg("publisher -H | head -1 | egrep test1")
+                self.pkg("publisher -H | head -2 | egrep test2")
+                self.pkg("publisher -H | head -3 | egrep test3")
+                # make test2 disabled, make sure order is preserved                
+                self.pkg("set-publisher --disable test2")
+                self.pkg("publisher") # ease debugging
+                self.pkg("publisher -H | head -1 | egrep test1")
+                self.pkg("publisher -H | head -2 | egrep test2")
+                self.pkg("publisher -H | head -3 | egrep test3")
+                self.pkg("set-publisher --enable test2")
+                # make test3 preferred
+                self.pkg("set-publisher -P test3")
+                self.pkg("publisher") # ease debugging
+                self.pkg("publisher -H | head -1 | egrep test3")
+                self.pkg("publisher -H | head -2 | egrep test1")
+                self.pkg("publisher -H | head -3 | egrep test2")
+                # move test3 after test1
+                self.pkg("set-publisher --search-after=test1 test3")
+                self.pkg("publisher") # ease debugging              
+                self.pkg("publisher -H | head -1 | egrep test1")
+                self.pkg("publisher -H | head -2 | egrep test3")
+                self.pkg("publisher -H | head -3 | egrep test2")
+                # move test2 before test3
+                self.pkg("set-publisher --search-before=test3 test2")
+                self.pkg("publisher") # ease debugging              
+                self.pkg("publisher -H | head -1 | egrep test1")
+                self.pkg("publisher -H | head -2 | egrep test2")
+                self.pkg("publisher -H | head -3 | egrep test3")
+                # make sure we cannot get ahead or behind of ourselves
+                self.pkg("set-publisher --search-before=test3 test3", exit=1)
+                self.pkg("set-publisher --search-after=test3 test3", exit=1)
+
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkgdep_resolve.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkgdep_resolve.py	Wed Nov 18 15:53:48 2009 -0800
@@ -40,7 +40,7 @@
 import pkg.portable as portable
 import pkg.publish.dependencies as dependencies
 
-API_VERSION = 23
+API_VERSION = 24
 PKG_CLIENT_NAME = "pkg"
 
 class TestApiDependencies(testutils.SingleDepotTestCase):
@@ -175,7 +175,7 @@
 
         @staticmethod
         def _do_install(api_obj, pkg_list, **kwargs):
-                api_obj.plan_install(pkg_list, [], **kwargs)
+                api_obj.plan_install(pkg_list, **kwargs)
                 TestApiDependencies._do_finish(api_obj)
 
         @staticmethod
--- a/src/tests/cli/t_pkgsend.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_pkgsend.py	Wed Nov 18 15:53:48 2009 -0800
@@ -55,10 +55,11 @@
                 dfurl = "file://%s" % self.dc.get_repodir()
 
                 for url in (dhurl, dfurl):
-                        self.pkgsend_bulk(url,
+                        for line in \
                             """open [email protected],5.11-0
                             add dir mode=0755 owner=root group=bin path=/bin
-                            close -A""")
+                            close -A""".splitlines():
+                                self.pkgsend(url, line)
 
                         if url == dfurl:
                                 # Must restart pkg.depotd so it will pickup the
@@ -493,8 +494,8 @@
                                 # to be incremented.
                                 p2 = p.replace("<ver>", str(ver))
                                 self.pkgsend_bulk(url, p2, exit=exit)
-                                if exit:
-                                        self.pkgsend(url, "close -A")
+                                #if exit:
+                                #        self.pkgsend(url, "close -A")
 
                                 # Then do it line-by-line
                                 for i, l in enumerate(p.splitlines()):
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_solver.py	Wed Nov 18 15:53:48 2009 -0800
@@ -0,0 +1,2214 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
+# Use is subject to license terms.
+
+import unittest
+import pkg.solver as solver
+import os
+import sys
+
+# Set the path so that modules above can be found
+path_to_parent = os.path.join(os.path.dirname(__file__), "..")
+sys.path.insert(0, path_to_parent)
+import pkg5unittest
+
+class TestSolver(pkg5unittest.Pkg5TestCase):
+
+        def test_no_solution(self):
+                cnf_test(failing_test_case.splitlines())
+
+        def test_solution(self):
+                cnf_test(working_test_case.splitlines())
+
+def cnf_test(lines):
+        s = solver.msat_solver()
+        
+        for l in lines:
+                if l and l[0] in 'pc%0':
+                        pass # comment
+                else:
+                        # skip trailing 0
+                        cl = [int(i) for i in l.split()[0:-1]]
+                        if cl and not s.add_clause(cl):
+                                return False
+        # create new copy of solver instance to test copy code
+        n = solver.msat_solver(s)
+        del s # force gc of old solver instance
+        return n.solve([])
+
+
+failing_test_case = """
+c This Formular is generated by mcnf
+c
+c    horn? no
+c    forced? no
+c    mixed sat? no
+c    clause length = 3
+c
+p cnf 250  1065
+ -128 -209 148 0
+2 196 -115 0
+-66 -189 -241 0
+-84 -132 -93 0
+214 179 66 0
+203 132 -237 0
+164 -13 -172 0
+-157 198 160 0
+-91 -164 235 0
+-70 -116 54 0
+-164 171 -189 0
+126 -184 211 0
+-19 118 41 0
+32 105 -33 0
+-141 -108 50 0
+-1 156 -188 0
+138 -181 142 0
+-191 -247 -220 0
+-101 -207 -88 0
+68 114 -234 0
+134 -57 -131 0
+-30 -133 116 0
+-24 -173 92 0
+226 -4 -224 0
+-190 204 61 0
+148 205 -174 0
+213 -56 53 0
+174 -250 206 0
+32 219 -112 0
+203 -222 202 0
+130 42 226 0
+222 33 58 0
+58 35 34 0
+-121 80 245 0
+-231 38 -248 0
+-205 -179 184 0
+-182 -204 -36 0
+23 35 -181 0
+82 -168 -59 0
+103 132 -182 0
+-243 -18 -160 0
+-180 130 95 0
+111 -140 -107 0
+19 28 -72 0
+-222 207 -103 0
+134 -50 -184 0
+185 155 -11 0
+-102 230 -18 0
+-112 39 242 0
+154 -87 53 0
+173 -123 -159 0
+-238 -101 -40 0
+-126 -232 -139 0
+107 -51 197 0
+-194 -138 -150 0
+106 -66 -11 0
+-150 -159 -27 0
+-98 -32 138 0
+144 -32 128 0
+153 74 -249 0
+-190 -175 -208 0
+-127 88 -38 0
+-59 125 -225 0
+-23 4 181 0
+12 247 -133 0
+151 -238 127 0
+237 -65 -154 0
+-218 -26 -55 0
+91 -245 169 0
+-81 -156 10 0
+166 -66 -45 0
+109 -162 47 0
+-193 153 40 0
+162 -186 -7 0
+93 38 -58 0
+-159 -167 -39 0
+-187 68 124 0
+247 23 212 0
+49 182 -243 0
+206 -105 -237 0
+236 116 154 0
+236 -6 182 0
+-168 236 35 0
+-186 70 -236 0
+127 80 103 0
+100 79 -176 0
+117 -88 -1 0
+60 -115 -224 0
+-148 181 -65 0
+-132 235 19 0
+-44 -197 190 0
+-214 67 129 0
+-203 175 -191 0
+-172 166 -115 0
+-176 -180 207 0
+-56 -208 -1 0
+-37 140 -19 0
+-242 -55 58 0
+-116 -153 241 0
+-203 64 -219 0
+-214 64 90 0
+-166 96 155 0
+68 -2 63 0
+-200 49 196 0
+-230 -232 -148 0
+-81 105 219 0
+187 -236 -123 0
+-99 237 136 0
+-205 61 -118 0
+-235 -230 128 0
+38 -9 -124 0
+-34 -116 179 0
+-40 -55 -85 0
+244 170 6 0
+-7 -54 -236 0
+153 -223 173 0
+-219 -13 -217 0
+244 -210 -228 0
+23 -128 -113 0
+35 -245 -235 0
+184 31 143 0
+-207 -24 135 0
+97 -165 -14 0
+-17 15 26 0
+61 78 -8 0
+215 -30 -166 0
+229 93 -246 0
+-167 -113 80 0
+78 205 -87 0
+117 -144 207 0
+-10 153 -84 0
+-147 238 61 0
+-58 -17 -190 0
+209 -25 -81 0
+-175 244 -57 0
+185 127 -147 0
+237 199 -144 0
+124 148 -10 0
+190 244 231 0
+-185 214 -101 0
+-237 31 -94 0
+-39 36 -94 0
+-175 206 81 0
+141 -209 -109 0
+228 -165 -112 0
+-45 142 238 0
+-34 -64 -71 0
+-60 170 -109 0
+6 245 87 0
+12 -93 -231 0
+80 216 28 0
+-103 137 116 0
+-77 -73 -30 0
+-63 219 -129 0
+-215 -94 86 0
+81 46 221 0
+161 -215 -212 0
+-137 -215 48 0
+-50 211 229 0
+172 74 154 0
+-14 -100 166 0
+59 -119 -243 0
+244 -31 -96 0
+51 -247 205 0
+-90 97 -139 0
+113 118 -7 0
+57 -161 -84 0
+180 -174 -9 0
+-19 16 -202 0
+-39 -134 224 0
+84 240 195 0
+-55 75 -207 0
+116 54 60 0
+80 98 40 0
+-159 109 217 0
+-210 -119 82 0
+-201 -14 174 0
+43 -19 -100 0
+-126 223 26 0
+-249 163 -205 0
+-58 -4 109 0
+-239 109 -82 0
+210 -58 -2 0
+238 -36 117 0
+109 -199 32 0
+-54 221 -80 0
+230 99 97 0
+45 221 169 0
+191 17 114 0
+-177 -138 -12 0
+35 -5 145 0
+-102 -147 103 0
+-59 3 84 0
+-56 240 -130 0
+-233 -223 -47 0
+169 216 -3 0
+-68 -182 67 0
+34 -189 27 0
+230 -222 66 0
+19 123 84 0
+64 35 231 0
+236 165 242 0
+77 -119 -61 0
+-179 215 198 0
+-105 -93 211 0
+-204 221 -112 0
+-244 23 -125 0
+-107 152 78 0
+144 -183 28 0
+-179 -32 194 0
+217 174 72 0
+-38 101 -132 0
+-122 193 108 0
+12 3 -44 0
+140 -23 -2 0
+185 -129 145 0
+-116 -245 -102 0
+203 -29 -131 0
+-172 239 243 0
+-186 -167 109 0
+158 -184 149 0
+-53 56 100 0
+-92 5 35 0
+-212 -236 250 0
+-42 137 -193 0
+171 231 127 0
+176 -85 -122 0
+32 81 -178 0
+78 -14 -227 0
+104 -10 -65 0
+239 -81 -118 0
+182 76 -235 0
+-226 -132 54 0
+145 25 120 0
+49 205 99 0
+250 240 -89 0
+17 37 -65 0
+226 -66 -47 0
+136 -112 19 0
+-94 -32 74 0
+200 144 -65 0
+27 29 207 0
+6 112 53 0
+-170 -192 -65 0
+-18 206 8 0
+-158 245 147 0
+222 34 24 0
+69 182 -121 0
+-22 -202 -232 0
+-213 -82 173 0
+78 -176 -151 0
+245 215 242 0
+-126 -96 243 0
+-164 -220 205 0
+-60 128 162 0
+-237 -126 -20 0
+105 -144 -165 0
+-158 76 -38 0
+153 -236 206 0
+187 -191 247 0
+-192 159 171 0
+162 -151 -213 0
+19 155 238 0
+-43 207 -46 0
+117 250 118 0
+159 40 -199 0
+149 -163 -145 0
+23 7 46 0
+71 106 56 0
+-43 220 -118 0
+-200 242 6 0
+143 219 -168 0
+-179 102 -163 0
+-74 183 -82 0
+248 92 -154 0
+81 202 229 0
+92 243 19 0
+165 -210 199 0
+-54 -8 244 0
+-70 -135 -223 0
+-80 -89 -189 0
+-7 182 -16 0
+172 53 8 0
+114 -107 197 0
+-135 -35 -239 0
+-214 -10 137 0
+136 -131 151 0
+15 -37 -89 0
+-234 -7 97 0
+118 191 101 0
+215 123 185 0
+-230 202 -190 0
+211 -59 210 0
+200 -162 116 0
+158 12 212 0
+-56 229 196 0
+-50 -52 218 0
+164 -142 -71 0
+233 -140 159 0
+119 93 65 0
+155 -156 -57 0
+117 -197 180 0
+-97 -100 -2 0
+165 206 83 0
+-127 173 -110 0
+20 158 116 0
+-205 -125 -209 0
+-244 -57 -246 0
+-139 -173 21 0
+89 -150 149 0
+-51 60 -224 0
+-81 -193 195 0
+-208 -209 83 0
+-141 70 -223 0
+200 -121 99 0
+-207 201 -61 0
+167 157 -71 0
+200 78 182 0
+82 171 -183 0
+4 -33 -85 0
+-103 87 186 0
+41 183 169 0
+84 244 -116 0
+128 -108 -67 0
+24 165 181 0
+94 118 -148 0
+-41 -71 -98 0
+248 15 -135 0
+10 -157 -17 0
+-225 241 93 0
+217 -192 227 0
+28 -11 78 0
+-16 218 24 0
+59 -91 -210 0
+4 141 166 0
+-204 12 -206 0
+187 153 -186 0
+23 -43 -124 0
+100 2 -46 0
+-236 -80 -102 0
+165 129 159 0
+-45 245 -187 0
+137 31 -7 0
+214 -178 200 0
+222 107 -74 0
+-37 -161 -129 0
+-202 -214 99 0
+69 3 78 0
+-7 -217 51 0
+215 -99 96 0
+124 41 195 0
+-233 -223 206 0
+233 197 87 0
+-102 33 70 0
+175 241 162 0
+86 -202 83 0
+137 -214 64 0
+-111 -204 197 0
+-23 12 -121 0
+101 28 -95 0
+-206 57 220 0
+-122 -2 -125 0
+75 -241 86 0
+-72 185 77 0
+-125 210 -143 0
+106 36 54 0
+136 142 -93 0
+-142 -180 214 0
+44 -22 -60 0
+-235 88 -130 0
+-124 -162 245 0
+19 -121 156 0
+84 -23 -191 0
+-173 -43 -163 0
+151 148 213 0
+-239 147 -180 0
+-247 -92 -36 0
+163 37 -188 0
+-245 -7 -231 0
+-32 -139 41 0
+-13 -157 211 0
+-107 136 126 0
+-58 201 -115 0
+-184 -48 -205 0
+57 -237 -80 0
+160 -243 155 0
+154 -42 -179 0
+-107 -242 -167 0
+169 170 -19 0
+-65 -140 -209 0
+143 146 182 0
+90 -170 158 0
+-235 54 83 0
+-122 193 239 0
+112 130 -212 0
+-9 -167 93 0
+-57 54 -28 0
+-55 -56 29 0
+-206 -79 245 0
+-16 -81 -190 0
+174 160 131 0
+-125 156 -148 0
+-128 174 -25 0
+-209 -16 60 0
+-13 -225 109 0
+167 -124 168 0
+142 -16 173 0
+8 -45 216 0
+13 160 -41 0
+-101 125 -225 0
+-218 244 -49 0
+153 -183 204 0
+117 230 167 0
+-108 24 -27 0
+149 -198 13 0
+48 124 -84 0
+-35 -162 58 0
+110 -103 9 0
+-143 -43 169 0
+-91 -87 -70 0
+11 -117 244 0
+-141 37 -177 0
+238 126 -215 0
+157 103 -27 0
+-133 134 -3 0
+112 -107 -113 0
+-225 -104 26 0
+109 -220 -174 0
+58 -140 -86 0
+1 97 14 0
+249 161 -217 0
+99 -242 33 0
+172 2 -235 0
+-79 -132 107 0
+43 -217 -169 0
+-166 218 -128 0
+63 178 135 0
+110 224 30 0
+-62 147 -237 0
+-241 -103 -169 0
+125 -75 -106 0
+146 20 -112 0
+59 226 -136 0
+-194 132 -101 0
+133 -41 -14 0
+190 74 -247 0
+230 221 224 0
+-3 -61 -65 0
+93 15 194 0
+-155 105 117 0
+-146 -127 -35 0
+170 -173 213 0
+-13 -234 -117 0
+-244 -225 15 0
+-41 151 -185 0
+-196 -2 114 0
+-220 111 -238 0
+234 134 146 0
+100 -29 -4 0
+-195 -6 151 0
+-116 109 -9 0
+45 -58 -61 0
+195 224 66 0
+119 174 129 0
+122 233 100 0
+30 -227 -120 0
+238 1 16 0
+231 229 -46 0
+188 226 23 0
+-181 247 -216 0
+233 84 97 0
+8 41 71 0
+37 52 56 0
+-227 58 84 0
+116 48 -95 0
+-58 233 36 0
+210 11 -116 0
+-107 -103 242 0
+21 -161 169 0
+202 25 82 0
+248 163 65 0
+-108 26 -78 0
+-162 163 -248 0
+-14 -95 92 0
+218 -151 -26 0
+-132 -195 44 0
+14 85 -136 0
+-236 219 -105 0
+164 136 -25 0
+7 36 124 0
+-163 -216 -15 0
+-66 176 -76 0
+-144 -3 -101 0
+-178 -149 -108 0
+175 -161 210 0
+-118 106 -11 0
+-124 128 98 0
+-81 -223 117 0
+154 149 -1 0
+-186 26 66 0
+-190 192 -114 0
+-122 -197 -52 0
+-84 -226 105 0
+52 61 225 0
+206 -7 -101 0
+-29 93 -116 0
+67 -164 135 0
+1 -217 -5 0
+-180 218 222 0
+230 -225 -50 0
+4 -25 45 0
+-57 234 -1 0
+-221 -103 100 0
+137 234 -109 0
+20 -227 -202 0
+-103 -247 198 0
+-29 -148 -35 0
+-191 102 18 0
+-52 -195 18 0
+61 5 -247 0
+165 -207 -217 0
+-147 -207 27 0
+100 117 -129 0
+-152 -83 132 0
+-190 53 -121 0
+156 230 181 0
+2 -239 -65 0
+-55 -20 -107 0
+-119 -39 -221 0
+-116 147 16 0
+-211 238 -60 0
+249 -111 141 0
+-54 -193 -81 0
+49 -245 -5 0
+-233 110 -109 0
+-79 -56 180 0
+-41 196 150 0
+242 -63 231 0
+39 22 100 0
+5 23 204 0
+-55 -100 105 0
+-22 -28 247 0
+-209 200 67 0
+-46 59 62 0
+-239 -107 -125 0
+242 25 -246 0
+-148 -30 -11 0
+-148 160 -169 0
+5 145 249 0
+168 -28 -207 0
+-188 212 -201 0
+-166 205 -239 0
+145 -246 -100 0
+3 215 -93 0
+101 -198 -160 0
+-233 178 -90 0
+-143 -26 -102 0
+-72 -97 -195 0
+-119 -163 -120 0
+93 13 98 0
+-131 -53 15 0
+-118 129 151 0
+168 81 199 0
+-17 121 -21 0
+-36 -175 196 0
+-221 57 68 0
+111 145 -183 0
+114 -31 24 0
+-170 159 -146 0
+123 -80 152 0
+-84 -184 -134 0
+-206 30 -55 0
+81 -154 198 0
+129 135 248 0
+-2 198 122 0
+230 101 -18 0
+25 208 216 0
+-247 176 160 0
+34 -159 9 0
+-74 184 31 0
+29 -66 -148 0
+-233 204 -107 0
+204 -30 -127 0
+-237 8 -65 0
+79 112 181 0
+157 -85 83 0
+204 113 -216 0
+-11 -15 27 0
+44 114 8 0
+105 188 -158 0
+-51 204 -48 0
+145 211 40 0
+-107 -31 -114 0
+-134 212 -105 0
+188 -174 -151 0
+58 -9 -151 0
+33 -37 -119 0
+172 -3 169 0
+-26 -21 48 0
+-94 -99 -41 0
+192 1 -7 0
+250 -138 185 0
+-6 -131 -83 0
+11 191 -240 0
+-175 -163 -249 0
+-214 -98 193 0
+120 190 -185 0
+-135 -64 -24 0
+-187 249 -129 0
+76 -232 112 0
+-17 -161 117 0
+-6 250 246 0
+85 -188 117 0
+-47 91 -103 0
+-123 -92 142 0
+3 -183 -249 0
+-175 148 -129 0
+223 172 119 0
+194 76 -114 0
+206 123 -222 0
+-186 -110 -71 0
+-63 152 -110 0
+-122 -44 -119 0
+-14 76 -224 0
+-8 -77 -97 0
+-116 110 63 0
+148 106 192 0
+204 -168 -56 0
+-221 173 -13 0
+168 57 -211 0
+218 151 245 0
+70 -234 -143 0
+24 194 106 0
+16 -236 -187 0
+162 97 43 0
+8 79 228 0
+-39 -179 48 0
+-119 213 -231 0
+-239 57 -232 0
+-161 247 8 0
+30 -127 197 0
+72 168 -233 0
+-157 -217 -135 0
+134 180 233 0
+27 -14 -64 0
+153 247 -60 0
+-154 -76 -106 0
+-59 -100 170 0
+120 -121 -41 0
+-169 13 158 0
+-166 199 120 0
+164 202 -199 0
+-223 148 -242 0
+4 211 100 0
+188 -231 -98 0
+218 129 -93 0
+-211 18 -93 0
+51 -10 -78 0
+22 -155 -130 0
+207 -135 -172 0
+199 197 14 0
+182 -245 -135 0
+-204 181 -32 0
+-18 -237 80 0
+-96 69 193 0
+-98 245 -91 0
+71 -24 93 0
+48 -131 194 0
+29 144 -12 0
+128 15 -71 0
+125 58 -238 0
+-84 111 38 0
+224 168 246 0
+-82 -188 -33 0
+-67 98 242 0
+34 248 -112 0
+217 95 59 0
+56 245 13 0
+72 129 -245 0
+82 134 -61 0
+-128 55 -183 0
+-187 42 38 0
+90 -102 54 0
+-159 224 229 0
+-117 -158 -180 0
+113 108 5 0
+239 34 -122 0
+-85 -118 -19 0
+-240 129 -145 0
+-15 149 129 0
+-144 -189 217 0
+228 -223 97 0
+16 -84 -242 0
+206 -212 91 0
+-71 -194 21 0
+59 -31 37 0
+-89 156 -243 0
+60 21 75 0
+14 12 -8 0
+-227 -183 131 0
+-95 -190 -49 0
+-151 -54 -133 0
+-134 49 -157 0
+6 -114 224 0
+201 -195 -17 0
+-99 -36 88 0
+123 -67 105 0
+142 -94 49 0
+58 106 234 0
+22 -18 -86 0
+201 -245 71 0
+-220 -228 227 0
+-117 31 -212 0
+-177 -140 -59 0
+229 233 150 0
+47 -36 103 0
+-239 102 -241 0
+-35 194 208 0
+199 -37 -180 0
+140 -176 -123 0
+148 -36 243 0
+14 141 227 0
+-182 -141 248 0
+178 85 144 0
+247 231 15 0
+77 -168 -40 0
+-194 -181 -83 0
+-225 116 -79 0
+-80 182 -50 0
+63 -36 -122 0
+82 231 -59 0
+-64 -244 157 0
+-86 140 -207 0
+-129 -192 -143 0
+-69 227 216 0
+-83 137 -101 0
+117 -71 145 0
+115 -53 199 0
+-32 96 -1 0
+104 93 -142 0
+190 116 83 0
+191 -124 -161 0
+144 11 -181 0
+-151 113 243 0
+-66 -141 -108 0
+-153 -149 7 0
+-75 -129 137 0
+113 -107 43 0
+-191 99 237 0
+199 67 163 0
+-198 -177 -21 0
+217 -236 88 0
+-136 -84 158 0
+52 68 -204 0
+-61 200 21 0
+95 -204 -221 0
+-75 -125 118 0
+213 113 173 0
+-226 -92 118 0
+-134 -189 67 0
+-198 7 -26 0
+-49 197 57 0
+-5 -72 -146 0
+226 167 -27 0
+211 -229 94 0
+-101 -80 -12 0
+58 -47 -80 0
+148 -217 -9 0
+229 -120 -117 0
+161 -174 191 0
+10 -51 -154 0
+-155 235 -198 0
+-171 247 127 0
+-130 19 140 0
+-209 -185 -25 0
+-223 -199 -27 0
+-28 124 187 0
+135 -28 31 0
+-31 88 89 0
+22 -43 -47 0
+21 165 -184 0
+-250 69 -27 0
+221 -177 162 0
+-72 -218 207 0
+23 -159 83 0
+-54 225 -190 0
+-140 -21 49 0
+-50 -177 -18 0
+80 -250 172 0
+-77 183 -218 0
+184 55 -146 0
+-104 181 -188 0
+243 146 -70 0
+-215 -187 -247 0
+-196 -50 90 0
+-84 143 -146 0
+147 119 -118 0
+227 14 -110 0
+44 238 -153 0
+197 -69 -176 0
+127 65 27 0
+208 190 -162 0
+-39 250 -196 0
+114 -89 206 0
+142 75 -148 0
+-202 237 -194 0
+-21 216 -177 0
+114 -80 -200 0
+-27 91 -84 0
+-63 249 -36 0
+89 -18 -133 0
+-19 -17 -107 0
+145 62 -227 0
+-89 -148 -44 0
+-133 -192 -149 0
+-65 240 -233 0
+-88 -40 -245 0
+92 -129 4 0
+22 -62 -21 0
+216 116 -93 0
+79 100 234 0
+39 134 44 0
+-226 -170 -157 0
+104 9 -191 0
+26 -39 40 0
+113 232 -174 0
+-101 81 -104 0
+173 90 101 0
+-208 173 -97 0
+-72 209 -111 0
+-51 -93 108 0
+-248 216 181 0
+-65 -170 212 0
+-102 -161 146 0
+-72 -28 -25 0
+-117 -18 229 0
+-52 163 -79 0
+94 120 79 0
+105 116 -227 0
+67 186 -211 0
+-226 -235 196 0
+-67 -11 23 0
+-55 -85 -197 0
+-200 -245 -76 0
+109 -61 -127 0
+-248 127 -229 0
+53 148 -197 0
+151 -98 -24 0
+-58 180 -158 0
+74 214 -200 0
+-31 241 172 0
+26 219 -56 0
+1 110 -18 0
+156 19 -89 0
+112 87 204 0
+-5 151 -59 0
+34 -149 100 0
+83 248 220 0
+31 2 -78 0
+110 -152 -37 0
+-132 -217 -57 0
+-71 176 79 0
+31 -98 -75 0
+-60 229 -171 0
+87 207 112 0
+30 151 -41 0
+17 162 109 0
+-172 111 -221 0
+166 170 -147 0
+-48 143 -201 0
+233 -46 -122 0
+207 -149 -124 0
+-188 -166 -65 0
+-76 -77 -96 0
+-216 211 45 0
+137 -103 -106 0
+220 -82 -136 0
+47 -84 -44 0
+-37 67 -32 0
+33 -5 156 0
+-137 58 127 0
+229 -36 -84 0
+243 175 63 0
+242 -73 -121 0
+219 237 164 0
+149 -201 -142 0
+27 172 243 0
+-90 29 45 0
+206 57 153 0
+-235 -49 -94 0
+233 71 108 0
+82 -122 223 0
+-195 -71 37 0
+-70 179 -159 0
+-79 -240 -38 0
+-79 -121 30 0
+-238 -78 -246 0
+218 96 48 0
+107 -154 -199 0
+4 -205 194 0
+1 -205 -203 0
+-155 129 -26 0
+-128 57 22 0
+-195 -168 -10 0
+97 -186 -90 0
+122 227 171 0
+22 163 191 0
+-223 191 -85 0
+100 -59 -63 0
+245 49 -181 0
+-51 210 -135 0
+-34 55 54 0
+2 74 -57 0
+233 168 -230 0
+-40 22 230 0
+128 -157 27 0
+-154 -161 -114 0
+-74 -136 38 0
+51 -205 23 0
+-212 40 -71 0
+9 -138 -83 0
+-95 54 121 0
+-174 -85 140 0
+66 16 67 0
+-137 -8 105 0
+-133 -206 -3 0
+175 86 -206 0
+-50 -217 51 0
+51 244 31 0
+184 -218 84 0
+-153 58 -237 0
+56 -198 63 0
+228 -42 74 0
+43 -32 245 0
+-150 82 -44 0
+-14 -22 25 0
+228 -232 -245 0
+-147 -221 29 0
+-222 41 -40 0
+42 -13 -20 0
+53 9 161 0
+125 236 69 0
+-105 -172 32 0
+-142 114 -71 0
+-120 -122 -197 0
+-29 9 -200 0
+26 210 -193 0
+-155 183 140 0
+216 -208 -146 0
+-220 -8 98 0
+109 175 -63 0
+-16 -139 -108 0
+176 137 -119 0
+-97 39 142 0
+218 -44 -37 0
+-119 -69 -107 0
+-79 142 109 0
+-123 25 227 0
+177 -187 -89 0
+-99 -147 -207 0
+-68 81 236 0
+145 90 3 0
+93 -149 -127 0
+-120 -67 154 0
+121 234 -229 0
+-245 186 21 0
+92 5 -121 0
+197 -100 -46 0
+-40 -39 -3 0
+25 -117 -121 0
+-194 -189 175 0
+246 10 40 0
+13 50 147 0
+-243 163 105 0
+132 -131 -218 0
+-241 78 101 0
+-200 -38 -29 0
+-36 -166 183 0
+248 -216 218 0
+-203 92 204 0
+-83 -84 -165 0
+-202 -197 -244 0
+112 -221 63 0
+100 151 -1 0
+141 -206 -52 0
+181 -208 -229 0
+53 93 173 0
+193 -184 -79 0
+41 -78 -133 0
+1 -35 -90 0
+-198 -60 174 0
+152 207 -157 0
+183 -196 -163 0
+-244 242 218 0
+11 32 146 0
+-66 -32 -84 0
+-54 -109 -195 0
+190 -116 144 0
+-242 -122 86 0
+-71 7 -150 0
+241 -173 -15 0
+62 -217 81 0
+205 -116 130 0
+193 -209 128 0
+146 -240 -132 0
+29 197 161 0
+15 83 -39 0
+-109 -44 81 0
+244 85 -7 0
+-246 9 165 0
+115 -83 67 0
+-98 -141 170 0
+-102 94 -52 0
+-231 -74 -28 0
+162 191 -149 0
+197 -183 -35 0
+102 -56 50 0
+30 -45 -129 0
+25 -207 -33 0
+192 -106 -169 0
+43 -129 -169 0
+237 244 182 0
+-72 -44 -168 0
+-158 -150 102 0
+168 -143 151 0
+-72 26 212 0
+116 -89 98 0
+171 -197 156 0
+233 -54 -181 0
+129 -161 25 0
+113 69 -33 0
+179 -175 224 0
+138 -143 -46 0
+75 213 -246 0
+-137 -175 -150 0
+-169 -67 215 0
+86 69 -199 0
+-159 233 63 0
+-145 101 6 0
+129 -243 -227 0
+-175 72 -247 0
+163 -109 207 0
+31 77 33 0
+-136 175 160 0
+-192 -193 -7 0
+99 145 232 0
+-233 198 114 0
+240 -89 -108 0
+-81 -67 -63 0
+5 149 69 0
+-172 166 -184 0
+158 -244 -166 0
+-53 -172 -62 0
+49 25 61 0
+237 19 -166 0
+94 202 -148 0
+-246 13 152 0
+-135 -86 -5 0
+-190 -44 -223 0
+-17 -141 6 0
+165 39 237 0
+221 -62 -104 0
+-206 107 -223 0
+-159 -243 -13 0
+118 -9 57 0
+%
+0
+"""
+working_test_case = """
+c This Formular is generated by mcnf
+c
+c    horn? no
+c    forced? no
+c    mixed sat? no
+c    clause length = 3
+c
+p cnf 250  1065
+ -108 246 59 0
+-161 -43 234 0
+7 41 -88 0
+26 178 -41 0
+-7 -145 -33 0
+206 18 -136 0
+-15 173 -213 0
+31 -91 215 0
+3 216 196 0
+234 -85 179 0
+155 -195 106 0
+-211 -223 -41 0
+97 2 -217 0
+-81 -122 27 0
+-149 34 239 0
+69 -216 183 0
+-69 148 -92 0
+-89 -120 184 0
+231 110 -213 0
+67 173 -195 0
+132 155 183 0
+-115 83 4 0
+173 163 -242 0
+-198 43 90 0
+71 -116 37 0
+-232 52 28 0
+21 -230 -124 0
+-146 -108 -110 0
+-116 163 214 0
+69 -143 128 0
+228 141 -99 0
+-47 75 -193 0
+-118 -244 -235 0
+148 -246 -112 0
+124 19 -76 0
+-49 102 -125 0
+110 155 3 0
+-180 -192 -94 0
+-114 -67 -219 0
+-159 53 187 0
+219 -102 162 0
+21 -109 -173 0
+-124 90 189 0
+-191 117 175 0
+77 -250 -155 0
+-74 203 60 0
+-4 65 166 0
+174 -212 -165 0
+-220 119 -35 0
+-247 105 126 0
+-110 -192 63 0
+-227 181 172 0
+-219 -31 -221 0
+51 113 19 0
+212 -4 151 0
+197 -14 -211 0
+117 159 -69 0
+48 -19 207 0
+-168 203 -212 0
+-232 250 -222 0
+151 34 139 0
+249 -159 229 0
+-243 244 -48 0
+48 180 -153 0
+-227 -98 190 0
+-73 -130 60 0
+33 239 -11 0
+41 -48 -201 0
+-39 43 143 0
+131 -28 106 0
+-97 49 -215 0
+7 42 -194 0
+-224 -94 46 0
+137 -220 -84 0
+-38 41 -163 0
+-229 208 -187 0
+149 -120 238 0
+-90 111 135 0
+176 -171 -36 0
+144 -238 237 0
+-194 111 -55 0
+76 203 -38 0
+-105 134 34 0
+55 -194 -239 0
+37 -95 177 0
+-121 -94 -169 0
+-93 49 -175 0
+54 -217 102 0
+-155 11 63 0
+83 -138 109 0
+-68 -30 103 0
+-208 -48 -125 0
+-100 230 -204 0
+-70 222 171 0
+146 -198 158 0
+13 24 98 0
+191 217 100 0
+52 -198 5 0
+166 219 -43 0
+107 -247 105 0
+-83 -13 86 0
+232 -68 -61 0
+-107 185 -112 0
+-106 225 -226 0
+48 71 238 0
+144 -83 -135 0
+-56 -27 -39 0
+243 94 55 0
+38 139 35 0
+-146 -127 180 0
+182 -83 84 0
+45 211 -70 0
+31 -91 72 0
+-146 -232 244 0
+-39 140 -200 0
+219 205 -220 0
+-94 -65 -87 0
+-143 180 -24 0
+70 161 -201 0
+136 128 85 0
+-223 64 62 0
+-69 209 147 0
+88 -15 -225 0
+80 48 -149 0
+224 246 -117 0
+-166 -53 -26 0
+-59 -63 -100 0
+-1 -55 -237 0
+214 246 13 0
+-101 249 -118 0
+-180 -222 -250 0
+-97 -7 58 0
+-169 -213 -80 0
+-120 152 242 0
+5 115 15 0
+70 -12 -43 0
+65 63 -248 0
+-148 177 173 0
+-224 201 12 0
+-231 -88 -141 0
+66 29 -233 0
+99 163 12 0
+-56 -183 197 0
+89 133 229 0
+126 79 149 0
+-238 -139 -137 0
+-170 -95 -148 0
+-202 -246 115 0
+-176 -63 158 0
+216 38 -83 0
+221 41 44 0
+-91 181 135 0
+171 -63 71 0
+-60 136 107 0
+222 5 57 0
+210 -89 -151 0
+-44 36 -91 0
+3 -194 -15 0
+-117 38 -110 0
+242 226 155 0
+158 -240 110 0
+218 -37 90 0
+11 217 57 0
+250 -157 73 0
+-9 -122 53 0
+185 -76 73 0
+-99 -101 102 0
+52 -171 33 0
+-143 195 228 0
+42 -63 -229 0
+-178 -160 224 0
+-65 -54 208 0
+232 -43 -38 0
+85 43 -178 0
+-171 -50 45 0
+47 71 -180 0
+127 135 -187 0
+-201 33 222 0
+-221 -131 -165 0
+-131 114 221 0
+195 60 185 0
+-8 206 -140 0
+124 -240 223 0
+-217 198 149 0
+52 -227 -206 0
+136 -96 29 0
+-76 -228 64 0
+-157 -47 93 0
+148 -108 17 0
+139 40 -89 0
+63 198 86 0
+199 94 -33 0
+-116 216 -2 0
+27 242 -1 0
+-156 177 28 0
+234 -83 37 0
+-124 -123 -149 0
+112 -1 173 0
+7 235 10 0
+245 -184 -224 0
+-112 -161 77 0
+203 104 124 0
+-59 -123 -10 0
+250 -242 -203 0
+56 243 164 0
+24 126 -2 0
+-101 227 86 0
+-233 138 -218 0
+-211 -119 -196 0
+143 -183 -186 0
+-148 236 76 0
+-131 -187 -77 0
+62 -144 -43 0
+-232 96 -30 0
+121 -152 89 0
+7 105 37 0
+182 135 -58 0
+-164 -162 -112 0
+-118 173 93 0
+-54 220 -2 0
+-193 32 65 0
+-101 46 203 0
+-127 -219 -215 0
+235 -42 -77 0
+-179 -242 -145 0
+-140 77 203 0
+-23 157 -112 0
+-28 -193 134 0
+-147 -166 100 0
+148 171 -31 0
+-214 241 -166 0
+-217 204 93 0
+-219 -211 -142 0
+107 57 50 0
+227 220 119 0
+-234 62 24 0
+-131 -223 24 0
+232 133 -4 0
+74 200 -201 0
+211 6 220 0
+-113 -9 102 0
+-207 39 -80 0
+24 244 -125 0
+171 190 -167 0
+122 24 -201 0
+-132 216 -235 0
+-90 58 -181 0
+161 62 185 0
+-3 -9 242 0
+115 2 -78 0
+42 -225 -145 0
+168 -46 55 0
+-40 -126 -154 0
+26 164 -1 0
+-71 199 -133 0
+-78 55 -201 0
+219 249 -203 0
+116 137 -43 0
+11 -137 -118 0
+143 224 -150 0
+-2 -199 218 0
+108 -140 -47 0
+228 28 -30 0
+224 58 -27 0
+-42 -211 153 0
+104 -238 -222 0
+-120 47 -33 0
+61 85 223 0
+230 -78 -77 0
+185 -210 106 0
+111 4 80 0
+226 195 -66 0
+-204 172 8 0
+195 -241 191 0
+166 182 -69 0
+-114 -130 223 0
+213 -189 243 0
+-201 151 26 0
+70 21 28 0
+-119 -208 -207 0
+-156 94 106 0
+-177 250 -6 0
+24 -227 -103 0
+87 4 -200 0
+139 133 -200 0
+-71 226 23 0
+-244 -32 60 0
+202 225 -63 0
+-233 -111 1 0
+175 -114 -147 0
+54 -30 193 0
+-137 -199 78 0
+31 -149 3 0
+90 186 138 0
+105 -43 -227 0
+-218 220 -216 0
+206 -182 119 0
+62 158 -215 0
+-92 -91 -103 0
+92 35 -13 0
+-4 -148 -219 0
+-119 125 8 0
+214 -160 -39 0
+120 -1 59 0
+190 216 -43 0
+-11 -5 119 0
+-118 -36 -187 0
+-200 -152 98 0
+194 -50 -8 0
+32 88 -154 0
+-45 106 159 0
+-226 202 112 0
+-101 28 -201 0
+-206 -209 180 0
+152 244 165 0
+210 112 -115 0
+195 -1 -151 0
+104 -14 5 0
+185 -167 -229 0
+192 31 184 0
+-116 -46 -113 0
+178 -108 140 0
+56 17 67 0
+80 6 195 0
+-250 -61 -106 0
+184 31 -236 0
+185 188 -7 0
+-127 -72 187 0
+-221 212 -13 0
+240 -19 192 0
+36 -135 -139 0
+-170 -116 87 0
+96 66 173 0
+40 -229 16 0
+184 -134 55 0
+233 13 141 0
+19 -204 -188 0
+-208 226 -192 0
+-185 48 178 0
+236 -34 -204 0
+-46 141 -194 0
+-8 -30 181 0
+72 -92 37 0
+-212 157 -92 0
+-210 -92 -225 0
+95 176 23 0
+-16 120 -63 0
+7 -136 40 0
+-88 -110 168 0
+121 208 -115 0
+228 215 171 0
+35 -19 -151 0
+45 222 -101 0
+95 103 108 0
+-35 -152 -64 0
+144 -226 -149 0
+-95 11 -170 0
+211 -152 106 0
+-59 80 223 0
+22 126 -156 0
+-19 -167 128 0
+-68 76 -114 0
+-121 32 122 0
+96 152 187 0
+-72 -90 -152 0
+129 193 93 0
+-109 -177 -149 0
+193 35 2 0
+172 -106 246 0
+134 -245 152 0
+212 100 -19 0
+127 -214 -56 0
+-245 -128 3 0
+89 -114 119 0
+147 -105 37 0
+-125 -102 -108 0
+22 33 177 0
+46 52 -240 0
+-62 -136 45 0
+222 -117 120 0
+16 -40 111 0
+86 206 49 0
+123 -78 -158 0
+44 188 90 0
+-103 89 176 0
+232 -112 130 0
+-109 70 19 0
+-15 204 128 0
+-127 -110 192 0
+-26 52 -147 0
+-41 5 105 0
+234 206 -160 0
+-52 128 195 0
+-184 -176 121 0
+184 167 -120 0
+-74 158 -148 0
+-18 62 200 0
+169 115 -190 0
+-124 229 164 0
+-63 37 221 0
+-76 190 245 0
+-217 136 -134 0
+-228 231 62 0
+156 -218 -85 0
+124 -25 225 0
+-182 32 -31 0
+-250 -221 35 0
+-80 114 -78 0
+248 186 139 0
+-19 -128 125 0
+85 154 113 0
+14 -80 -88 0
+-145 -43 88 0
+145 181 55 0
+134 31 -187 0
+-89 -109 -62 0
+-68 237 -222 0
+-130 -180 -227 0
+-86 48 90 0
+-199 -215 -132 0
+44 60 -14 0
+-248 -79 224 0
+-154 114 189 0
+-39 167 -139 0
+230 -5 -184 0
+17 184 -215 0
+-17 37 54 0
+-249 159 -151 0
+-29 -78 -148 0
+136 186 209 0
+-224 64 33 0
+-163 111 108 0
+-4 99 23 0
+137 -64 138 0
+-237 -116 29 0
+44 158 -139 0
+147 -8 -92 0
+-118 -228 42 0
+201 19 141 0
+-182 -39 -238 0
+-36 27 -79 0
+-157 249 -181 0
+-191 121 132 0
+-59 212 32 0
+72 -233 122 0
+230 -229 -132 0
+-231 60 -233 0
+-66 -249 106 0
+-210 -40 -79 0
+-75 61 111 0
+-51 -98 -32 0
+-166 137 245 0
+-134 113 52 0
+-107 19 72 0
+-64 85 -121 0
+227 82 -87 0
+-194 180 -128 0
+241 -211 38 0
+-74 -56 115 0
+206 -54 -210 0
+-66 -204 72 0
+144 156 16 0
+-197 84 54 0
+-80 199 59 0
+69 49 103 0
+19 190 -34 0
+-176 235 -151 0
+33 -202 -78 0
+-90 -15 -151 0
+198 28 -43 0
+-131 -74 -108 0
+159 89 -184 0
+-54 -62 141 0
+-83 -238 91 0
+227 -84 -76 0
+187 -15 205 0
+-243 -87 -207 0
+115 200 -48 0
+-82 -163 184 0
+221 -122 153 0
+-178 77 -52 0
+-250 113 -65 0
+-192 -153 -161 0
+185 -240 153 0
+187 -133 171 0
+-127 -108 139 0
+-158 -240 -121 0
+183 -137 -62 0
+-84 -60 210 0
+-35 -115 -5 0
+-16 81 41 0
+163 -167 -20 0
+192 -71 -102 0
+-223 -248 -37 0
+54 -18 79 0
+242 -238 114 0
+64 55 -39 0
+-48 30 -248 0
+-126 -6 -159 0
+-127 242 -160 0
+238 42 120 0
+224 -138 -66 0
+-189 -18 -183 0
+99 -43 -220 0
+149 -82 -59 0
+-25 239 60 0
+99 -201 137 0
+-50 188 -223 0
+84 147 157 0
+240 -183 -212 0
+239 243 -149 0
+119 217 162 0
+46 126 21 0
+204 196 21 0
+-174 26 53 0
+-45 63 -15 0
+155 -229 -99 0
+-149 29 -51 0
+43 250 -107 0
+-183 -34 -169 0
+7 -214 -55 0
+154 -61 -143 0
+-176 -25 -155 0
+-138 235 201 0
+137 -231 95 0
+48 -223 -227 0
+-16 -147 193 0
+-34 75 94 0
+140 -189 21 0
+-152 70 49 0
+-9 173 -238 0
+118 -39 129 0
+-129 -230 -101 0
+7 -58 89 0
+-96 50 -92 0
+-158 54 -139 0
+126 -156 -201 0
+-31 94 127 0
+32 72 103 0
+-142 195 51 0
+200 -246 -150 0
+-1 17 94 0
+-98 -52 -152 0
+20 213 -38 0
+-123 -225 -81 0
+-19 -158 165 0
+-107 -246 73 0
+-9 45 145 0
+-127 39 164 0
+-34 95 130 0
+-226 -210 213 0
+-250 -201 -91 0
+209 -191 -78 0
+-245 -248 192 0
+208 -191 -157 0
+136 123 169 0
+-117 -17 -74 0
+-140 174 162 0
+121 -37 119 0
+124 -152 217 0
+-240 -125 237 0
+33 90 -20 0
+-77 -187 -160 0
+-109 24 -239 0
+-3 -209 85 0
+84 -229 -199 0
+74 170 12 0
+-79 102 -245 0
+-191 -197 172 0
+-111 -176 -216 0
+-19 229 184 0
+-139 -123 240 0
+-208 45 -116 0
+100 -224 151 0
+-28 -13 -249 0
+-198 -226 -122 0
+-201 -81 43 0
+205 -189 53 0
+-23 240 -60 0
+-246 38 -224 0
+138 229 156 0
+-179 -60 -221 0
+204 -98 32 0
+-46 -228 -178 0
+-215 25 112 0
+96 -34 198 0
+32 -203 -225 0
+231 -156 -232 0
+130 -224 -197 0
+196 156 209 0
+99 210 -49 0
+91 95 143 0
+-199 79 -250 0
+150 221 152 0
+-31 223 249 0
+4 -127 -73 0
+-244 13 231 0
+-231 -27 -156 0
+159 -107 -217 0
+-153 -234 -216 0
+15 227 122 0
+-223 -23 -56 0
+-31 139 160 0
+-183 28 -223 0
+142 -241 -159 0
+-102 -157 -109 0
+17 -216 160 0
+206 -200 207 0
+-232 -70 -104 0
+131 -110 182 0
+171 70 -230 0
+167 -58 189 0
+145 -86 -57 0
+177 -183 -13 0
+-221 18 90 0
+-225 228 -127 0
+177 -174 226 0
+222 -144 -191 0
+-222 -171 -74 0
+214 172 229 0
+-111 49 12 0
+-155 179 -192 0
+-236 14 86 0
+-68 -13 -1 0
+103 210 -123 0
+-116 124 -244 0
+145 -14 -174 0
+28 129 230 0
+-192 123 35 0
+143 -118 241 0
+-211 -64 -128 0
+201 -85 -1 0
+91 198 24 0
+-92 -2 -201 0
+-158 87 83 0
+152 63 94 0
+17 218 119 0
+-162 183 237 0
+-13 -95 -49 0
+179 129 -79 0
+-42 -178 242 0
+169 -224 227 0
+76 152 1 0
+-109 98 99 0
+-150 240 -198 0
+-158 -43 152 0
+-159 -97 82 0
+-203 210 223 0
+208 132 -157 0
+127 -189 -208 0
+-48 -74 -1 0
+-57 132 -26 0
+-210 -220 -120 0
+-241 -131 23 0
+115 -64 -250 0
+31 -185 -239 0
+-210 190 94 0
+144 -38 80 0
+-155 211 134 0
+-238 222 134 0
+-153 74 205 0
+-103 64 28 0
+-57 -193 143 0
+139 110 -73 0
+-93 25 -153 0
+-247 164 20 0
+40 157 -189 0
+172 160 -180 0
+-177 -185 245 0
+180 49 56 0
+28 -22 232 0
+172 13 193 0
+97 1 206 0
+-161 -242 -185 0
+-186 170 -190 0
+59 28 -236 0
+76 246 222 0
+64 -202 51 0
+-20 138 107 0
+96 -228 16 0
+-249 28 44 0
+-193 -143 -113 0
+215 -224 -170 0
+-131 -12 35 0
+84 61 54 0
+-116 94 50 0
+-26 -21 -9 0
+107 -150 -143 0
+-174 45 147 0
+34 -116 174 0
+-109 -80 -113 0
+25 -14 -212 0
+203 9 -46 0
+-231 -209 4 0
+-239 200 151 0
+181 146 -195 0
+-234 79 195 0
+-91 -65 -105 0
+96 141 98 0
+135 -6 215 0
+150 -98 -147 0
+-26 124 -30 0
+-66 160 206 0
+-60 -142 6 0
+-173 -126 -28 0
+138 -60 -43 0
+235 -179 57 0
+156 -215 34 0
+-227 195 -221 0
+6 -25 -87 0
+228 49 -57 0
+203 68 139 0
+-133 237 -1 0
+-247 74 -80 0
+179 62 206 0
+12 73 -165 0
+-28 45 -65 0
+-203 186 -132 0
+-88 99 53 0
+99 246 171 0
+172 23 -88 0
+-84 119 224 0
+-44 -237 211 0
+-155 -28 -163 0
+-67 44 -224 0
+3 19 7 0
+-19 189 -216 0
+-18 130 -237 0
+-42 -210 -204 0
+-183 -233 192 0
+-141 222 -59 0
+-244 80 -102 0
+-210 90 68 0
+123 110 -82 0
+-226 -246 -231 0
+206 -43 -172 0
+178 -184 -63 0
+217 103 224 0
+-157 -172 -152 0
+-236 -223 211 0
+166 96 155 0
+-70 38 -28 0
+-18 34 23 0
+-33 -224 -242 0
+149 -197 213 0
+-222 -79 198 0
+-220 235 -95 0
+-167 -135 194 0
+10 159 -235 0
+-241 242 143 0
+55 -72 133 0
+-59 -168 -33 0
+64 81 -35 0
+18 30 -70 0
+198 -22 153 0
+146 29 75 0
+76 -89 189 0
+10 55 -184 0
+205 79 233 0
+186 29 35 0
+-91 14 37 0
+187 -118 -155 0
+-228 236 201 0
+115 235 -90 0
+-111 193 199 0
+-153 122 80 0
+-6 223 -239 0
+57 -76 -200 0
+18 -101 -214 0
+-28 -59 -165 0
+42 107 67 0
+-243 -52 -77 0
+-196 20 -249 0
+125 -45 87 0
+-60 -179 93 0
+-169 196 -154 0
+-89 60 -1 0
+88 -237 233 0
+-73 7 -53 0
+193 -154 133 0
+-82 46 232 0
+-184 119 -109 0
+-148 -121 136 0
+-138 -30 24 0
+145 -130 -23 0
+63 -247 -195 0
+-94 166 93 0
+-103 -247 -246 0
+6 -14 -232 0
+-148 98 -50 0
+69 -187 -212 0
+237 76 -108 0
+-205 130 204 0
+-152 -124 93 0
+54 -51 143 0
+68 39 -204 0
+222 39 11 0
+-37 72 169 0
+-69 173 160 0
+206 -110 112 0
+116 30 -121 0
+4 29 210 0
+-53 -144 -149 0
+-7 202 -93 0
+228 -69 -9 0
+171 32 1 0
+-212 104 -87 0
+-249 -170 -89 0
+-68 146 175 0
+59 -39 105 0
+39 48 -53 0
+-98 -50 7 0
+-129 221 -44 0
+190 186 -79 0
+151 -155 179 0
+27 11 -104 0
+-233 147 -242 0
+-113 -210 183 0
+-89 -118 237 0
+-58 -132 -236 0
+-42 -163 218 0
+87 -225 -164 0
+-40 -76 -204 0
+24 -71 -249 0
+91 46 -111 0
+-33 -73 -161 0
+-17 -54 127 0
+-174 -172 -167 0
+9 -168 -219 0
+237 19 1 0
+95 -128 105 0
+157 144 127 0
+124 247 180 0
+-188 -134 -241 0
+112 -127 187 0
+-145 68 158 0
+-73 228 179 0
+-207 -135 249 0
+-210 113 -6 0
+119 -130 -23 0
+-87 -138 -63 0
+-30 -210 112 0
+-210 116 -7 0
+81 -211 43 0
+233 30 -191 0
+250 -171 -71 0
+196 -194 168 0
+-179 111 -191 0
+-116 -150 153 0
+-220 -219 -93 0
+-94 224 99 0
+122 232 207 0
+115 -218 219 0
+-247 -19 -187 0
+214 147 143 0
+234 150 -90 0
+95 -185 -107 0
+-160 -88 113 0
+167 140 -33 0
+-27 2 -106 0
+35 42 61 0
+249 219 59 0
+2 180 120 0
+-129 225 -151 0
+-121 104 -192 0
+-138 -57 -41 0
+-73 179 -133 0
+164 -36 -83 0
+-45 -28 -116 0
+135 60 47 0
+76 173 125 0
+-31 194 233 0
+-67 239 -53 0
+-40 67 -231 0
+-98 -148 229 0
+213 -50 187 0
+141 197 2 0
+-140 177 66 0
+145 115 -155 0
+44 117 65 0
+-36 223 88 0
+30 200 31 0
+-212 237 174 0
+-49 -177 167 0
+-218 63 -148 0
+-25 46 23 0
+25 -54 226 0
+163 -88 21 0
+98 -41 -9 0
+-98 -181 18 0
+-182 -194 -137 0
+-230 214 -46 0
+240 182 9 0
+93 -116 41 0
+57 -228 186 0
+165 154 -49 0
+42 88 -202 0
+8 33 -152 0
+17 -136 -35 0
+-62 45 141 0
+-102 33 -18 0
+138 -126 214 0
+-59 -221 39 0
+-130 4 -218 0
+-78 123 250 0
+83 221 -151 0
+-225 8 110 0
+-194 156 43 0
+65 -245 34 0
+-238 -237 217 0
+106 -37 -56 0
+240 111 184 0
+-172 -243 185 0
+245 40 -45 0
+122 60 -189 0
+-174 -152 181 0
+155 -147 -178 0
+117 -168 -219 0
+-102 -127 234 0
+99 26 -114 0
+181 36 -62 0
+178 169 116 0
+81 -123 -30 0
+-243 -26 -38 0
+-31 20 217 0
+55 239 -116 0
+-85 -27 49 0
+62 212 177 0
+3 -4 127 0
+-233 -9 68 0
+-28 208 -114 0
+23 159 240 0
+125 171 83 0
+152 16 97 0
+-77 -39 -84 0
+-19 -15 195 0
+95 -180 177 0
+204 -125 -207 0
+-130 78 235 0
+-51 182 79 0
+122 -95 -80 0
+85 -72 -167 0
+48 -109 -41 0
+-223 -25 -44 0
+248 -51 -81 0
+-141 57 -2 0
+208 -207 -50 0
+41 15 -63 0
+48 -242 -232 0
+240 196 -32 0
+-227 -163 -23 0
+207 -90 102 0
+210 5 84 0
+-230 -134 95 0
+-193 214 239 0
+-192 -11 173 0
+-109 -196 145 0
+30 -161 -113 0
+216 164 83 0
+103 67 21 0
+102 -233 66 0
+-79 56 -250 0
+-82 -45 112 0
+-144 -124 208 0
+33 -37 -149 0
+-109 230 173 0
+-229 82 -174 0
+179 137 -235 0
+84 -225 -16 0
+123 233 235 0
+-211 144 92 0
+-2 46 -177 0
+115 -202 -119 0
+241 75 174 0
+-205 238 -105 0
+-53 219 34 0
+-239 103 -148 0
+147 -143 123 0
+-214 -232 188 0
+10 -193 41 0
+6 185 18 0
+141 -68 -222 0
+209 -129 -143 0
+-194 -108 116 0
+-184 5 -46 0
+23 -125 -113 0
+-159 54 -8 0
+130 -50 119 0
+156 -74 120 0
+-124 139 -119 0
+-97 -134 112 0
+-150 -82 -79 0
+148 91 219 0
+-30 19 -172 0
+-13 164 -15 0
+-119 -179 -11 0
+5 -38 -15 0
+-4 94 -66 0
+-71 140 -198 0
+-119 -51 -151 0
+9 50 162 0
+-203 35 88 0
+192 122 -127 0
+-187 -164 -126 0
+121 -25 -215 0
+-5 -167 -196 0
+216 138 -11 0
+-86 46 250 0
+199 239 -8 0
+-238 244 188 0
+98 -19 -11 0
+19 -151 -83 0
+-158 40 232 0
+1 200 -23 0
+171 -11 -139 0
+-203 60 173 0
+-160 -122 -8 0
+-179 -58 1 0
+-194 -216 -202 0
+-111 144 -237 0
+153 -206 9 0
+-182 79 -233 0
+-150 -249 -191 0
+153 102 121 0
+79 -178 -239 0
+14 -79 196 0
+133 84 193 0
+-248 173 -13 0
+-93 225 22 0
+58 -158 -16 0
+-36 236 -20 0
+96 140 7 0
+53 -2 202 0
+235 -32 249 0
+94 53 -234 0
+50 134 -191 0
+-243 -211 88 0
+-28 52 94 0
+3 101 65 0
+78 200 -112 0
+-16 -108 -93 0
+-154 -169 242 0
+-196 220 -177 0
+-177 187 24 0
+5 242 -195 0
+81 157 -247 0
+-191 -3 123 0
+-16 115 10 0
+-152 -217 33 0
+-7 -149 67 0
+105 45 23 0
+-55 -23 109 0
+-217 -220 -144 0
+73 -63 149 0
+-180 -245 -191 0
+-113 -50 199 0
+242 -205 52 0
+129 98 -226 0
+205 146 -200 0
+145 -180 -182 0
+-240 -39 176 0
+217 166 -31 0
+-24 -144 9 0
+137 -120 -228 0
+-24 -117 158 0
+%
+0
+"""
--- a/src/tests/cli/t_variants.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/t_variants.py	Wed Nov 18 15:53:48 2009 -0800
@@ -53,6 +53,8 @@
         add file /tmp/bronze_zone/etc/sparc_global mode=0555 owner=root group=bin path=etc/zone_arch variant.arch=sparc variant.opensolaris.zone=global
         add file /tmp/bronze_zone/etc/i386_global mode=0555 owner=root group=bin path=etc/zone_arch variant.arch=i386 variant.opensolaris.zone=global
         add file /tmp/bronze_zone/etc/zos_global mode=0555 owner=root group=bin path=etc/zone_arch variant.arch=zos variant.opensolaris.zone=global
+        add file /tmp/bronze_zone/false mode=0555 owner=root group=bin path=etc/isdebug variant.debug.kernel=false 
+        add file /tmp/bronze_zone/true mode=0555 owner=root group=bin path=etc/isdebug variant.debug.kernel=true
         close"""
 
         silver10 = """
@@ -73,7 +75,9 @@
                 "/tmp/bronze_zone/etc/zos_nonglobal",
                 "/tmp/bronze_zone/etc/i386_global",
                 "/tmp/bronze_zone/etc/sparc_global",
-                "/tmp/bronze_zone/etc/zos_global"
+                "/tmp/bronze_zone/etc/zos_global",
+                "/tmp/bronze_zone/false",
+                "/tmp/bronze_zone/true",
                 ]
 
         def setUp(self):
@@ -107,29 +111,42 @@
 
                 self.__vtest(depot, "sparc", "global")
                 self.__vtest(depot, "i386", "global")
-                self.__vtest(depot, "zos", "global")
-                self.__vtest(depot, "sparc", "nonglobal")
-                self.__vtest(depot, "i386", "nonglobal")
-                self.__vtest(depot, "zos", "nonglobal")
+                self.__vtest(depot, "zos", "global", "true")
+                self.__vtest(depot, "sparc", "nonglobal", "true")
+                self.__vtest(depot, "i386", "nonglobal", "false")
+                self.__vtest(depot, "zos", "nonglobal", "false")
 
                 self.image_create(depot, 
                     additional_args="--variant variant.arch=%s" % "sparc")
                 self.pkg("install silver", exit=1)
 
-        def __vtest(self, depot, arch, zone):
+        def __vtest(self, depot, arch, zone, isdebug=""):
                 """ test if install works for spec'd arch"""
-                self.image_create(depot, additional_args="--variant variant.arch=%s --variant variant.opensolaris.zone=%s" % (arch, zone))
+
+                if isdebug:
+                        do_isdebug = "--variant variant.debug.kernel=%s" % isdebug
+                else:
+                        do_isdebug = ""
+                        is_debug = "false"
+
+                self.image_create(depot, 
+                    additional_args="--variant variant.arch=%s --variant variant.opensolaris.zone=%s %s" % (
+                    arch, zone, do_isdebug))
                 self.pkg("install bronze")
+                self.pkg("verify")
                 self.file_contains("etc/motd", arch)
                 self.file_contains("etc/zone_motd", zone)
                 self.file_contains("etc/zone_arch", zone)
                 self.file_contains("etc/zone_arch", arch)
-                self.pkg("verify")
+                self.file_contains("etc/isdebug", isdebug)
                 self.image_destroy()
 
         def file_contains(self, path, string):
                 file_path = os.path.join(self.get_img_path(), path)
-                f = file(file_path)
+                try:
+                        f = file(file_path)
+                except:
+                        self.assert_(False, "File %s is missing" % path)
                 for line in f:
                         if string in line:
                                 f.close()
--- a/src/tests/cli/testutils.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/cli/testutils.py	Wed Nov 18 15:53:48 2009 -0800
@@ -31,6 +31,7 @@
 import errno
 import platform
 import tempfile
+import time
 try:
         import pwd
 except ImportError:
@@ -208,6 +209,25 @@
                         str += format_debug(self.__debug)
                 return str
 
+
+class AssFailException(pkg5unittest.Pkg5TestCase.failureException):
+        def __init__(self, comment = None, debug=None):
+                Exception.__init__(self)
+                self.__comment = comment
+                self.__debug = debug
+
+        def __str__(self):
+
+                str = ""
+                if self.__comment is None:
+                        str += Exception.__str__(self)
+                else:
+                        str += format_comment(self.__comment)
+                if self.__debug is not None and self.__debug != "":
+                        str += format_debug(self.__debug)
+                return str
+
+
 class UnexpectedExitCodeException(pkg5unittest.Pkg5TestCase.failureException):
         def __init__(self, command, expected, got, output=None, comment=None,
             debug=None):
@@ -225,8 +245,8 @@
 
                 str = ""
                 str += format_comment(self.__comment)
-
-                str += "  Expected exit status: %d.  Got: %d." % \
+                
+                str += "  Expected exit status: %s.  Got: %d." % \
                     (self.__expected, self.__got)
 
                 str += format_output(self.__command, self.__output)
@@ -243,12 +263,13 @@
         def __init__(self, com = ""):
                 Exception.__init__(self, com)
 
-
-
 class CliTestCase(pkg5unittest.Pkg5TestCase):
         __debug = False
         __debug_buf = ""
 
+        def in_debug_mode(self):
+                return self.__debug
+
         def setUp(self):
                 self.image_dir = None
                 self.pid = os.getpid()
@@ -271,14 +292,13 @@
         def tearDown(self):
                 self.image_destroy()
 
-        # In the case of an assertion (not a pkg() failure) dump the most
-        # recent debug info to stdout so that it is captured in the test log.
+        # In the case of an assertion (not a pkg() failure)
+        # raise an assertion so we can get the debug logs displayed
         def assert_(self, expr, msg=None):
                 if not expr:
-                        print "--- (most recent debug buffer) " + "-" * 39
-                        print self.get_debugbuf()
-                        print "-" * 70
-                        pkg5unittest.Pkg5TestCase.assert_(self, expr, msg)
+                        raise AssFailException(comment=msg,
+                            debug=self.get_debugbuf())
+ 
 
         def get_img_path(self):
                 return self.img_path
@@ -339,7 +359,9 @@
         def image_destroy(self):
                 self.debug("image_destroy")
                 os.chdir(self.pwd)
-                if os.path.exists(self.img_path):
+
+                
+                if not self.in_debug_mode() and os.path.exists(self.img_path):
                         shutil.rmtree(self.img_path)
 
         def pkg(self, command, exit=0, comment="", prefix="", su_wrap=None):
@@ -375,7 +397,11 @@
                 if retcode == 99:
                         raise TracebackException(cmdline, self.output, comment,
                             debug=self.get_debugbuf())
-                elif retcode != exit:
+
+                if not isinstance(exit, list):
+                        exit = [exit]
+
+                if retcode not in exit:
                         raise UnexpectedExitCodeException(cmdline,
                             exit, retcode, self.output, comment,
                             debug=self.get_debugbuf())
@@ -456,7 +482,7 @@
 
                 return retcode
 
-        def pkgsend(self, depot_url="", command="", exit=0, comment=""):
+        def pkgsend(self, depot_url="", command="", exit=0, comment="", retry400=True):
 
                 wrapper = ""
                 if os.environ.has_key("PKGCOVERAGE"):
@@ -464,8 +490,7 @@
 
                 args = []
                 if depot_url:
-                        depot_url = "-s " + depot_url
-                        args.append(depot_url)
+                        args.append("-s " + depot_url)
 
                 if command:
                         args.append(command)
@@ -497,6 +522,7 @@
                         # retcode != 0 will be handled below
 
                 else:
+                        
                         p = subprocess.Popen(cmdline,
                             shell = True,
                             stdout = subprocess.PIPE,
@@ -505,13 +531,24 @@
                         output = p.stdout.read()
                         retcode = p.wait()
                         self.debugresult(retcode, output)
+                        
+                        if retcode !=0:
+                                if retry400 and (command.startswith("publish") or \
+                                    command.startswith("open")) and \
+                                    "status '400'" in output:    
+                                    # this may be error 400 - too quick to republish
+                                    # try once more after sleeping
+                                         time.sleep(1)
+                                         return self.pkgsend(depot_url, command, 
+                                             exit, comment, retry400=False)
 
-                        if retcode == 0 and command.startswith("close"):
+                        elif command.startswith("close") or \
+                            command.startswith("publish"):
                                 os.environ["PKG_TRANS_ID"] = ""
                                 for l in output.splitlines():
                                         if l.startswith("pkg:/"):
                                                 published = l
-                                                break
+                                                break                                        
 
                 if retcode == 99:
                         raise TracebackException(cmdline, output, comment,
@@ -536,18 +573,33 @@
 
                 plist = []
                 try:
+                        accumulate = []
+                        current_fmri = None
+
                         for line in commands.split("\n"):
                                 line = line.strip()
                                 if line == "":
                                         continue
-                                retcode, published = self.pkgsend(depot_url, line)
-                                if retcode == 0 and published:
-                                        plist.append(published)
-
-                except TracebackException:
-                        if os.environ.get("PKG_TRANS_ID", None):
-                                self.pkgsend(depot_url, "close -A", exit=0)
-                        raise
+                                if line.startswith("add"):
+                                        accumulate.append(line[4:])
+                                else:
+                                        if current_fmri: # send any content seen so far (can be 0)
+                                                self.assert_(current_fmri != None, 
+                                                    "Missing open in pkgsend string")
+                                                f = tempfile.NamedTemporaryFile(dir="/tmp")
+                                                for l in accumulate:
+                                                        f.write("%s\n" % l)
+                                                f.flush()
+                                                cmd = "publish -d / %s %s" % (current_fmri, f.name)
+                                                current_fmri = None
+                                                accumulate = []
+                                                retcode, published = self.pkgsend(depot_url, cmd)
+                                                if retcode == 0 and published:
+                                                        plist.append(published)
+                                                f.close()
+                                        if line.startswith("open"):
+                                                current_fmri = line[5:].strip()
+                                        
                 except UnexpectedExitCodeException, e:
                         if e.exitcode != exit:
                                 raise
@@ -558,6 +610,7 @@
                             debug=self.get_debugbuf())
 
                 return plist
+                                                
 
         def cmdline_run(self, cmdline, exit=0):
                 p = subprocess.Popen(cmdline,
@@ -742,7 +795,9 @@
                         try:
                                 self.check_traceback(dc.get_logpath())
                         finally:
-                                dc.kill()
+                                status = dc.kill()
+                                if status:
+                                        self.debug("depot: %s" % status)
                                 shutil.rmtree(dir)
 
                 self.dcs = None
--- a/src/tests/pkg5unittest.py	Tue Nov 17 17:06:35 2009 -0600
+++ b/src/tests/pkg5unittest.py	Wed Nov 18 15:53:48 2009 -0800
@@ -60,7 +60,6 @@
         def __str__(self):
                 return "%s.py %s.%s" % (self.__class__.__module__,
                     self.__class__.__name__, self.__testMethodName)
-
         def getTeardownFunc(self):
                 return (self, self.tearDown)
 
@@ -168,7 +167,7 @@
                     tdf = test.getTeardownFunc()[1]
                     tdf()
                     if test.persistent_depot:
-                        test.dc.kill()
+                            test.reallytearDown()
                     raise
 
         def getDescription(self, test):
@@ -339,6 +338,7 @@
                                 # For test classes with persistent_depot set,
                                 # make their setup/teardown methods do nothing
                                 # since we are calling them here.
+                                test.reallytearDown = tdf
                                 test.setUp = donothing
                                 test.tearDown = donothing
                         test_start = time.time()