15744850 no actuators are run when pkg manipulates a booted NGZ image from the GZ s11-update
author thejaswini.k@oracle.com
Fri, 18 Apr 2014 10:14:06 +0530
branch s11-update
changeset 3063 bbaaf1579de3
parent 3059 e914f511bf0c
child 3064 608a03af8e43
15744850 no actuators are run when pkg manipulates a booted NGZ image from the GZ 17457046 pkg should display list of new / modified editable files for operation 18399970 file action should ignore preserve attribute for elf files 18414661 plan summary output should have more whitespace 18477216 ImagePlan.plan_desc doesn't include all package changes 17478601 provide a pkg(5) private module to compute SHA512/256 15615267 ability to require updates based on complete content 16273726 https support for svc:/application/pkg/depot
doc/client_api_versions.txt
src/client.py
src/depot-config.py
src/depot.py
src/man/pkg.depot-config.1m
src/modules/actions/directory.py
src/modules/actions/file.py
src/modules/actions/generic.py
src/modules/actions/hardlink.py
src/modules/actions/license.py
src/modules/actions/link.py
src/modules/client/actuator.py
src/modules/client/api.py
src/modules/client/imageconfig.py
src/modules/client/imageplan.py
src/modules/client/linkedimage/zone.py
src/modules/client/plandesc.py
src/modules/digest.py
src/modules/lint/engine.py
src/modules/misc.py
src/modules/server/depot.py
src/modules/sha512_t.c
src/modules/smf.py
src/pkg/manifests/package:pkg.p5m
src/pkgdep.py
src/setup.py
src/svc/pkg-depot.xml
src/svc/svc-pkg-depot
src/sysrepo.py
src/tests/api/t_pkg_api_revert.py
src/tests/api/t_sha512_t.py
src/tests/api/t_smf.py
src/tests/cli/t_actuators.py
src/tests/cli/t_depot_config.py
src/tests/cli/t_pkg_image_update.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_revert.py
src/tests/cli/t_pkg_search.py
src/tests/cli/t_pkg_sysrepo.py
src/tests/cli/t_pkgrecv.py
src/tests/cli/t_pkgrepo.py
src/tests/cli/t_pkgsend.py
src/tests/cli/t_pkgsign.py
src/tests/pkg5unittest.py
src/tests/ro_data/elftest.so.1
src/tests/ro_data/elftest.so.2
src/util/apache2/depot/depot_httpd.conf.mako
src/util/misc/user_attr.d/package:pkg
--- a/doc/client_api_versions.txt	Wed Apr 09 17:53:35 2014 -0700
+++ b/doc/client_api_versions.txt	Fri Apr 18 10:14:06 2014 +0530
@@ -1,3 +1,12 @@
+Version 79:
+Compatible with clients using versions 72-78.
+
+    pkg.client.api.PlanDescription has changed as follows:
+
+        * Added get_editable_changes() function to return the list of
+	  "editable" files that will be moved, removed, installed, or
+	  updated during the planned operation.
+
 Version 78:
 Compatible with clients using versions 72-77.
 
--- a/src/client.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/client.py	Fri Apr 18 10:14:06 2014 +0530
@@ -91,7 +91,7 @@
         import sys
         sys.exit(1)
 
-CLIENT_API_VERSION = 77
+CLIENT_API_VERSION = 79
 PKG_CLIENT_NAME = "pkg"
 
 JUST_UNKNOWN = 0
@@ -1005,7 +1005,7 @@
                 if verbose > 0:
                         disp.extend(["fmris", "mediators", "services",
                                      "variants/facets", "boot-archive",
-                                     "release-notes"])
+                                     "release-notes", "editable"])
                 if verbose > 1:
                         disp.append("actions")
                 if verbose > 2:
@@ -1110,16 +1110,23 @@
                 for s in status:
                         logger.info("%s %s" % (s[0].rjust(rjust_status),
                             s[1].rjust(rjust_value)))
-                # Ensure there is a blank line between status information and
-                # remainder.
-                logger.info("")
-
+
+        need_blank = True
         if "mediators" in disp and mediators:
+                if need_blank:
+                        logger.info("")
+
                 logger.info(_("Changed mediators:"))
                 for x in mediators:
                         logger.info("  %s" % x)
+                # output has trailing blank
+                need_blank = False
 
         if "variants/facets" in disp and varcets:
+                if need_blank:
+                        logger.info("")
+                need_blank = True
+
                 logger.info(_("Changed variants/facets:"))
                 for x in varcets:
                         logger.info("  %s" % x)
@@ -1128,6 +1135,9 @@
                 first = True
                 for l in plan.get_solver_errors():
                         if first:
+                                if need_blank:
+                                        logger.info("")
+                                need_blank = True
                                 logger.info(_("Solver dependency errors:"))
                                 first = False
                         logger.info(l)
@@ -1157,6 +1167,10 @@
                         changed[pparent].append((pname, pver))
 
                 if changed:
+                        if need_blank:
+                                logger.info("")
+                        need_blank = True
+
                         logger.info(_("Changed packages:"))
                         last_parent = None
                         for pparent, pname, pver in (
@@ -1175,21 +1189,68 @@
                 last_action = None
                 for action, smf_fmri in plan.services:
                         if last_action is None:
-                                logger.info("Services:")
+                                if need_blank:
+                                        logger.info("")
+                                need_blank = True
+                                logger.info(_("Services:"))
                         if action != last_action:
                                 logger.info("  %s:" % action)
                         logger.info("    %s" % smf_fmri)
                         last_action = action
 
+        if "editable" in disp:
+                moved, removed, installed, updated = plan.get_editable_changes()
+
+                cfg_change_fmt = "    {0}"
+                cfg_changes = []
+                first = True
+
+                def add_cfg_changes(changes, chg_hdr, chg_fmt=cfg_change_fmt):
+                        first = True
+                        for chg in changes:
+                                if first:
+                                        cfg_changes.append("  {0}".format(
+                                            chg_hdr))
+                                        first = False
+                                cfg_changes.append(chg_fmt.format(*chg))
+
+                add_cfg_changes((entry for entry in moved),
+                    _("Move:"), chg_fmt="    {0} -> {1}")
+
+                add_cfg_changes(((src,) for (src, dest) in removed),
+                    _("Remove:"))
+
+                add_cfg_changes(((dest,) for (src, dest) in installed),
+                    _("Install:"))
+
+                add_cfg_changes(((dest,) for (src, dest) in updated),
+                    _("Update:"))
+
+                if cfg_changes:
+                        if need_blank:
+                                logger.info("")
+                        need_blank = True
+                        logger.info(_("Editable files to change:"))
+                        for l in cfg_changes:
+                                logger.info(l)
+
         if "actions" in disp:
-                logger.info("Actions:")
+                if need_blank:
+                        logger.info("")
+                need_blank = True
+
+                logger.info(_("Actions:"))
                 for a in plan.get_actions():
                         logger.info("  %s" % a)
 
 
         if plan.has_release_notes():
+                if need_blank:
+                        logger.info("")
+                need_blank = True
+
                 if "release-notes" in disp:
-                        logger.info("Release Notes:")
+                        logger.info(_("Release Notes:"))
                         for a in plan.get_release_notes():
                                 logger.info("  %s", a)
                 else:
@@ -1242,6 +1303,7 @@
         variants_changed = []
         services_affected = []
         mediators_changed = []
+        editables_changed = []
         licenses = []
         if child_images is None:
                 child_images = []
@@ -1275,6 +1337,27 @@
                 services_affected = plan.services
                 mediators_changed = plan.mediators
 
+                emoved, eremoved, einstalled, eupdated = \
+                    plan.get_editable_changes()
+
+                # Lists of lists are used here to ensure a consistent ordering
+                # and because tuples will be convereted to lists anyway; a
+                # dictionary would be more logical for the top level entries,
+                # but would make testing more difficult and this is a small,
+                # known set anyway.
+                emoved = [[e for e in entry] for entry in emoved]
+                eremoved = [src for (src, dest) in eremoved]
+                einstalled = [dest for (src, dest) in einstalled]
+                eupdated = [dest for (src, dest) in eupdated]
+                if emoved:
+                        editables_changed.append(["moved", emoved])
+                if eremoved:
+                        editables_changed.append(["removed", eremoved])
+                if einstalled:
+                        editables_changed.append(["installed", einstalled])
+                if eupdated:
+                        editables_changed.append(["updated", eupdated])
+
                 for n in plan.get_release_notes():
                         release_notes.append(n)
 
@@ -1296,28 +1379,30 @@
                             (str(dfmri), src_tup, dest_tup))
                         api_inst.set_plan_license_status(dfmri, dest_li.license,
                             displayed=True)
+
         ret = {
-            "create-backup-be": backup_be_created,
-            "create-new-be": new_be_created,
+            "activate-be": be_activated,
+            "add-packages": sorted(added_fmris),
+            "affect-packages": sorted(affected_fmris),
+            "affect-services": sorted(services_affected),
             "backup-be-name": backup_be_name,
             "be-name": be_name,
             "boot-archive-rebuild": boot_archive_rebuilt,
-            "activate-be": be_activated,
+            "change-facets": sorted(facets_changed),
+            "change-editables": editables_changed,
+            "change-mediators": sorted(mediators_changed),
+            "change-packages": sorted(changed_fmris),
+            "change-variants": sorted(variants_changed),
+            "child-images": child_images,
+            "create-backup-be": backup_be_created,
+            "create-new-be": new_be_created,
+            "image-name": None,
+            "licenses": sorted(licenses),
+            "release-notes": release_notes,
+            "remove-packages": sorted(removed_fmris),
             "space-available": space_available,
             "space-required": space_required,
-            "remove-packages": sorted(removed_fmris),
-            "add-packages": sorted(added_fmris),
-            "change-packages": sorted(changed_fmris),
-            "affect-packages": sorted(affected_fmris),
-            "change-facets": sorted(facets_changed),
-            "change-variants": sorted(variants_changed),
-            "affect-services": sorted(services_affected),
-            "change-mediators": sorted(mediators_changed),
-            "release-notes": release_notes,
-            "image-name": None,
-            "child-images": child_images,
             "version": parsable_version,
-            "licenses": sorted(licenses)
         }
         # The image name for the parent image is always None.  If this image is
         # a child image, then the image name will be set when the parent image
--- a/src/depot-config.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/depot-config.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import errno
@@ -40,6 +40,7 @@
 
 from mako.template import Template
 from mako.lookup import TemplateLookup
+from OpenSSL.crypto import *
 
 import pkg
 import pkg.client.api_errors as apx
@@ -142,7 +143,12 @@
         pkg.depot-config ( -d repository_dir | -S ) -r runtime_dir
                 [-c cache_dir] [-s cache_size] [-p port] [-h hostname]
                 [-l logs_dir] [-T template_dir] [-A]
-                [-t server_type] ( [-F] [-P server_prefix] )
+                [-t server_type] ( ( [-F] [-P server_prefix] ) | [--https
+                ( ( --cert server_cert_file --key server_key_file
+                [--cert-chain ssl_cert_chain_file] ) |
+                --cert-key-dir cert_key_directory ) [ (--ca-cert ca_cert_file
+                --ca-key ca_key_file ) ]
+                [--smf-fmri smf_pkg_depot_fmri] ] )
 """))
         sys.exit(retcode)
 
@@ -184,7 +190,8 @@
 
 def _write_httpd_conf(pubs, default_pubs, runtime_dir, log_dir, template_dir,
         cache_dir, cache_size, host, port, sroot,
-        fragment=False, allow_refresh=False):
+        fragment=False, allow_refresh=False, ssl_cert_file="",
+        ssl_key_file="", ssl_cert_chain_file=""):
         """Writes the webserver configuration for the depot.
 
         pubs            repository and publisher information, a list in the form
@@ -219,6 +226,13 @@
 
         'repo_prefix' exists so that we can disambiguate between multiple
         repositories that provide the same publisher.
+
+        'ssl_cert_file' the location of the server certificate file.
+
+        'ssl_key_file' the location of the server key file.
+
+        'ssl_cert_chain_file' the location of the certificate chain file if
+            the server certificate is not signed by the top level CA.
         """
 
         try:
@@ -298,7 +312,10 @@
                     host=host,
                     port=port,
                     sroot=sroot,
-                    allow_refresh=allow_refresh
+                    allow_refresh=allow_refresh,
+                    ssl_cert_file=ssl_cert_file,
+                    ssl_key_file=ssl_key_file,
+                    ssl_cert_chain_file=ssl_cert_chain_file
                 )
 
                 with file(conf_path, "wb") as conf_file:
@@ -373,6 +390,127 @@
                 raise DepotException(
                     _("Unable to write status response: %s") % err)
 
+def _createCertificateKey(serial, CN, starttime, endtime,
+    dump_cert_path, dump_key_path, issuerCert=None, issuerKey=None,
+    key_type=TYPE_RSA, key_bits=1024, digest="sha256"):
+        """Generate a certificate given a certificate request.
+
+        'serial' is the serial number for the certificate
+
+        'CN' is the subject common name of the certificate.
+
+        'starttime' is the timestamp when the certificate starts
+                          being valid. 0 means now.
+
+        'endtime' is the timestamp when the certificate stops being
+                        valid
+
+        'dump_cert_path' is the file the generated certificate is dumped to.
+
+        'dump_key_path' is the file the generated key is dumped to.
+
+        'issuerCert' is the certificate object of the issuer.
+
+        'issuerKey' is the key object of the issuer.
+
+        'key_type' is the key type. Allowed values: TYPE_RSA and TYPE_DSA.
+
+        'key_bits' is number of bits to use in the key.
+
+        'digest' is the digest method to use for signing.
+        """
+
+        key = PKey()
+        key.generate_key(key_type, key_bits)
+
+        cert = X509()
+        cert.set_serial_number(serial)
+        cert.gmtime_adj_notBefore(starttime)
+        cert.gmtime_adj_notAfter(endtime)
+
+        cert.get_subject().C = "US"
+        cert.get_subject().ST = "California"
+        cert.get_subject().L = "Santa Clara"
+        cert.get_subject().O = "pkg5"
+
+        cert.set_pubkey(key)
+        # If an issuer is specified, set the issuer. Otherwise set cert
+        # itself as an issuer.
+        if issuerCert:
+                cert.get_subject().CN = CN
+                cert.set_issuer(issuerCert.get_subject())
+        else:
+                cert.get_subject().CN = "Depot Test CA"
+                cert.set_issuer(cert.get_subject())
+
+        # If there is an issuer key, sign with that key. Otherwise,
+        # create a self-signed cert.
+        if issuerKey:
+                cert.add_extensions([X509Extension("basicConstraints", True,
+                    "CA:FALSE")])
+                cert.sign(issuerKey, digest)
+        else:
+                cert.add_extensions([X509Extension("basicConstraints", True,
+                    "CA:TRUE")])
+                cert.sign(key, digest)
+        with open(dump_cert_path, "w") as f:
+                f.write(dump_certificate(FILETYPE_PEM, cert))
+        with open(dump_key_path, "w") as f:
+                f.write(dump_privatekey(FILETYPE_PEM, key))
+        return (cert, key)
+
+def _generate_server_cert_key(host, port, ca_cert_file="", ca_key_file="",
+    output_dir="/tmp"):
+        """ Generate certificate and key files for https service."""
+        if os.path.exists(output_dir):
+                if not os.path.isdir(output_dir):
+                        raise DepotException(
+                            _("%s is not a directory") % output_dir)
+        else:
+                misc.makedirs(output_dir)
+        server_id = "%s_%s" % (host, port)
+
+        cs_prefix = "server_%s" % server_id
+        server_cert_file = os.path.join(output_dir, "%s_cert.pem" % cs_prefix)
+        server_key_file = os.path.join(output_dir, "%s_key.pem" % cs_prefix)
+
+        # If the cert and key files do not exist, then generate one.
+        if not os.path.exists(server_cert_file) or not os.path.exists(
+            server_key_file):
+                # Used as a factor to easily specify a year.
+                year_factor = 60 * 60 * 24 * 365
+
+                # If user specifies ca_cert_file and ca_key_file, just load
+                # the files. Otherwise, generate new ca_cert and ca_key.
+                if not ca_cert_file or not ca_key_file:
+                        ca_cert_file = os.path.join(output_dir,
+                            "ca_%s_cert.pem" % server_id)
+                        ca_key_file = os.path.join(output_dir,
+                            "ca_%s_key.pem" % server_id)
+                        ca_cert, ca_key = _createCertificateKey(1, host,
+                            0, year_factor * 10, ca_cert_file, ca_key_file)
+                else:
+                        if not os.path.exists(ca_cert_file):
+                                raise DepotException(_("Cannot find user "
+                                    "provided CA certificate file: %s")
+                                    % ca_cert_file)
+                        if not os.path.exists(ca_key_file):
+                                raise DepotException(_("Cannot find user "
+                                    "provided CA key file: %s")
+                                    % ca_key_file)
+                        with open(ca_cert_file, "r") as fr:
+                                ca_cert = load_certificate(FILETYPE_PEM,
+                                    fr.read())
+                        with open(ca_key_file, "r") as fr:
+                                ca_key = load_privatekey(FILETYPE_PEM,
+                                    fr.read())
+
+                _createCertificateKey(2, host, 0, year_factor * 10,
+                    server_cert_file, server_key_file, issuerCert=ca_cert,
+                    issuerKey=ca_key)
+
+        return (ca_cert_file, ca_key_file, server_cert_file, server_key_file)
+
 def cleanup_htdocs(htdocs_dir):
         """Destroy any existing "htdocs" directory."""
         try:
@@ -384,7 +522,8 @@
 
 def refresh_conf(repo_info, log_dir, host, port, runtime_dir,
             template_dir, cache_dir, cache_size, sroot, fragment=False,
-            allow_refresh=False):
+            allow_refresh=False, ssl_cert_file="", ssl_key_file="",
+            ssl_cert_chain_file=""):
         """Creates a new configuration for the depot."""
         try:
                 ret = EXIT_OK
@@ -439,7 +578,9 @@
 
                 _write_httpd_conf(pubs, default_pubs, runtime_dir, log_dir,
                     template_dir, cache_dir, cache_size, host, port, sroot,
-                    fragment=fragment, allow_refresh=allow_refresh)
+                    fragment=fragment, allow_refresh=allow_refresh,
+                    ssl_cert_file=ssl_cert_file, ssl_key_file=ssl_key_file,
+                    ssl_cert_chain_file=ssl_cert_chain_file)
                 _write_versions_response(htdocs_path, fragment=fragment)
                 # If we're writing a configuration fragment, then the web server
                 # is probably not running as DEPOT_USER:DEPOT_GROUP
@@ -518,6 +659,19 @@
                 raise DepotException(_("%s is not a valid prefix"))
         return "%s/" % val
 
+def _update_smf_props(smf_fmri, prop_list, orig, dest):
+        """Update the smf props after the new prop values are generated."""
+
+        smf_instances = smf.check_fmris(None, smf_fmri)
+        for fmri in smf_instances:
+                refresh = False
+                for i in range(len(prop_list)):
+                        if orig[i] != dest[i]:
+                                smf.set_prop(fmri, prop_list[i], dest[i])
+                                refresh = True
+                if refresh:
+                        smf.refresh(fmri)
+
 def main_func():
 
         # some sensible defaults
@@ -532,6 +686,23 @@
         cache_size = 0
         # whether we're writing a full httpd.conf, or just a fragment
         fragment = False
+        # Whether we support https service.
+        https = False
+        # The location of server certificate file.
+        ssl_cert_file = ""
+        # The location of server key file.
+        ssl_key_file = ""
+        # The location of the server ca certificate file.
+        ssl_ca_cert_file = ""
+        # The location of the server ca key file.
+        ssl_ca_key_file = ""
+        # Directory for storing generated certificates and keys
+        cert_key_dir = ""
+        # SSL certificate chain file path if the server certificate is not
+        # signed by the top level CA.
+        ssl_cert_chain_file = ""
+        # The pkg/depot smf instance fmri.
+        smf_fmri = ""
         # an optional url-prefix, used to separate pkg5 services from the rest
         # of the webserver url namespace, only used when running in fragment
         # mode, otherwise we assume we're the only service running on this
@@ -552,10 +723,11 @@
         server_type = "apache2"
 
         writable_root_set = False
-
         try:
                 opts, pargs = getopt.getopt(sys.argv[1:],
-                    "Ac:d:Fh:l:P:p:r:Ss:t:T:?", ["help", "debug="])
+                    "Ac:d:Fh:l:P:p:r:Ss:t:T:?", ["help", "debug=", "https",
+                    "cert=", "key=", "ca-cert=", "ca-key=", "cert-chain=",
+                    "cert-key-dir=", "smf-fmri="])
                 for opt, arg in opts:
                         if opt == "--help":
                                 usage()
@@ -597,6 +769,22 @@
                                 use_smf_instances = True
                         elif opt == "-A":
                                 allow_refresh = True
+                        elif opt == "--https":
+                                https = True
+                        elif opt == "--cert":
+                                ssl_cert_file = arg
+                        elif opt == "--key":
+                                ssl_key_file = arg
+                        elif opt == "--ca-cert":
+                                ssl_ca_cert_file = arg
+                        elif opt == "--ca-key":
+                                ssl_ca_key_file = arg
+                        elif opt == "--cert-chain":
+                                ssl_cert_chain_file = arg
+                        elif opt == "--cert-key-dir":
+                                cert_key_dir = arg
+                        elif opt == "--smf-fmri":
+                                smf_fmri = arg
                         elif opt == "--debug":
                                 try:
                                         key, value = arg.split("=", 1)
@@ -629,6 +817,86 @@
         if repo_info and use_smf_instances:
                 usage(_("cannot use -d and -S together."))
 
+        if https:
+                if fragment:
+                        usage(_("https configuration is not supported in "
+                            "fragment mode."))
+                if bool(ssl_cert_file) != bool(ssl_key_file):
+                        usage(_("certificate and key files must be presented "
+                            "at the same time."))
+                elif not ssl_cert_file and not ssl_key_file:
+                        if not cert_key_dir:
+                                usage(_("cert-key-dir option is required to "
+                                    "store the generated certificates and keys."))
+                        if ssl_cert_chain_file:
+                                usage(_("Cannot use --cert-chain without "
+                                    "--cert and --key"))
+                        if bool(ssl_ca_cert_file) != bool(ssl_ca_key_file):
+                                usage(_("server CA certificate and key files "
+                                    "must be presented at the same time."))
+                        # If fmri is specified for pkg/depot instance, we need
+                        # to record the property values for updating.
+                        if smf_fmri:
+                                orig = (ssl_ca_cert_file, ssl_ca_key_file,
+                                    ssl_cert_file, ssl_key_file)
+                        try:
+                                ssl_ca_cert_file, ssl_ca_key_file, ssl_cert_file, \
+                                    ssl_key_file = \
+                                    _generate_server_cert_key(host, port,
+                                    ca_cert_file=ssl_ca_cert_file,
+                                    ca_key_file=ssl_ca_key_file,
+                                    output_dir=cert_key_dir)
+                                if ssl_ca_cert_file:
+                                        msg(_("Server CA certificate is "
+                                            "located at %s. Please deploy it "
+                                            "into /etc/certs/CA directory of "
+                                            "each client.")
+                                            % ssl_ca_cert_file)
+                        except (DepotException, EnvironmentError), e:
+                                    error(e)
+                                    return EXIT_OOPS
+
+                        # Update the pkg/depot instance smf properties if
+                        # anything changes.
+                        if smf_fmri:
+                                dest = (ssl_ca_cert_file, ssl_ca_key_file,
+                                    ssl_cert_file, ssl_key_file)
+                                if orig != dest:
+                                        prop_list = ["config/ssl_ca_cert_file",
+                                            "config/ssl_ca_key_file",
+                                            "config/ssl_cert_file",
+                                            "config/ssl_key_file"]
+                                        try:
+                                                _update_smf_props(smf_fmri, prop_list,
+                                                    orig, dest)
+                                        except (smf.NonzeroExitException,
+                                            RuntimeError), e:
+                                                error(e)
+                                                return EXIT_OOPS
+                else:
+                        if not os.path.exists(ssl_cert_file):
+                                error(_("User provided server certificate "
+                                    "file %s does not exist.") % ssl_cert_file)
+                                return EXIT_OOPS
+                        if not os.path.exists(ssl_key_file):
+                                error(_("User provided server key file %s "
+                                    "does not exist.") % ssl_key_file)
+                                return EXIT_OOPS
+                        if ssl_cert_chain_file and not os.path.exists(
+                            ssl_cert_chain_file):
+                                error(_("User provided certificate chain file "
+                                    "%s does not exist.") %
+                                    ssl_cert_chain_file)
+                                return EXIT_OOPS
+        else:
+                if ssl_cert_file or ssl_key_file or ssl_ca_cert_file \
+                    or ssl_ca_key_file or ssl_cert_chain_file:
+                        usage(_("certificate or key files are given before "
+                            "https service is turned on. Use --https to turn "
+                            "on the service."))
+                if smf_fmri:
+                        usage(_("cannot use --smf-fmri without --https."))
+
         # We can't support httpd.conf fragments with writable root, because
         # we don't have the mod_wsgi app that can build the index or serve
         # search requests everywhere the fragments might be used. (eg. on
@@ -666,7 +934,8 @@
 
         ret = refresh_conf(repo_info, log_dir, host, port, runtime_dir,
             template_dir, cache_dir, cache_size, sroot, fragment=fragment,
-            allow_refresh=allow_refresh)
+            allow_refresh=allow_refresh, ssl_cert_file=ssl_cert_file,
+            ssl_key_file=ssl_key_file, ssl_cert_chain_file=ssl_cert_chain_file)
         return ret
 
 #
--- a/src/depot.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/depot.py	Fri Apr 18 10:14:06 2014 +0530
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 # pkg.depotd - package repository daemon
@@ -168,7 +168,7 @@
         --debug         The name of a debug feature to enable; or a whitespace
                         or comma separated list of features to enable.
                         Possible values are: headers, hash=sha1+sha256,
-                        hash=sha256
+                        hash=sha256, hash=sha1+sha512_256, hash=sha512_256
         --image-root    The path to the image whose file information will be
                         used as a cache for file data.
         --log-access    The destination for any access related information
--- a/src/man/pkg.depot-config.1m	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/man/pkg.depot-config.1m	Fri Apr 18 10:14:06 2014 +0530
@@ -9,7 +9,12 @@
 /usr/lib/pkg.depot-config ( -d \fIrepository_dir\fR | -S )
     -r \fIruntime_dir\fR [-c \fIcache_dir\fR] [-s \fIcache_size\fR] [-p \fIport\fR]
     [-h \fIhostname\fR] [-l \fIlogs_dir\fR] [-T \fItemplate_dir\fR]
-    [-A] [-t \fIserver_type\fR] ( [-F] [-P \fIserver_prefix\fR] )
+    [-A] [-t \fIserver_type\fR] ( ([-F] [-P \fIserver_prefix\fR] ) | [--https
+    ( ( --cert \fIserver_cert_file\fR --key \fIserver_key_file\fR
+    [--cert-chain \fIssl_cert_chain_file\fR] ) |
+    --cert-key-dir \fIcert_key_directory\fR )
+    [ (--ca-cert \fIca_cert_file\fR --ca-key \fIca_key_file\fR ) ]
+    [--smf-fmri \fIsmf_pkg_depot_fmri\fR] ] )
 .fi
 
 .SH DESCRIPTION
@@ -214,6 +219,94 @@
 Specify the prefix used to map the depot into the web server namespace. The \fB-P\fR option is intended to be used with the \fB-F\fR option.
 .RE
 
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--https\fR
+.ad
+.sp .6
+.RS 4n
+Enable the HTTPS service. This option cannot be used with the \fB-F\fR or \fB-P\fR options.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--cert\fR \fIserver_cert_file\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the location of the server certificate file. This option can only be used with the \fB--https\fR option. Either both the \fB--cert\fR and \fB--key\fR options or the \fB--cert-key-dir\fR option must be used with the \fB--https\fR option.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--key\fR \fIserver_key_file\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the location of the server key file. This option can only be used with the \fB--https\fR option. Either both the \fB--cert\fR and \fB--key\fR options or the \fB--cert-key-dir\fR option must be used with the \fB--https\fR option.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--cert-key-dir\fR \fIcert_key_directory\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the directory where the automatically generated certificates and keys should be stored if options \fB--cert\fR and \fB--key\fR are omitted. This option can only be used with the \fB--https\fR option. Either both the \fB--cert\fR and \fB--key\fR options or the \fB--cert-key-dir\fR option must be used with the \fB--https\fR option.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--ca-cert\fR \fIssl_ca_cert_file\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the location of the top CA certificate file. This option can only be used with the \fB--https\fR option and must be used together with the \fB--ca-key\fR option. This option is only used for automatically generating the server certificate based on this CA certificate and the key specified by the \fB--ca-key\fR option.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--ca-key\fR \fIssl_ca_key_file\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the location of the top CA key file. This option can only be used with the \fB--https\fR option and must be used together with the \fB--ca-cert\fR option. This option is only used for automatically generating the server certificate based on this key and the CA certificate specified by the \fB--ca-cert\fR option.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--cert-chain\fR \fIssl_cert_chain_file\fR\fR
+.ad
+.sp .6
+.RS 4n
+This option can only be used with the \fB--https\fR option. This option is required if the server certificate is not signed by the top level CA directly but is signed by an intermediate authority.
+.RE
+
+.sp
+.ne 2
+.mk
+.na
+\fB\fB--smf-fmri\fR \fIsmf_pkg_depot_fmri\fR\fR
+.ad
+.sp .6
+.RS 4n
+Specify the FMRI of the pkg/depot service instance. This option is used to update the corresponding SMF properties of that instance if any certificates or keys are automatically generated for that instance. This option can only be used with the \fB--https\fR option.
+.RE
+
 .SH PROVIDING ADDITIONAL SERVER CONFIGURATION
 .sp
 .LP
--- a/src/modules/actions/directory.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/directory.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a directory packaging object
@@ -95,8 +95,7 @@
                         oowner, ogroup = orig.get_fsobj_uid_gid(pkgplan,
                             pkgplan.origin_fmri)
 
-                path = os.path.normpath(os.path.sep.join((
-                    pkgplan.image.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # Don't allow installation through symlinks.
                 self.fsobj_checkpath(pkgplan, path)
@@ -200,8 +199,7 @@
                 return errors, warnings, info
 
         def remove(self, pkgplan):
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(pkgplan.image.get_root())
                 try:
                         os.rmdir(path)
                 except OSError, e:
--- a/src/modules/actions/file.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/file.py	Fri Apr 18 10:14:06 2014 +0530
@@ -75,16 +75,12 @@
                         """If the file exists, check if it is in use."""
                         if not orig:
                                 return
-                        path = os.path.normpath(
-                            os.path.join(pkgplan.image.get_root(),
-                            orig.attrs["path"]))
+                        path = orig.get_installed_path(pkgplan.image.get_root())
                         if os.path.isfile(path) and self.in_use(path):
                                 raise api_errors.FileInUseException, path
 
                 def preremove(self, pkgplan):
-                        path = os.path.normpath(
-                            os.path.join(pkgplan.image.get_root(),
-                            self.attrs["path"]))
+                        path = self.get_installed_path(pkgplan.image.get_root())
                         if os.path.isfile(path) and self.in_use(path):
                                 raise api_errors.FileInUseException, path
 
@@ -113,8 +109,7 @@
                 owner, group = self.get_fsobj_uid_gid(pkgplan,
                     pkgplan.destination_fmri)
 
-                final_path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                final_path = self.get_installed_path(pkgplan.image.get_root())
 
                 # Don't allow installation through symlinks.
                 self.fsobj_checkpath(pkgplan, final_path)
@@ -147,7 +142,7 @@
                 # XXX We should save the originally installed file.  It can be
                 # used as an ancestor for a three-way merge, for example.  Where
                 # should it be stored?
-                pres_type = self.__check_preserve(orig, pkgplan)
+                pres_type = self._check_preserve(orig, pkgplan)
                 do_content = True
                 old_path = None
                 if pres_type == True or (pres_type and
@@ -184,7 +179,6 @@
                                         raise
 
                 # XXX This needs to be modularized.
-                # XXX This needs to be controlled by policy.
                 if do_content and self.needsdata(orig, pkgplan):
                         tfilefd, temp = tempfile.mkstemp(dir=os.path.dirname(
                             final_path))
@@ -298,8 +292,7 @@
                 the preserve attribute is not present, that the hashes
                 and other attributes of the file match."""
 
-                path = os.path.normpath(os.path.sep.join(
-                    (img.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(img.get_root())
 
                 lstat, errors, warnings, info, abort = \
                     self.verify_fsobj_common(img, stat.S_IFREG)
@@ -453,7 +446,7 @@
 
                 return errors, warnings, info
 
-        def __check_preserve(self, orig, pkgplan):
+        def _check_preserve(self, orig, pkgplan, orig_path=None):
                 """Return the type of preservation needed for this action.
 
                 Returns None if preservation is not defined by the action.
@@ -463,13 +456,22 @@
                 or 'legacy' for each of the respective forms of preservation.
                 """
 
+                # If the logic in this function ever changes, all callers will
+                # need to be updated to reflect how they interpret return
+                # values.
+
                 try:
                         pres_type = self.attrs["preserve"]
                 except KeyError:
-                        return None
+                        return
 
-                final_path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                if "elfhash" in self.attrs:
+                        # Don't allow preserve logic to be applied to elf files;
+                        # if we ever stop tagging elf binaries with this
+                        # attribute, this will need to be updated.
+                        return
+
+                final_path = self.get_installed_path(pkgplan.image.get_root())
 
                 # 'legacy' preservation is very different than other forms of
                 # preservation as it doesn't account for the on-disk state of
@@ -536,6 +538,10 @@
                                                 return "renameold.update"
                                 return False
 
+                if (orig and orig_path):
+                        # Comparison will be based on a file being moved.
+                        is_file = os.path.isfile(orig_path)
+
                 # If the action has been marked with a preserve attribute, and
                 # the file exists and has a content hash different from what the
                 # system expected it to be, then we preserve the original file
@@ -544,7 +550,9 @@
                         # if we had an action installed, then we know what hash
                         # function was used to compute it's hash attribute.
                         if orig:
-                                chash, cdata = misc.get_data_digest(final_path,
+                                if not orig_path:
+                                        orig_path = final_path
+                                chash, cdata = misc.get_data_digest(orig_path,
                                     hash_func=orig_hash_func)
                         if not orig or chash != orig_hash_val:
                                 if pres_type in ("renameold", "renamenew"):
@@ -560,13 +568,21 @@
         def needsdata(self, orig, pkgplan):
                 if self.replace_required:
                         return True
-                # check for the presence of a simple elfhash attribute,
-                # and if that's present, look for the common preferred elfhash.
-                # For now, this is sufficient, but when additional content
-                # types are supported (and we stop publishing SHA-1 hashes) more
-                # work will be needed to compute 'bothelf'.
-                bothelf = orig and "elfhash" in orig.attrs and \
-                    "elfhash" in self.attrs
+
+                # import goes here to prevent circular import
+                from pkg.client.imageconfig import CONTENT_UPDATE_POLICY
+
+                use_content_hash = pkgplan.image.cfg.get_policy_str(
+                    CONTENT_UPDATE_POLICY) == "when-required"
+
+                # If content update policy allows it, check for the presence of
+                # a simple elfhash attribute, and if that's present, look for
+                # the common preferred elfhash.  For now, this is sufficient,
+                # but when additional content types are supported (and we stop
+                # publishing SHA-1 hashes) more work will be needed to compute
+                # 'bothelf'.
+                bothelf = use_content_hash and orig and \
+                    "elfhash" in orig.attrs and "elfhash" in self.attrs
                 if bothelf:
                         common_elf_attr, common_elfhash, common_orig_elfhash, \
                             common_elf_func = \
@@ -596,12 +612,11 @@
                         # ensures that for cases where the mode or some other
                         # attribute of the file has changed that the file will
                         # be installed.
-                        path = os.path.normpath(os.path.sep.join(
-                            (pkgplan.image.get_root(), self.attrs["path"])))
+                        path = self.get_installed_path(pkgplan.image.get_root())
                         if not os.path.isfile(path):
                                 return True
 
-                pres_type = self.__check_preserve(orig, pkgplan)
+                pres_type = self._check_preserve(orig, pkgplan)
                 if pres_type != None and pres_type != True:
                         # Preserved files only need data if they're being
                         # changed (e.g. "renameold", etc.).
@@ -610,8 +625,7 @@
                 return False
 
         def remove(self, pkgplan):
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # Are we supposed to save this file to restore it elsewhere
                 # or in another pkg? 'save_file' is set by the imageplan.
@@ -723,12 +737,8 @@
                         # Nothing to restore; original file is missing.
                         return
 
-                path = self.attrs["path"]
-
-                full_path = os.path.normpath(os.path.sep.join(
-                    (image.get_root(), path)))
-
-                assert(not os.path.exists(full_path))
+                full_path = self.get_installed_path(image.get_root())
+                assert not os.path.exists(full_path)
 
                 misc.copyfile(saved_name, full_path)
                 os.unlink(saved_name)
--- a/src/modules/actions/generic.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/generic.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a generic packaging object
@@ -493,6 +493,15 @@
                         ]
                 return []
 
+        def get_installed_path(self, img_root):
+                """Given an image root, return the installed path of the action
+                if it has a installable payload (i.e. 'path' attribute)."""
+                try:
+                        return os.path.normpath(os.path.join(img_root,
+                            self.attrs["path"]))
+                except KeyError:
+                        return
+
         def distinguished_name(self):
                 """ Return the distinguishing name for this action,
                     preceded by the type of the distinguishing name.  For
@@ -741,8 +750,7 @@
                 or invalid, an InvalidActionAttributesError exception will be
                 raised."""
 
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # The attribute may be missing.
                 owner = self.attrs.get("owner", "").rstrip()
@@ -874,8 +882,7 @@
                                     group)
                                 group = None
 
-                path = os.path.normpath(
-                    os.path.sep.join((img.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(img.get_root())
 
                 lstat = None
                 try:
--- a/src/modules/actions/hardlink.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/hardlink.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a (hard) link packaging object
@@ -67,11 +67,8 @@
         def install(self, pkgplan, orig):
                 """Client-side method that installs a hard link."""
 
-                path = self.attrs["path"]
                 target = self.get_target_path()
-
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), path)))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # Don't allow installation through symlinks.
                 self.fsobj_checkpath(pkgplan, path)
@@ -118,8 +115,7 @@
                         return errors, warnings, info
 
                 target = self.get_target_path()
-                path = os.path.normpath(os.path.sep.join(
-                    (img.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(img.get_root())
                 target = os.path.normpath(os.path.sep.join(
                     (img.get_root(), target)))
 
--- a/src/modules/actions/license.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/license.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a license packaging object
@@ -79,12 +79,10 @@
                 # ensure "path" is initialized.  it may not be if we've loaded
                 # a plan that was previously prepared.
                 self.preinstall(pkgplan, orig)
-                path = self.attrs["path"]
 
                 stream = self.data()
 
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), path)))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # make sure the directory exists and the file is writable
                 if not os.path.exists(os.path.dirname(path)):
--- a/src/modules/actions/link.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/actions/link.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2007, 2012, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 """module describing a (symbolic) link packaging object
@@ -56,11 +56,8 @@
         def install(self, pkgplan, orig):
                 """Client-side method that installs a link."""
 
-                path = self.attrs["path"]
                 target = self.attrs["target"]
-
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), path)))
+                path = self.get_installed_path(pkgplan.image.get_root())
 
                 # Don't allow installation through symlinks.
                 self.fsobj_checkpath(pkgplan, path)
@@ -81,11 +78,8 @@
                 info).  The error list will be empty if the action has been
                 correctly installed in the given image."""
 
-                path = self.attrs["path"]
                 target = self.attrs["target"]
-
-                path = os.path.normpath(os.path.sep.join(
-                    (img.get_root(), path)))
+                path = self.get_installed_path(img.get_root())
 
                 lstat, errors, warnings, info, abort = \
                     self.verify_fsobj_common(img, stat.S_IFLNK)
@@ -107,8 +101,7 @@
                 other than a link is found at the destination location, it
                 will be removed or salvaged."""
 
-                path = os.path.normpath(os.path.sep.join(
-                    (pkgplan.image.get_root(), self.attrs["path"])))
+                path = self.get_installed_path(pkgplan.image.get_root())
                 return self.remove_fsobj(pkgplan, path)
 
         def generate_indices(self):
--- a/src/modules/client/actuator.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/actuator.py	Fri Apr 18 10:14:06 2014 +0530
@@ -89,6 +89,7 @@
                 self.cmd_path = ""
                 self.sync_timeout = 0
                 self.act_timed_out = False
+                self.zone = None
 
         @staticmethod
         def getstate(obj, je_state=None):
@@ -129,6 +130,16 @@
                 """
                 self.sync_timeout = timeout
 
+        def set_zone(self, zname):
+                """Specify if actuators are supposed to be run within a zone.
+                If 'zname' is None, actuators are run in the global zone,
+                otherwise actuators are run in the zone 'zname'. The caller has
+                to make sure the zone exists and is running. If there are any
+                issues with calling an actuator in the zone, it will be
+                ignored."""
+
+                self.zone = zname
+
         @property
         def timed_out(self):
                 return self.act_timed_out
@@ -193,7 +204,7 @@
                 """Returns a list of tuples of possible release notes"""
                 return self.update.get("release-note", []) + \
                     self.install.get("release-note", [])
-                
+
         def get_services_list(self):
                 """Returns a list of services that would be restarted"""
                 return [(fmri, smf) for fmri, smf in self.get_list()
@@ -218,11 +229,24 @@
 
         def __invoke(self, func, *args, **kwargs):
                 """Execute SMF command. Remember if command timed out."""
+
+                if self.zone:
+                        kwargs["zone"] = self.zone
+
                 try:
                         func(*args, **kwargs)
                 except smf.NonzeroExitException, nze:
                         if nze.return_code == smf.EXIT_TIMEOUT:
                                 self.act_timed_out = True
+                        elif " ".join(nze.output).startswith("zlogin:"):
+                                # Ignore zlogin errors; the worst which
+                                # can happen is that an actuator is not run
+                                # (disable is always run with -t).
+                                # Since we only test once if the zone is
+                                # running, this could happen if someone shuts
+                                # down the zone while we are in the process of
+                                # executing.
+                                pass
                         else:
                                 raise
 
@@ -241,8 +265,10 @@
                                             os.O_EXCL  |
                                             os.O_CREAT |
                                             os.O_WRONLY))
-                        if not DebugValues.get_value("smf_cmds_dir"):
+                        if not DebugValues.get_value("smf_cmds_dir") and \
+                            not self.zone:
                                 return
+
                 self.do_nothing = False
 
         def exec_pre_actuators(self, image):
@@ -256,20 +282,22 @@
 
                 disable_fmris = self.removal.get("disable_fmri", set())
 
-                suspend_fmris = smf.check_fmris("suspend_fmri", suspend_fmris)
-                disable_fmris = smf.check_fmris("disable_fmri", disable_fmris)
+                suspend_fmris = smf.check_fmris("suspend_fmri", suspend_fmris,
+                    zone=self.zone)
+                disable_fmris = smf.check_fmris("disable_fmri", disable_fmris,
+                    zone=self.zone)
                 # eliminate services not loaded or not running
                 # remember those services enabled only temporarily
 
                 for fmri in suspend_fmris.copy():
-                        state = smf.get_state(fmri)
+                        state = smf.get_state(fmri, zone=self.zone)
                         if state <= smf.SMF_SVC_TMP_ENABLED:
                                 suspend_fmris.remove(fmri)
                         if state == smf.SMF_SVC_TMP_ENABLED:
                                 tmp_suspend_fmris.add(fmri)
 
                 for fmri in disable_fmris.copy():
-                        if smf.is_disabled(fmri):
+                        if smf.is_disabled(fmri, zone=self.zone):
                                 disable_fmris.remove(fmri)
 
                 self.suspend_fmris = suspend_fmris
@@ -326,14 +354,16 @@
                     self.update.get("restart_fmri", set()) | \
                     self.install.get("restart_fmri", set())
 
-                refresh_fmris = smf.check_fmris("refresh_fmri", refresh_fmris)
-                restart_fmris = smf.check_fmris("restart_fmri", restart_fmris)
+                refresh_fmris = smf.check_fmris("refresh_fmri", refresh_fmris,
+                    zone=self.zone)
+                restart_fmris = smf.check_fmris("restart_fmri", restart_fmris,
+                    zone=self.zone)
 
                 # ignore services not present or not
                 # enabled
 
                 for fmri in refresh_fmris.copy():
-                        if smf.is_disabled(fmri):
+                        if smf.is_disabled(fmri, zone=self.zone):
                                 refresh_fmris.remove(fmri)
 
                 params = tuple(refresh_fmris)
@@ -342,7 +372,7 @@
                         self.__invoke(smf.refresh, params, sync_timeout=self.sync_timeout)
 
                 for fmri in restart_fmris.copy():
-                        if smf.is_disabled(fmri):
+                        if smf.is_disabled(fmri, zone=self.zone):
                                 restart_fmris.remove(fmri)
 
                 params = tuple(restart_fmris)
--- a/src/modules/client/api.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/api.py	Fri Apr 18 10:14:06 2014 +0530
@@ -103,8 +103,8 @@
 # things like help(pkg.client.api.PlanDescription)
 from pkg.client.plandesc import PlanDescription # pylint: disable=W0611
 
-CURRENT_API_VERSION = 78
-COMPATIBLE_API_VERSIONS = frozenset([72, 73, 74, 75, 76, 77,
+CURRENT_API_VERSION = 79
+COMPATIBLE_API_VERSIONS = frozenset([72, 73, 74, 75, 76, 77, 78,
     CURRENT_API_VERSION])
 CURRENT_P5I_VERSION = 1
 
--- a/src/modules/client/imageconfig.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/imageconfig.py	Fri Apr 18 10:14:06 2014 +0530
@@ -53,6 +53,7 @@
 # should use the constants defined here.
 
 BE_POLICY = "be-policy"
+CONTENT_UPDATE_POLICY = "content-update-policy"
 FLUSH_CONTENT_CACHE = "flush-content-cache-on-success"
 MIRROR_DISCOVERY = "mirror-discovery"
 SEND_UUID = "send-uuid"
@@ -62,6 +63,7 @@
 default_policies = {
     BE_POLICY: "default",
     CHECK_CERTIFICATE_REVOCATION: False,
+    CONTENT_UPDATE_POLICY: "default",
     FLUSH_CONTENT_CACHE: True,
     MIRROR_DISCOVERY: False,
     SEND_UUID: True,
@@ -69,6 +71,11 @@
     USE_SYSTEM_REPO: False
 }
 
+default_policy_map = {
+    BE_POLICY: { "default": "create-backup" },
+    CONTENT_UPDATE_POLICY: { "default": "always" },
+}
+
 CA_PATH = "ca-path"
 # Default CA_PATH is /etc/openssl/certs
 default_properties = {
@@ -160,6 +167,9 @@
                     cfg.PropDefined(BE_POLICY, allowed=["default",
                         "always-new", "create-backup", "when-required"],
                         default=default_policies[BE_POLICY]),
+                    cfg.PropDefined(CONTENT_UPDATE_POLICY, allowed=["default",
+                        "always", "when-required"],
+                        default=default_policies[CONTENT_UPDATE_POLICY]),
                     cfg.PropBool(FLUSH_CONTENT_CACHE,
                         default=default_policies[FLUSH_CONTENT_CACHE]),
                     cfg.PropBool(MIRROR_DISCOVERY,
@@ -359,7 +369,18 @@
                 not defined in the image configuration.
                 """
                 assert policy in default_policies
-                return self.get_property("property", policy)
+
+                prop = self.get_property("property", policy)
+
+                # If requested policy has a default mapping in
+                # default_policy_map, we substitute the correct value if it's
+                # still set to 'default'.
+                if policy in default_policy_map and \
+                    prop == default_policies[policy]:
+                        return default_policy_map[policy] \
+                            [default_policies[policy]]
+
+                return prop
 
         def get_property(self, section, name):
                 """Returns the value of the property object matching the given
--- a/src/modules/client/imageplan.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/imageplan.py	Fri Apr 18 10:14:06 2014 +0530
@@ -46,6 +46,7 @@
 import pkg.catalog
 import pkg.client.api_errors as api_errors
 import pkg.client.indexer as indexer
+import pkg.client.linkedimage.zone as zone
 import pkg.client.pkg_solver as pkg_solver
 import pkg.client.pkgdefs as pkgdefs
 import pkg.client.pkgplan as pkgplan
@@ -236,6 +237,16 @@
                 """get the (approx) number of download space available"""
                 return self.pd._cbytes_avail
 
+        def __finish_plan(self, pdstate, fmri_changes=None):
+                """Private helper function that must be called at the end of
+                every planning operation to ensure final plan state is set and
+                any general post-plan work is performed."""
+
+                pd = self.pd
+                pd.state = pdstate
+                if not fmri_changes is None:
+                        pd._fmri_changes = fmri_changes
+
         def __vector_2_fmri_changes(self, installed_dict, vector,
             li_pkg_updates=True, new_variants=None, new_facets=None,
             fmri_changes=None):
@@ -509,7 +520,7 @@
                     new_variants=new_variants,
                     pkgs_inst=pkgs_inst,
                     reject_list=reject_list)
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS)
 
         def __plan_exact_install(self, li_pkg_updates=True, li_sync_op=False,
             new_facets=None, new_variants=None, pkgs_inst=None,
@@ -527,7 +538,7 @@
                     pkgs_inst=pkgs_inst,
                     reject_list=reject_list,
                     exact_install=True)
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS)
 
         def set_be_options(self, backup_be, backup_be_name, new_be,
             be_activate, be_name):
@@ -695,7 +706,7 @@
                             new_variants=new_variants,
                             reject_list=reject_list,
                             fmri_changes=fmri_changes)
-                        self.pd.state = plandesc.EVALUATED_PKGS
+                        self.__finish_plan(plandesc.EVALUATED_PKGS)
 
                 # evaluate what varcet changes are required
                 new_variants, new_facets, \
@@ -748,8 +759,9 @@
                 # If solver isn't involved, assume the list of packages
                 # has been determined.
                 assert fmri_changes is not None
-                self.pd._fmri_changes = fmri_changes
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS,
+                    fmri_changes=fmri_changes)
+
 
         def plan_set_mediators(self, new_mediators):
                 """Determine the changes needed to set the specified mediators.
@@ -887,7 +899,7 @@
                         self.pd._new_mediators = update_mediators
 
                 pt.plan_done(pt.PLAN_MEDIATION_CHG)
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS)
 
         def __any_reject_matches(self, reject_list):
                 """Check if any reject patterns match installed packages (in
@@ -919,8 +931,8 @@
                 # don't bother invoking the solver.
                 if not uninstall and not new_facets is not None and insync:
                         # we don't need to do anything
-                        self.pd._fmri_changes = []
-                        self.pd.state = plandesc.EVALUATED_PKGS
+                        self.__finish_plan(plandesc.EVALUATED_PKGS,
+                            fmri_changes=[])
                         return
 
                 self.__plan_install(li_pkg_updates=li_pkg_updates,
@@ -981,7 +993,7 @@
                 if DebugValues["plan"]:
                         self.pd._solver_errors = solver.get_trim_errors()
 
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS)
 
         def __plan_update_solver(self, pkgs_update=None,
             ignore_missing=False, reject_list=misc.EmptyI):
@@ -1076,7 +1088,7 @@
                     ignore_missing=ignore_missing,
                     pkgs_update=pkgs_update,
                     reject_list=reject_list)
-                self.pd.state = plandesc.EVALUATED_PKGS
+                self.__finish_plan(plandesc.EVALUATED_PKGS)
 
         def plan_revert(self, args, tagged):
                 """Plan reverting the specified files or files tagged as
@@ -1218,10 +1230,9 @@
                             can_exclude=True)
                         self.pd.pkg_plans.append(pp)
 
-                self.pd._fmri_changes = []
-
                 pt.plan_done(pt.PLAN_PKGPLAN)
                 pt.plan_all_done()
+                self.__finish_plan(plandesc.EVALUATED_PKGS, fmri_changes=[])
 
         def __gen_matching_acts(self, path, pattern):
                 # return two lists of actions that match pattern at path
@@ -2529,6 +2540,272 @@
                 else:
                         d.setdefault(name, []).append(value)
 
+        def __evaluate_pkg_preserved_files(self):
+                """Private helper function that determines which preserved files
+                have changed in ImagePlan and how."""
+
+                assert self.state >= plandesc.MERGED_OK
+
+                pd = self.pd
+
+                # Track movement of preserved ("editable") files for plan
+                # summary and cache management.
+                moved = []
+                removed = []
+                installed = []
+                updated = []
+
+                # __merge_actions() adds the 'save_file' attribute to src
+                # actions that are being moved somewhere else and to dest
+                # actions that will be restored from a src action.  This only
+                # happens when at least one of the files involved has a
+                # 'preserve' attribute, so it's safe to treat either as a
+                # 'preserved' ("editable") file.
+
+                # The removal_actions are processed first since we'll determine
+                # how to transform them while processing the install and update
+                # actions based on the destination file state.
+                for ap in pd.removal_actions:
+                        src = ap.src
+                        if src.name != "file":
+                                continue
+                        if not ("preserve" in src.attrs or
+                            "save_file" in src.attrs or
+                            "overlay" in src.attrs):
+                                # Removed action has to be a preserved file or a
+                                # source of a restore.
+                                continue
+                        if "elfhash" in src.attrs:
+                                # Ignore erroneously tagged files.
+                                continue
+
+                        entry = [src.attrs["path"]]
+                        save_file = src.attrs.get("save_file")
+                        if save_file:
+                                entry.append(save_file[0])
+                                entry.append(src)
+                        removed.append(entry)
+
+                for ap in itertools.chain(pd.install_actions,
+                    pd.update_actions):
+                        orig = ap.src
+                        dest = ap.dst
+                        if dest.name != "file":
+                                continue
+                        if not ((orig and ("preserve" in orig.attrs or
+                            "save_file" in orig.attrs or
+                            "overlay" in orig.attrs)) or
+                            ("preserve" in dest.attrs or
+                            "save_file" in dest.attrs or
+                            "overlay" in dest.attrs)):
+                                # At least one of the actions has to be a
+                                # preserved file or a target of a restore.
+                                continue
+                        if "elfhash" in dest.attrs:
+                                # Ignore erroneously tagged files.
+                                continue
+
+                        tpath = dest.attrs["path"]
+                        entry = [tpath]
+                        save_file = dest.attrs.get("save_file")
+                        if save_file:
+                                tcache_name = save_file[0]
+                                for (ridx, rentry) in enumerate(removed):
+                                        if len(rentry) == 1:
+                                                continue
+
+                                        rpath, rcache_name, rorig = rentry
+                                        if rcache_name == tcache_name:
+                                                # If the cache name for this new
+                                                # file matches one of those for
+                                                # a removed file, the removed
+                                                # file will be renamed to this
+                                                # action's path before the
+                                                # action is processed.
+                                                del removed[ridx]
+                                                save_file = rpath
+                                                orig = rorig
+                                                break
+                                else:
+                                        save_file = None
+
+                        if not orig:
+                                # We can't rely on _check_preserve for this case
+                                # as there's no existing on-disk file at the
+                                # destination path yet.
+                                if dest.attrs.get("preserve") != "legacy":
+                                        # 'legacy' actions are only delivered if
+                                        # we're updating something already
+                                        # installed or moving an existing file.
+                                        installed.append(entry)
+                                continue
+                        elif orig.name != "file":
+                                # File is being replaced with another object
+                                # type.
+                                updated.append(entry)
+                                continue
+
+                        # The order of these checks is significant in
+                        # determining how a preserved file changed!
+                        #
+                        # First, check for on-disk content changes.
+                        opath = orig.get_installed_path(self.image.get_root())
+                        pres_type = dest._check_preserve(orig, ap.p,
+                            orig_path=opath)
+
+                        final_path = dest.get_installed_path(
+                            self.image.get_root())
+
+                        # If a removed action is going to be restored to
+                        # complete the operation, show the removed action path
+                        # as the source for the move omitting the steps
+                        # in-between.  For example:
+                        #  moved: testme -> newme
+                        #  moved: newme -> newme.legacy
+                        #  installed: newme
+                        # ...becomes:
+                        #  moved: testme -> newme.legacy
+                        #  installed: newme
+                        if save_file:
+                                mpath = save_file
+                        else:
+                                mpath = tpath
+
+                        if pres_type == "renameold":
+                                moved.append([mpath, tpath + ".old"])
+                                installed.append(entry)
+                                continue
+                        elif pres_type == "renameold.update":
+                                moved.append([mpath, tpath + ".update"])
+                                installed.append(entry)
+                                continue
+                        elif pres_type == "legacy":
+                                if orig.attrs.get("preserve") == "legacy":
+                                        updated.append(entry)
+                                        continue
+                                # Move only happens on preserve transition and
+                                # only if original already exists.
+                                if os.path.isfile(opath):
+                                        moved.append([mpath, tpath + ".legacy"])
+                                installed.append(entry)
+                                continue
+                        elif pres_type == True and save_file:
+                                # If the source and destination path are the
+                                # same, the content won't be updated.
+                                if mpath != tpath:
+                                        # New content ignored in favour of old.
+                                        moved.append([mpath, tpath])
+                                continue
+
+                        # Next, if on-disk file will be preserved and some other
+                        # unique_attr is changing (such as mode, etc.) mark the
+                        # file as "updated".
+                        if (pres_type == True and
+                            ImagePlan.__find_inconsistent_attrs(
+                                ((orig,), (dest,)), ignore=("path",))):
+                                updated.append(entry)
+                                continue
+
+                        # For remaining cases, what happens is based on the
+                        # result of _check_preserve().
+                        if pres_type == "renamenew":
+                                if save_file:
+                                        moved.append([mpath, tpath])
+                                # Delivered content changed.
+                                installed.append([tpath + ".new"])
+                        elif pres_type is None:
+                                # Delivered content or unique_attrs changed.
+                                updated.append(entry)
+                        elif pres_type == False:
+                                if save_file:
+                                        moved.append([mpath, tpath])
+                                        continue
+
+                                if not os.path.isfile(final_path):
+                                        # File is missing or of wrong type.
+                                        installed.append(entry)
+                                        continue
+
+                                # If a file is moving between packages, it will
+                # appear as an update, but may not have
+                                # different content or unique_attrs.  Check to
+                                # see if it does.
+                                if ImagePlan.__find_inconsistent_attrs(
+                                    ((orig,), (dest,)), ignore=("path",)):
+                                        # Different unique_attrs.
+                                        updated.append(entry)
+                                        continue
+
+                                attr, shash, ohash, hfunc = \
+                                    digest.get_common_preferred_hash(dest, orig)
+                                if shash != ohash:
+                                        # Delivered content changed.
+                                        updated.append(entry)
+                                        continue
+
+                # Pre-sort results for consumers.
+                installed.sort()
+                moved.sort()
+                removed.sort()
+                updated.sort()
+
+                self.pd._preserved = {
+                    "installed": installed,
+                    "moved": moved,
+                    "removed": removed,
+                    "updated": updated,
+                }
+
+        def __evaluate_pkg_downloads(self):
+                """Private helper function that determines package data to be
+                downloaded and updates the plan accordingly."""
+
+                assert self.state >= plandesc.MERGED_OK
+
+                pd = self.pd
+
+                for p in pd.pkg_plans:
+                        cpbytes, pbytes = p.get_bytes_added()
+                        if p.destination_fmri:
+                                mpath = self.image.get_manifest_path(
+                                    p.destination_fmri)
+                                try:
+                                        # Manifest data is essentially stored
+                                        # three times (original, cache, catalog).
+                                        # For now, include this in cbytes_added
+                                        # since that's closest to where the
+                                        # download cache is stored.
+                                        pd._cbytes_added += \
+                                            os.stat(mpath).st_size * 3
+                                except EnvironmentError, e:
+                                        raise api_errors._convert_error(e)
+                        pd._cbytes_added += cpbytes
+                        pd._bytes_added += pbytes
+
+                # Include state directory in cbytes_added for now since it's
+                # closest to where the download cache is stored.  (Twice the
+                # amount is used because image state update involves using
+                # a complete copy of existing state.)
+                pd._cbytes_added += misc.get_dir_size(self.image._statedir) * 2
+
+                # Our slop factor is 25%; overestimating is safer than under-
+                # estimating.  This attempts to approximate how much overhead
+                # the filesystem will impose on the operation.  Empirical
+                # testing suggests that overhead can vary wildly depending on
+                # average file size, fragmentation, zfs metadata overhead, etc.
+                # For an install of a package such as solaris-small-server into
+                # an image, a 12% difference between actual size and installed
+                # size was found, so this seems safe enough.  (And helps account
+                # for any bootarchives, fs overhead, etc.)
+                pd._cbytes_added *= 1.25
+                pd._bytes_added *= 1.25
+
+                # XXX For now, include cbytes_added in bytes_added total; in the
+                # future, this should only happen if they share the same
+                # filesystem.
+                pd._bytes_added += pd._cbytes_added
+                self.__update_avail_space()
+
         def evaluate(self):
                 """Given already determined fmri changes,
                 build pkg plans and figure out exact impact of
@@ -2542,63 +2819,23 @@
                         # plan is no longer valid.
                         raise api_errors.InvalidPlanError()
 
-                self.evaluate_pkg_plans()
-                self.merge_actions()
-                self.compile_release_notes()
-
-                fmri_updates = [
-                        (p.origin_fmri, p.destination_fmri)
-                        for p in self.pd.pkg_plans
-                ]
-                if not self.pd._li_pkg_updates and fmri_updates:
+                self.__evaluate_pkg_plans()
+                self.__merge_actions()
+                self.__compile_release_notes()
+
+                if not self.pd._li_pkg_updates and self.pd.pkg_plans:
                         # oops.  the caller requested no package updates and
                         # we couldn't satisfy that request.
+                        fmri_updates = [
+                                (p.origin_fmri, p.destination_fmri)
+                                for p in self.pd.pkg_plans
+                        ]
                         raise api_errors.PlanCreationException(
                             pkg_updates_required=fmri_updates)
 
-                for p in self.pd.pkg_plans:
-                        cpbytes, pbytes = p.get_bytes_added()
-                        if p.destination_fmri:
-                                mpath = self.image.get_manifest_path(
-                                    p.destination_fmri)
-                                try:
-                                        # Manifest data is essentially stored
-                                        # three times (original, cache, catalog).
-                                        # For now, include this in cbytes_added
-                                        # since that's closest to where the
-                                        # download cache is stored.
-                                        self.pd._cbytes_added += \
-                                            os.stat(mpath).st_size * 3
-                                except EnvironmentError, e:
-                                        raise api_errors._convert_error(e)
-                        self.pd._cbytes_added += cpbytes
-                        self.pd._bytes_added += pbytes
-
-                # Include state directory in cbytes_added for now since it's
-                # closest to where the download cache is stored.  (Twice the
-                # amount is used because image state update involves using
-                # a complete copy of existing state.)
-                self.pd._cbytes_added += \
-                    misc.get_dir_size(self.image._statedir) * 2
-
-                # Our slop factor is 25%; overestimating is safer than under-
-                # estimating.  This attempts to approximate how much overhead
-                # the filesystem will impose on the operation.  Empirical
-                # testing suggests that overhead can vary wildly depending on
-                # average file size, fragmentation, zfs metadata overhead, etc.
-                # For an install of a package such as solaris-small-server into
-                # an image, a 12% difference between actual size and installed
-                # size was found, so this seems safe enough.  (And helps account
-                # for any bootarchives, fs overhead, etc.)
-                self.pd._cbytes_added *= 1.25
-                self.pd._bytes_added *= 1.25
-
-                # XXX For now, include cbytes_added in bytes_added total; in the
-                # future, this should only happen if they share the same
-                # filesystem.
-                self.pd._bytes_added += self.pd._cbytes_added
-
-                self.__update_avail_space()
+                # These must be done after action merging.
+                self.__evaluate_pkg_preserved_files()
+                self.__evaluate_pkg_downloads()
 
         def __update_avail_space(self):
                 """Update amount of available space on FS"""
@@ -2651,7 +2888,7 @@
                 finally:
                         self.image.cleanup_downloads()
 
-        def compile_release_notes(self):
+        def __compile_release_notes(self):
                 """Figure out what release notes need to be displayed"""
                 release_notes = self.pd._actuators.get_release_note_info()
                 must_display = False
@@ -2672,7 +2909,7 @@
 
                         self.pd.release_notes = (must_display, notes)
 
-        def save_release_notes(self):
+        def __save_release_notes(self):
                 """Save a copy of the release notes and store the file name"""
                 if self.pd.release_notes[1]:
                         # create a file in imgdir/notes
@@ -2690,7 +2927,7 @@
                         tmpfile.close()
                         self.pd.release_notes_name = os.path.basename(path)
 
-        def evaluate_pkg_plans(self):
+        def __evaluate_pkg_plans(self):
                 """Internal helper function that does the work of converting
                 fmri changes into pkg plans."""
 
@@ -3130,7 +3367,7 @@
                 self.pd._new_mediators = prop_mediators
                 # Link mediation is complete.
 
-        def merge_actions(self):
+        def __merge_actions(self):
                 """Given a set of fmri changes and their associated pkg plan,
                 merge all the resultant actions for the packages being
                 updated."""
@@ -3602,11 +3839,14 @@
                 self.pd.install_actions.sort(key=addsort)
 
                 # cleanup pkg_plan objects which don't actually contain any
-                # changes
+                # changes and add any new ones to list of changes
                 for p in list(self.pd.pkg_plans):
                         if p.origin_fmri != p.destination_fmri or \
                             p.actions.removed or p.actions.changed or \
                             p.actions.added:
+                                pair = (p.origin_fmri, p.destination_fmri)
+                                if pair not in self.pd._fmri_changes:
+                                        self.pd._fmri_changes.append(pair)
                                 continue
                         self.pd.pkg_plans.remove(p)
                         fmri = p.origin_fmri
@@ -3913,6 +4153,22 @@
                         # client isn't left with invalid state.
                         self.image._remove_fast_lookups()
 
+                if not self.image.is_liveroot():
+                        # Check if the child is a running zone. If so run the
+                        # actuator in the zone.
+
+                        # Linked Image code uses trailing slashes, Image code
+                        # does not. So we make sure that our path comparisons
+                        # are always on the same page.
+                        root = os.path.normpath(self.image.root)
+
+                        rzones = zone.list_running_zones()
+                        for z, path in rzones.iteritems():
+                                if os.path.normpath(path) == root:
+                                        self.pd._actuators.set_zone(z)
+                                        # there should be only one zone per path
+                                        break
+
                 self.pd._actuators.exec_prep(self.image)
 
                 self.pd._actuators.exec_pre_actuators(self.image)
@@ -4045,7 +4301,7 @@
                         self.pd._actuators.exec_post_actuators(self.image)
 
                 self.image._create_fast_lookups(progtrack=self.__progtrack)
-                self.save_release_notes()
+                self.__save_release_notes()
 
                 # success
                 self.pd.state = plandesc.EXECUTED_OK
--- a/src/modules/client/linkedimage/zone.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/linkedimage/zone.py	Fri Apr 18 10:14:06 2014 +0530
@@ -259,7 +259,9 @@
 
                 # convert zone names into into LinkedImageName objects
                 zlist = []
-                for zone, path in zdict.iteritems():
+                # state is unused
+                # pylint: disable=W0612
+                for zone, (path, state) in zdict.iteritems():
                         lin = li.LinkedImageName("%s:%s" % (self.__pname, zone))
                         zlist.append([lin, path])
 
@@ -436,8 +438,9 @@
 def _list_zones(root, path_transform):
         """Get the zones associated with the image located at 'root'.  We
         return a dictionary where the keys are zone names and the values are
-        zone root pahts.  The global zone is excluded from the results.
-        Solaris10 branded zones are excluded from the results.  """
+        tuples containing zone root path and current state. The global zone is
+        excluded from the results. Solaris10 branded zones are excluded from the
+        results."""
 
         rv = dict()
         cmd = DebugValues.get_value("bin_zoneadm") # pylint: disable=E1120
@@ -504,6 +507,20 @@
                 if z_state not in zone_installed_states:
                         continue
 
-                rv[z_name] = z_rootpath
+                rv[z_name] = (z_rootpath, z_state)
 
         return rv
+
+def list_running_zones():
+        """Return dictionary with currently running zones of the system in the
+        following form:
+                { zone_name : zone_path, ... }
+        """
+
+        zdict = _list_zones("/", li.PATH_TRANSFORM_NONE)
+        rzdict = {}
+        for z_name, (z_path, z_state) in zdict.iteritems():
+                if z_state == ZONE_STATE_STR_RUNNING:
+                        rzdict[z_name] = z_path
+
+        return rzdict
--- a/src/modules/client/plandesc.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/client/plandesc.py	Fri Apr 18 10:14:06 2014 +0530
@@ -123,6 +123,12 @@
             "_old_facets": pkg.facet.Facets,
             "_new_facets": pkg.facet.Facets,
             "_rm_aliases": { str: set() },
+            "_preserved": {
+                "moved": [[str, str]],
+                "removed": [[str]],
+                "installed": [[str]],
+                "updated": [[str]],
+            },
             "added_groups": { str: pkg.fmri.PkgFmri },
             "added_users": { str: pkg.fmri.PkgFmri },
             "child_op_vectors": [ ( str, [ li.LinkedImageName ], {}, bool ) ],
@@ -168,6 +174,12 @@
                 self._fmri_changes = [] # install  (None, fmri)
                                         # remove   (oldfmri, None)
                                         # update   (oldfmri, newfmri|oldfmri)
+                self._preserved = {
+                    "moved": [],
+                    "removed": [],
+                    "installed": [],
+                    "updated": [],
+                }
                 self._solver_summary = []
                 self._solver_errors = None
                 self.li_attach = False
@@ -330,6 +342,7 @@
 
                 # reduce memory consumption
                 self._fmri_changes = []
+                self._preserved = {}
                 # We have to save the timed_out state.
                 self._act_timed_out = self._actuators.timed_out
                 self._actuators = pkg.client.actuator.Actuator()
@@ -556,7 +569,7 @@
                 return rv
 
         def get_changes(self):
-                """A generation function that yields tuples of PackageInfo
+                """A generator function that yields tuples of PackageInfo
                 objects of the form (src_pi, dest_pi).
 
                 If 'src_pi' is None, then 'dest_pi' is the package being
@@ -582,6 +595,21 @@
                                 dinfo = PackageInfo.build_from_fmri(dfmri)
                         yield (sinfo, dinfo)
 
+        def get_editable_changes(self):
+                """This function returns a tuple of generators that yield tuples
+                of the form (src, dest) of the preserved ("editable") files that
+                will be installed, moved, removed, or updated.  The returned
+                tuple of generators is (moved, removed, installed, updated)."""
+
+                return (
+                    (entry for entry in self._preserved["moved"]),
+                    ((entry[0], None) for entry in self._preserved["removed"]),
+                    ((None, entry[0])
+                        for entry in self._preserved["installed"]),
+                    ((entry[0], entry[0])
+                        for entry in self._preserved["updated"]),
+                )
+
         def get_actions(self):
                 """A generator function that yields action change descriptions
                 in the order they will be performed."""
--- a/src/modules/digest.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/digest.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,10 +21,15 @@
 #
 
 #
-# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import hashlib
+try:
+        import pkg.sha512_t
+        sha512_supported = True
+except ImportError:
+        sha512_supported = False
 
 # When running the test suite, we alter our behaviour depending on certain
 # debug flags.
@@ -70,7 +75,16 @@
 # using the "most preferred" hash. See get_preferred_hash(..),
 # get_least_preferred_hash(..) and get_common_preferred_hash(..)
 #
-if DebugValues["hash"] == "sha1+sha256":
+if DebugValues["hash"] == "sha1+sha512_256":
+        # Simulate pkg(5) where SHA-1 and SHA-512/256 are used for publication
+        DEFAULT_HASH_ATTRS = ["hash", "pkg.hash.sha512_256"]
+        DEFAULT_CHASH_ATTRS = ["chash", "pkg.chash.sha512_256"]
+        DEFAULT_CONTENT_HASH_ATTRS = ["elfhash", "pkg.content-hash.sha512_256"]
+        DEFAULT_CHAIN_ATTRS = ["chain", "pkg.chain.sha512_256"]
+        DEFAULT_CHAIN_CHASH_ATTRS = ["chain.chashes",
+            "pkg.chain.chashes.sha512_256"]
+
+elif DebugValues["hash"] == "sha1+sha256":
         # Simulate pkg(5) where SHA-1 and SHA-256 are used for publication
         DEFAULT_HASH_ATTRS = ["hash", "pkg.hash.sha256"]
         DEFAULT_CHASH_ATTRS = ["chash", "pkg.chash.sha256"]
@@ -79,6 +93,14 @@
         DEFAULT_CHAIN_CHASH_ATTRS = ["chain.chashes",
             "pkg.chain.chashes.sha256"]
 
+elif DebugValues["hash"] == "sha512_256":
+        # Simulate pkg(5) where SHA-1 is no longer used for publication
+        DEFAULT_HASH_ATTRS = ["pkg.hash.sha512_256"]
+        DEFAULT_CHASH_ATTRS = ["pkg.chash.sha512_256"]
+        DEFAULT_CONTENT_HASH_ATTRS = ["pkg.content-hash.sha512_256"]
+        DEFAULT_CHAIN_ATTRS = ["pkg.chain.sha512_256"]
+        DEFAULT_CHAIN_CHASH_ATTRS = ["pkg.chain.chashes.sha512_256"]
+
 elif DebugValues["hash"] == "sha256":
         # Simulate pkg(5) where SHA-1 is no longer used for publication
         DEFAULT_HASH_ATTRS = ["pkg.hash.sha256"]
@@ -111,7 +133,7 @@
 # value being computed with this data, along with a 'hexdigest()' method to
 # return the hexadecimal value of the hash.
 #
-# At present, these are all hashlib factory methods. When maintaining these
+# At present, some of these are hashlib factory methods. When maintaining these
 # dictionaries, it is important to *never remove* entries from them, otherwise
 # clients with installed packages will not be able to verify their content when
 # pkg(5) is updated.
@@ -126,6 +148,9 @@
             "pkg.hash.sha256": hashlib.sha256,
         }
 
+        if sha512_supported:
+                HASH_ALGS["pkg.hash.sha512_256"] = pkg.sha512_t.SHA512_t
+
 # A dictionary of the compressed hash attributes we know about.
 CHASH_ALGS = {}
 for key in HASH_ALGS:
@@ -138,7 +163,9 @@
 for key in HASH_ALGS:
         if key == "hash":
                 CONTENT_HASH_ALGS["elfhash"] = HASH_ALGS[key]
-        else:
+        # For now, we don't want content-hash in attributes by default since
+        # the algorithm for it is changing soon.
+        elif DebugValues["hash"]:
                 CONTENT_HASH_ALGS[key.replace("hash", "content-hash")] = \
                     HASH_ALGS[key]
 
@@ -164,20 +191,30 @@
 if DebugValues["hash"] == "sha1":
         RANKED_HASH_ATTRS = ("hash")
 elif DebugValues["hash"] == "sha2":
-        RANKED_HASH_ATTRS = ("pkg.hash.sha256")
+        if sha512_supported:
+                RANKED_HASH_ATTRS = ("pkg.hash.sha512_256",)
+        else:
+                RANKED_HASH_ATTRS = ("pkg.hash.sha256",)
 else:
         RANKED_HASH_ATTRS = (
             "pkg.hash.sha256",
             "hash",
         )
 
+        if sha512_supported:
+                RANKED_HASH_ATTRS = (
+                    "pkg.hash.sha512_256",
+                ) + RANKED_HASH_ATTRS
+
 RANKED_CHASH_ATTRS = tuple(key.replace("hash", "chash")
     for key in RANKED_HASH_ATTRS)
 _content_hash_attrs = []
 for key in RANKED_HASH_ATTRS:
         if key == "hash":
                 _content_hash_attrs.append("elfhash")
-        else:
+        # For now, we don't want content-hash in attributes by default since
+        # the algorithm for it is changing soon.
+        elif DebugValues["hash"]:
                 _content_hash_attrs.append(key.replace("hash", "content-hash"))
 
 RANKED_CONTENT_HASH_ATTRS = tuple(_content_hash_attrs)
--- a/src/modules/lint/engine.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/lint/engine.py	Fri Apr 18 10:14:06 2014 +0530
@@ -40,7 +40,7 @@
 import urllib2
 
 PKG_CLIENT_NAME = "pkglint"
-CLIENT_API_VERSION = 78
+CLIENT_API_VERSION = 79
 pkg.client.global_settings.client_name = PKG_CLIENT_NAME
 
 class LintEngineException(Exception):
--- a/src/modules/misc.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/misc.py	Fri Apr 18 10:14:06 2014 +0530
@@ -560,8 +560,7 @@
         for this data. The keys must be present in 'hash_algs', a dictionary
         mapping keys to the factory methods that are used to create objects
         to compute them. The factory method must take no parameters, and must
-        return an object that has 'update()' and 'hexdigest()' methods. In the
-        current implementation, these are all hashlib factory methods.
+        return an object that has 'update()' and 'hexdigest()' methods.
 
         'hash_func' is provided as a convenience to simply hash the data with
         a single hash algorithm. The value of 'hash_func' should be the factory
@@ -581,7 +580,7 @@
                 length = os.stat(data).st_size
 
         # Setup our results dictionary so that each attribute maps to a
-        # new hashlib object.
+        # new hash object.
         if hash_func:
                 hsh = hash_func()
         else:
@@ -618,7 +617,7 @@
                 return hsh.hexdigest(), content.read()
 
         # The returned dictionary can now be populated with the hexdigests
-        # instead of the hashlib objects themselves.
+        # instead of the hash objects themselves.
         for attr in hash_results:
                 hash_results[attr] = hash_results[attr].hexdigest()
         return hash_results, content.read()
--- a/src/modules/server/depot.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/server/depot.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import cherrypy
@@ -2373,7 +2373,8 @@
                     cfg.PropDefined("cfg_file", allowed=["", "<pathname>"]),
                     cfg.Property("content_root"),
                     cfg.PropList("debug", allowed=["", "headers",
-                        "hash=sha256", "hash=sha1+sha256"]),
+                        "hash=sha256", "hash=sha1+sha256", "hash=sha512_256",
+                        "hash=sha1+sha512_256"]),
                     cfg.PropList("disable_ops"),
                     cfg.PropDefined("image_root", allowed=["",
                         "<abspathname>"]),
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/sha512_t.c	Fri Apr 18 10:14:06 2014 +0530
@@ -0,0 +1,272 @@
+/*
+ * CDDL HEADER START
+ *
+ * The contents of this file are subject to the terms of the
+ * Common Development and Distribution License (the "License").
+ * You may not use this file except in compliance with the License.
+ *
+ * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+ * or http://www.opensolaris.org/os/licensing.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ *
+ * When distributing Covered Code, include this CDDL HEADER in each
+ * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+ * If applicable, add the following below this CDDL HEADER, with the
+ * fields enclosed by brackets "[]" replaced with your own identifying
+ * information: Portions Copyright [yyyy] [name of copyright owner]
+ *
+ * CDDL HEADER END
+ */
+
+/*
+ *  Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+ */
+
+#include <Python.h>
+#include <sha2.h>
+#include "structmember.h"
+
+/*
+ * A hash module that computes SHA512/t. Currently only SHA512/256 and
+ * SHA512/224 are supported.
+ *
+ * The default hash function is SHA512/256. Select SHA512/224 instead by
+ * passing the argument t=224 when you create a hash object.
+ *
+ * Hash objects have methods update(arg), digest() and hexdigest(), and an
+ * attribute hash_size.
+ *
+ * For example:
+ *
+ * >>> import pkg.sha512_t
+ * >>> a = pkg.sha512_t.SHA512_t()
+ * >>> a.update("abc")
+ * >>> a.digest()
+ * 'S\x04\x8e&\x81\x94\x1e\xf9\x9b.)\xb7kL}\xab\xe4\xc2\xd0\xc64\xfcmF\xe0\xe2
+ * \xf11\x07\xe7\xaf#'
+ * More condensed:
+ *
+ * >>> pkg.sha512_t.SHA512_t("abc").hexdigest()
+ * '53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23'
+ *
+ * >>> pkg.sha512_t.SHA512_t(t=224).hexdigest()
+ * '4634270f707b6a54daae7530460842e20e37ed265ceee9a43e8924aa'
+ *
+ */
+
+typedef struct {
+	PyObject_HEAD
+	SHA512_CTX shc;
+	int hash_size;
+} SHA512_t_Object;
+
+static void
+py_dealloc(SHA512_t_Object* self)
+{
+	self->ob_type->tp_free((PyObject*)self);
+}
+
+/*
+ * Create an SHA512_t object, with optional arguments: string message and
+ * hash size.
+ *
+ */
+
+/*ARGSUSED*/
+static int
+py_init(SHA512_t_Object *self, PyObject *args, PyObject *kwds)
+{
+	PyObject *strObj = NULL;
+	char *message;
+	/* Default hash algorithm is SHA512/256. */
+	self->hash_size = 256;
+	static char *kwlist[] = {"message", "t", NULL};
+
+	if (PyArg_ParseTupleAndKeywords(args, kwds, "|Si", kwlist,
+	    &strObj, &self->hash_size) == 0)
+		return (-1);
+
+	if (self->hash_size != 256 && self->hash_size != 224) {
+		PyErr_SetString(PyExc_ValueError, "The module "
+		    "only supports SHA512/256 or SHA512/224.\n");
+		return (-1);
+	}
+
+	SHA512_t_Init(self->hash_size, &self->shc);
+	if (strObj != NULL) {
+		if ((message = PyBytes_AsString(strObj)) == NULL)
+			return (-1);
+		SHA512_t_Update(&self->shc, message, strlen(message));
+	}
+	return (0);
+}
+
+/*
+ * Update the hash object with a string object. Repeated calls are equivalent
+ * to a single call with the concatenation of all the strings.
+ *
+ */
+
+static char py_update_doc[] = "\n\
+Update the hash object with the string argument.\n\
+\n\
+@param message: input message to digest\n\
+\n\
+@return: None\n\
+";
+
+/*ARGSUSED*/
+static PyObject *
+py_update(SHA512_t_Object* self, PyObject *args)
+{
+	PyObject *strObj = NULL;
+	char *message;
+
+	if (!PyArg_ParseTuple(args, "S", &strObj))
+		return (NULL);
+
+	if (strObj != NULL) {
+		if ((message = PyBytes_AsString(strObj)) == NULL)
+			return (NULL);
+		SHA512_t_Update(&self->shc, message, strlen(message));
+	}
+	Py_RETURN_NONE;
+}
+
+/*
+ * Return the digest of the strings passed to the py_update() method so far.
+ *
+ */
+
+static char py_digest_doc[] = "\n\
+Return the digest of the strings passed to the update() method so far.\n\
+\n\
+@return: string of digest of messages\n\
+";
+
+/*ARGSUSED*/
+static PyObject *
+py_digest(SHA512_t_Object* self, PyObject *args)
+{
+	int size = self->hash_size / 8;
+	unsigned char buffer[size];
+	SHA512_CTX shc;
+	shc = self->shc;
+	SHA512_t_Final(buffer, &shc);
+	return (PyString_FromStringAndSize((const char *)buffer, size));
+}
+
+/*
+ * Return a string with a hex representation of the digest of the strings
+ * passed to the py_update() method so far.
+ *
+ */
+
+static char py_hexdigest_doc[] = "\n\
+Return hexadecimal digest of the strings passed to the update() method \
+so far.\n\
+\n\
+@return: string of double length and hexadecimal digest of the messages\n\
+";
+
+/*ARGSUSED*/
+static PyObject *
+py_hexdigest(SHA512_t_Object* self, PyObject *args)
+{
+	int i;
+	int buffer_size = self->hash_size / 8;
+	int result_size = self->hash_size / 4;
+	unsigned char buffer[buffer_size];
+	unsigned char result[result_size];
+	char hexchars[16] = "0123456789abcdef";
+	SHA512_CTX shc;
+	shc = self->shc;
+	SHA512_t_Final(buffer, &shc);
+	for (i = 0; i < buffer_size; i++) {
+		result[2 * i] = \
+		    hexchars[(buffer[i] & 0xf0) >> 4];
+		result[2 * i + 1] = \
+		    hexchars[buffer[i] & 0x0f];
+	}
+	return (PyString_FromStringAndSize((const char *)result, result_size));
+}
+
+static PyMemberDef SHA512_t_members[] = {
+	{ "hash_size", T_INT, offsetof(SHA512_t_Object, hash_size), 0,
+	    "hash size"},
+	{ NULL }  /* Sentinel */
+};
+
+static PyMethodDef SHA512_t_methods[] = {
+	{ "update", (PyCFunction)py_update, METH_VARARGS,
+	    py_update_doc },
+	{ "digest", (PyCFunction)py_digest, METH_NOARGS,
+	    py_digest_doc },
+	{ "hexdigest", (PyCFunction)py_hexdigest, METH_NOARGS,
+	    py_hexdigest_doc },
+	{ NULL }  /* Sentinel */
+};
+
+static PyTypeObject SHA512_t_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,	/* ob_size */
+	"sha512_t.SHA512_t",	/* tp_name */
+	sizeof (SHA512_t_Object),	/* tp_basicsize */
+	0,	/* tp_itemsize */
+	(destructor)py_dealloc,	/* tp_dealloc */
+	0,	/* tp_print */
+	0,	/* tp_getattr */
+	0,	/* tp_setattr */
+	0,	/* tp_compare */
+	0,	/* tp_repr */
+	0,	/* tp_as_number */
+	0,	/* tp_as_sequence */
+	0,	/* tp_as_mapping */
+	0,	/* tp_hash */
+	0,	/* tp_call */
+	0,	/* tp_str */
+	0,	/* tp_getattro */
+	0,	/* tp_setattro */
+	0,	/* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
+	"SHA512/t objects",	/* tp_doc */
+	0,	/* tp_traverse */
+	0,	/* tp_clear */
+	0,	/* tp_richcompare */
+	0,	/* tp_weaklistoffset */
+	0,	/* tp_iter */
+	0,	/* tp_iternext */
+	SHA512_t_methods,	/* tp_methods */
+	SHA512_t_members,	/* tp_members */
+	0,	/* tp_getset */
+	0,	/* tp_base */
+	0,	/* tp_dict */
+	0,	/* tp_descr_get */
+	0,	/* tp_descr_set */
+	0,	/* tp_dictoffset */
+	(initproc)py_init,	/* tp_init */
+};
+
+static PyMethodDef sha512_t_methods[] = {
+	{ NULL }  /* Sentinel */
+};
+
+PyMODINIT_FUNC
+initsha512_t(void)
+{
+	PyObject* m;
+
+	SHA512_t_Type.tp_new = PyType_GenericNew;
+	if (PyType_Ready(&SHA512_t_Type) < 0)
+		return;
+
+	m = Py_InitModule3("sha512_t", sha512_t_methods,
+	    "This module provides SHA512_t hashing.");
+
+	if (m == NULL)
+		return;
+
+	Py_INCREF(&SHA512_t_Type);
+	PyModule_AddObject(m, "SHA512_t", (PyObject *)&SHA512_t_Type);
+}
--- a/src/modules/smf.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/modules/smf.py	Fri Apr 18 10:14:06 2014 +0530
@@ -52,7 +52,9 @@
 
 svcprop_path = "/usr/bin/svcprop"
 svcadm_path  = "/usr/sbin/svcadm"
+svccfg_path = "/usr/sbin/svccfg"
 svcs_path = "/usr/bin/svcs"
+zlogin_path = "/usr/sbin/zlogin"
 
 class NonzeroExitException(Exception):
         def __init__(self, cmd, return_code, output):
@@ -71,13 +73,19 @@
                     (self.cmd, self.return_code, self.output)
 
 
-def __call(args):
+def __call(args, zone=None):
         # a way to invoke a separate executable for testing
         cmds_dir = DebugValues.get_value("smf_cmds_dir")
         if cmds_dir:
                 args = (
                     os.path.join(cmds_dir,
                     args[0].lstrip("/")),) + args[1:]
+        if zone:
+                cmd = DebugValues.get_value("bin_zlogin")
+                if cmd is None:
+                        cmd = zlogin_path
+                args = (cmd, zone) + args
+
         try:
                 proc = subprocess.Popen(args, stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)
@@ -90,10 +98,10 @@
                 raise NonzeroExitException(args, ret, buf)
         return buf
 
-def get_state(fmri):
+def get_state(fmri, zone=None):
         """ return state of smf service """
 
-        props = get_props(fmri)
+        props = get_props(fmri, zone=zone)
         if not props:
                 return SMF_SVC_UNKNOWN
 
@@ -110,10 +118,10 @@
                 return SMF_SVC_ENABLED
         return SMF_SVC_DISABLED
 
-def is_disabled(fmri):
-        return get_state(fmri) < SMF_SVC_TMP_ENABLED
+def is_disabled(fmri, zone=None):
+        return get_state(fmri, zone=zone) < SMF_SVC_TMP_ENABLED
 
-def check_fmris(attr, fmris):
+def check_fmris(attr, fmris, zone=None):
         """ Walk a set of fmris checking that each is fully specifed with
         an instance.
         If an FMRI is not fully specified and does not contain at least
@@ -142,7 +150,7 @@
                 if is_glob:
                         cmd = (svcs_path, "-H", "-o", "fmri", "%s" % fmri)
                         try:
-                                instances = __call(cmd)
+                                instances = __call(cmd, zone=zone)
                                 for instance in instances:
                                         fmris.add(instance.rstrip())
                         except NonzeroExitException:
@@ -155,11 +163,11 @@
                             "for %(fmri)s.") % locals())
         return fmris
 
-def get_props(svcfmri):
+def get_props(svcfmri, zone=None):
         args = (svcprop_path, "-c", svcfmri)
 
         try:
-                buf = __call(args)
+                buf = __call(args, zone=zone)
         except NonzeroExitException:
                 return {} # empty output == not installed
 
@@ -168,18 +176,23 @@
             for l in buf
         ])
 
-def get_prop(fmri, prop):
+def set_prop(fmri, prop, value, zone=None):
+        args = (svccfg_path, "-s", fmri, "setprop", "%s=%s" % (prop, value))
+        __call(args, zone=zone)
+
+def get_prop(fmri, prop, zone=None):
         args = (svcprop_path, "-c", "-p", prop, fmri)
-        buf = __call(args)
+        buf = __call(args, zone=zone)
         assert len(buf) == 1, "Was expecting one entry, got:%s" % buf
         buf = buf[0].rstrip("\n")
         return buf
 
-def enable(fmris, temporary=False, sync_timeout=0):
+def enable(fmris, temporary=False, sync_timeout=0, zone=None):
         if not fmris:
                 return
         if isinstance(fmris, basestring):
                 fmris = (fmris,)
+
         args = [svcadm_path, "enable"]
         if sync_timeout:
                 args.append("-s")
@@ -188,9 +201,9 @@
         if temporary:
                 args.append("-t")
         # fmris could be a list so explicit cast is necessary
-        __call(tuple(args) + tuple(fmris))
+        __call(tuple(args) + tuple(fmris), zone=zone)
 
-def disable(fmris, temporary=False, sync_timeout=0):
+def disable(fmris, temporary=False, sync_timeout=0, zone=None):
         if not fmris:
                 return
         if isinstance(fmris, basestring):
@@ -201,18 +214,18 @@
         if temporary:
                 args.append("-t")
         # fmris could be a list so explicit cast is necessary
-        __call(tuple(args) + tuple(fmris))
+        __call(tuple(args) + tuple(fmris), zone=zone)
 
-def mark(state, fmris):
+def mark(state, fmris, zone=None):
         if not fmris:
                 return
         if isinstance(fmris, basestring):
                 fmris = (fmris,)
         args = [svcadm_path, "mark", state]
         # fmris could be a list so explicit cast is necessary
-        __call(tuple(args) + tuple(fmris))
+        __call(tuple(args) + tuple(fmris), zone=zone)
 
-def refresh(fmris, sync_timeout=0):
+def refresh(fmris, sync_timeout=0, zone=None):
         if not fmris:
                 return
         if isinstance(fmris, basestring):
@@ -223,9 +236,9 @@
                 if sync_timeout != -1:
                         args.append("-T %d" % sync_timeout)
         # fmris could be a list so explicit cast is necessary
-        __call(tuple(args) + tuple(fmris))
+        __call(tuple(args) + tuple(fmris), zone=zone)
 
-def restart(fmris, sync_timeout=0):
+def restart(fmris, sync_timeout=0, zone=None):
         if not fmris:
                 return
         if isinstance(fmris, basestring):
@@ -236,4 +249,4 @@
                 if sync_timeout != -1:
                         args.append("-T %d" % sync_timeout)
         # fmris could be a list so explicit cast is necessary
-        __call(tuple(args) + tuple(fmris))
+        __call(tuple(args) + tuple(fmris), zone=zone)
--- a/src/pkg/manifests/package:pkg.p5m	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/pkg/manifests/package:pkg.p5m	Fri Apr 18 10:14:06 2014 +0530
@@ -34,6 +34,7 @@
 file path=$(PYDIRVP)/pkg-0.1-py2.6.egg-info
 dir  path=$(PYDIRVP)/pkg/64
 file path=$(PYDIRVP)/pkg/64/_varcet.so
+file path=$(PYDIRVP)/pkg/64/sha512_t.so
 file path=$(PYDIRVP)/pkg/64/sysattr.so
 file path=$(PYDIRVP)/pkg/__init__.py
 file path=$(PYDIRVP)/pkg/_varcet.so
@@ -193,6 +194,7 @@
 file path=$(PYDIRVP)/pkg/server/query_parser.py
 file path=$(PYDIRVP)/pkg/server/repository.py
 file path=$(PYDIRVP)/pkg/server/transaction.py
+file path=$(PYDIRVP)/pkg/sha512_t.so
 file path=$(PYDIRVP)/pkg/smf.py
 file path=$(PYDIRVP)/pkg/solver.so
 file path=$(PYDIRVP)/pkg/sysattr.so
--- a/src/pkgdep.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/pkgdep.py	Fri Apr 18 10:14:06 2014 +0530
@@ -43,7 +43,7 @@
 import pkg.publish.dependencies as dependencies
 from pkg.misc import msg, emsg, PipeError
 
-CLIENT_API_VERSION = 78
+CLIENT_API_VERSION = 79
 PKG_CLIENT_NAME = "pkgdepend"
 
 DEFAULT_SUFFIX = ".res"
--- a/src/setup.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/setup.py	Fri Apr 18 10:14:06 2014 +0530
@@ -467,6 +467,9 @@
 pkg_locales = \
     'ar ca cs de es fr he hu id it ja ko nl pl pt_BR ru sk sv zh_CN zh_HK zh_TW'.split()
 
+sha512_t_srcs = [
+        'modules/sha512_t.c'
+        ]
 sysattr_srcs = [
         'modules/sysattr.c'
         ]
@@ -657,6 +660,12 @@
                             ['-I' + self.escape(get_python_inc())] + \
                             ["%s%s" % ("-l", k) for k in sysattr_libraries] + \
                             sysattr_srcs
+                        sha512_tcmd = lint + lint_flags + \
+                            ['-D_FILE_OFFSET_BITS=64'] + \
+                            ["%s%s" % ("-I", k) for k in include_dirs] + \
+                            ['-I' + self.escape(get_python_inc())] + \
+                            ["%s%s" % ("-l", k) for k in sha512_t_libraries] + \
+                            sha512_t_srcs
 
                         print(" ".join(archcmd))
                         os.system(" ".join(archcmd))
@@ -674,6 +683,8 @@
                         os.system(" ".join(syscallatcmd))
                         print(" ".join(sysattrcmd))
                         os.system(" ".join(sysattrcmd))
+                        print(" ".join(sha512_tcmd))
+                        os.system(" ".join(sha512_tcmd))
 
 
 # Runs both C and Python lint
@@ -1574,6 +1585,7 @@
         ]
 elf_libraries = None
 sysattr_libraries = None
+sha512_t_libraries = None
 data_files = web_files
 cmdclasses = {
         'install': install_func,
@@ -1723,6 +1735,7 @@
         if osname == 'sunos':
             elf_libraries += [ 'md' ]
             sysattr_libraries = [ 'nvpair' ]
+            sha512_t_libraries = [ 'md' ]
             ext_modules += [
                     Extension(
                             'arch',
@@ -1758,6 +1771,16 @@
                             define_macros = [('_FILE_OFFSET_BITS', '64')],
                             build_64 = True
                             ),
+                    Extension(
+                            'sha512_t',
+                            sha512_t_srcs,
+                            include_dirs = include_dirs,
+                            libraries = sha512_t_libraries,
+                            extra_compile_args = compile_args,
+                            extra_link_args = link_args,
+                            define_macros = [('_FILE_OFFSET_BITS', '64')],
+                            build_64 = True
+                            ),
                     ]
         else:
             elf_libraries += [ 'ssl' ]
--- a/src/svc/pkg-depot.xml	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/svc/pkg-depot.xml	Fri Apr 18 10:14:06 2014 +0530
@@ -19,7 +19,7 @@
 
 	CDDL HEADER END
 
-	Copyright (c) 2013, Oracle and/or its affiliates.  All rights reserved.
+	Copyright (c) 2013, 2014, Oracle and/or its affiliates.  All rights reserved.
 
 	NOTE:  This service manifest is not editable; its contents will
 	be overwritten by package or patch operations, including
@@ -152,8 +152,22 @@
                         <propval name='allow_refresh' type='boolean' value='false' />
 
                         <propval name='value_authorization' type='astring'
-                                value='solaris.smf.value.pkg-depot' />
+                                value='solaris.smf.value.pkg-depot-config' />
 
+                        <propval name='https' type='boolean'
+                                value='false' />
+                        <propval name='ssl_cert_file' type='astring'
+                                value='' />
+                        <propval name='ssl_key_file' type='astring'
+                                value='' />
+                        <propval name='ssl_ca_cert_file' type='astring'
+                                value='' />
+                        <propval name='ssl_ca_key_file' type='astring'
+                                value='' />
+                        <propval name='ssl_cert_key_dir' type='astring'
+                                value='/var/cache/pkg/depot/cert_key_dir' />
+                        <propval name='ssl_cert_chain_file' type='astring'
+                                value='' />
 
                 </property_group>
 
--- a/src/svc/svc-pkg-depot	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/svc/svc-pkg-depot	Fri Apr 18 10:14:06 2014 +0530
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 #
-# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 . /lib/svc/share/smf_include.sh
@@ -111,20 +111,48 @@
 }
 
 function run_depot {
-
+        if [ "${depot_https}" == "true" ]; then
+                https_cmd="--https"
+                smf_fmri_cmd="--smf-fmri ${SMF_FMRI}"
+                if ! [ -z "${depot_ssl_cert_file}" ]; then
+                        ssl_cert_file_cmd="--cert ${depot_ssl_cert_file}"
+                fi
+                if ! [ -z "${depot_ssl_key_file}" ]; then
+                        ssl_key_file_cmd="--key ${depot_ssl_key_file}"
+                fi
+                if ! [ -z "${depot_ssl_ca_cert_file}" ]; then
+                        ssl_ca_cert_file_cmd="--ca-cert ${depot_ssl_ca_cert_file}"
+                fi
+                if ! [ -z "${depot_ssl_ca_key_file}" ]; then
+                        ssl_ca_key_file_cmd="--ca-key ${depot_ssl_ca_key_file}"
+                fi
+                if ! [ -z "${depot_ssl_cert_key_dir}" ]; then
+                        ssl_cert_key_dir_cmd="--cert-key-dir ${depot_ssl_cert_key_dir}"
+                fi
+                if ! [ -z "${depot_ssl_cert_chain_file}" ]; then
+                        ssl_cert_chain_file_cmd="--cert-chain ${depot_ssl_cert_chain_file}"
+                fi
+        fi
         /usr/lib/pkg.depot-config \
-               -S \
-               -c ${depot_cache_dir} \
-               -h ${depot_host} \
-               -l ${depot_log_dir} \
-               -p ${depot_port} \
-               -r ${depot_runtime_dir} \
-               -s ${depot_cache_max} \
-               -T ${depot_template_dir} \
-               -t apache2 \
-               ${depot_allow_refresh}
+            -S \
+            -c ${depot_cache_dir} \
+            -h ${depot_host} \
+            -l ${depot_log_dir} \
+            -p ${depot_port} \
+            -r ${depot_runtime_dir} \
+            -s ${depot_cache_max} \
+            -T ${depot_template_dir} \
+            -t apache2 ${https_cmd} \
+            ${smf_fmri_cmd} \
+            ${ssl_cert_file_cmd} \
+            ${ssl_key_file_cmd} \
+            ${ssl_ca_cert_file_cmd} \
+            ${ssl_ca_key_file_cmd} \
+            ${ssl_cert_key_dir_cmd} \
+            ${ssl_cert_chain_file_cmd} \
+            ${depot_allow_refresh}
 	failure=$?
-	if [ $? -ne 0 ] ; then
+	if [ ${failure} -ne 0 ] ; then
 		# make sure we leave nothing behind
 		kill_apache
 		kill_htcacheclean
@@ -207,10 +235,11 @@
 check_prop ${depot_host} config/host
 check_prop ${depot_port} config/port
 check_prop ${depot_log_dir} config/log_dir
-check_prop ${deport_template_dir} config/template_dir
+check_prop ${depot_template_dir} config/template_dir
 check_prop ${depot_runtime_dir} config/runtime_dir
 check_prop ${depot_cache_dir} config/cache_dir
 check_prop ${depot_cache_max} config/cache_max
+check_prop ${depot_https} config/https
 check_prop ${depot_allow_refresh} config/allow_refresh
 if [ "${depot_allow_refresh}" == "true" ] ; then
 	depot_allow_refresh="-A"
@@ -221,6 +250,7 @@
 FAILED_TO_RUN="Server failed to %s. Check the SMF service log or the\
  error log at ${depot_log_dir}/error_log for more information, if any."
 
+
 case "$1" in
 "start")
 	cmd="start"
@@ -251,8 +281,18 @@
 	cmd="stop"
 	kill_htcacheclean
         emsg=$(/usr/bin/printf ${FAILED_TO_RUN} stop)
-	${HTTPD} -f ${depot_runtime_dir}/depot_httpd.conf \
-            ${STARTUP_OPTIONS} -k ${cmd} 2>&1
+        # If the https service is on and the user has deleted the certificate
+        # directory, the stop method would fail because the certificate and
+        # key files cannot be found. Rather than triggering that error, kill
+        # the apache instance manually.
+        if [[ "${depot_https}" == "true" && \
+            ! ( -f "${depot_ssl_cert_file}" && \
+            -f "${depot_ssl_key_file}" ) ]]; then
+                kill_apache
+        else
+	        ${HTTPD} -f ${depot_runtime_dir}/depot_httpd.conf \
+                    ${STARTUP_OPTIONS} -k ${cmd} 2>&1
+        fi
 	check_apache_failure $? $emsg
 	;;
 *)
--- a/src/sysrepo.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/sysrepo.py	Fri Apr 18 10:14:06 2014 +0530
@@ -59,7 +59,7 @@
 orig_cwd = None
 
 PKG_CLIENT_NAME = "pkg.sysrepo"
-CLIENT_API_VERSION = 78
+CLIENT_API_VERSION = 79
 pkg.client.global_settings.client_name = PKG_CLIENT_NAME
 
 # exit codes
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/api/t_pkg_api_revert.py	Fri Apr 18 10:14:06 2014 +0530
@@ -0,0 +1,87 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
+#
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+import pkg5unittest
+
+
+class TestPkgApiRevert(pkg5unittest.SingleDepotTestCase):
+        # Only start/stop the depot once (instead of for every test)
+        persistent_setup = True
+
+        pkgs = """
+            open [email protected],5.11-0
+            add dir mode=0755 owner=root group=bin path=etc
+            add file etc/file1 mode=0555 owner=root group=bin path=etc/file1
+            add file etc/file2 mode=0555 owner=root group=bin path=etc/file2
+            close
+            open [email protected],5.11-0
+            add dir mode=0755 owner=root group=bin path=etc
+            add file etc/file3 mode=0555 owner=root group=bin path=etc/file3
+            close
+            """
+
+        misc_files = ["etc/file1", "etc/file2", "etc/file3"]
+
+        def setUp(self):
+                pkg5unittest.SingleDepotTestCase.setUp(self)
+                self.make_misc_files(self.misc_files)
+                self.plist = self.pkgsend_bulk(self.rurl, self.pkgs)
+
+        def test_changed_packages(self):
+                """Verify that pkg revert correctly marks changed packages."""
+
+                api_inst = self.image_create(self.rurl)
+
+                # try reverting non-editable files
+                self._api_install(api_inst, ["[email protected]", "[email protected]"])
+
+                # remove a file from pkg A only
+                self.file_remove("etc/file2")
+
+                # make sure we broke only pkg A
+                self.pkg("verify A", exit=1)
+                self.pkg("verify B")
+
+                # now see if revert when files in both packages are named only
+                # marks pkg A as changed
+                self._api_revert(api_inst, ["/etc/file2"], noexecute=True)
+                plan = api_inst.describe()
+                pfmri = self.plist[0]
+                self.assertEqualDiff([(pfmri, pfmri)], [
+                    (str(entry[0]), str(entry[1]))
+                    for entry in plan.plan_desc
+                ])
+
+                # actually execute it, then check verify passes
+                self._api_revert(api_inst, ["/etc/file2", "/etc/file3"])
+                self.pkg("verify")
+
+
+if __name__ == "__main__":
+        unittest.main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/api/t_sha512_t.py	Fri Apr 18 10:14:06 2014 +0530
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+#
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+import pkg5unittest
+
+import unittest
+import pkg.sha512_t as sha512_t
+
+class TestPkgSha(pkg5unittest.Pkg5TestCase):
+        """A class that tests the sha512_t module."""
+
+        def test_basic(self):
+                # The expected values are from the examples:
+                # http://csrc.nist.gov/groups/ST/toolkit/documents/Examples/SHA512_224.pdf
+                # http://csrc.nist.gov/groups/ST/toolkit/documents/Examples/SHA512_256.pdf
+
+                # Test SHA512/256
+                # Test hexdigest()
+                a = sha512_t.SHA512_t()
+                a.update("abc")
+                expected = "53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+                
+                a = sha512_t.SHA512_t("abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu")
+                expected = "3928e184fb8690f840da3988121d31be65cb9d3ef83ee6146feac861e19b563a"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                # Test the length of the output of hexdigest()
+                output = len(sha512_t.SHA512_t("0.861687995815").hexdigest()) 
+                self.assertEqualDiff(64, output)
+                output = len(sha512_t.SHA512_t("0.861687995815", 224).hexdigest()) 
+                self.assertEqualDiff(56, output)
+
+                # Test digest()
+                a = sha512_t.SHA512_t()
+                a.update("abc")
+                expected = "S\x04\x8e&\x81\x94\x1e\xf9\x9b.)\xb7kL}\xab\xe4\xc2\xd0\xc64\xfcmF\xe0\xe2\xf11\x07\xe7\xaf#"
+                output = a.digest()
+                self.assertEqualDiff(expected, output)
+
+                # Test the length of the output of digest()
+                output = len(sha512_t.SHA512_t("0.861687995815").digest()) 
+                self.assertEqualDiff(32, output)
+                output = len(sha512_t.SHA512_t("0.861687995815", 224).digest()) 
+                self.assertEqualDiff(28, output)
+
+                # Test update()
+                a = sha512_t.SHA512_t("a")
+                a.update("bc")
+                expected = "53048e2681941ef99b2e29b76b4c7dabe4c2d0c634fc6d46e0e2f13107e7af23"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                a = sha512_t.SHA512_t("a")
+                a.digest()
+                a.update("b")
+                a.hexdigest()
+                a.update("c")
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                # Test hash_size
+                a = sha512_t.SHA512_t()
+                self.assertEqualDiff("256", a.hash_size)
+
+                # Test SHA512/224
+                a = sha512_t.SHA512_t(t=224)
+                a.update("abc")
+                expected = "4634270f707b6a54daae7530460842e20e37ed265ceee9a43e8924aa"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                a = sha512_t.SHA512_t("abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu", t=224)
+                expected = "23fec5bb94d60b23308192640b0c453335d664734fe40e7268674af9"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                # Test positional arguments
+                a = sha512_t.SHA512_t("abc", 224)
+                expected = "4634270f707b6a54daae7530460842e20e37ed265ceee9a43e8924aa"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                # Test keyword arguments
+                a = sha512_t.SHA512_t(message="abc", t=224)
+                expected = "4634270f707b6a54daae7530460842e20e37ed265ceee9a43e8924aa"
+                output = a.hexdigest()
+                self.assertEqualDiff(expected, output)
+
+                # Test scalability
+                a = sha512_t.SHA512_t()
+                for i in xrange(1000000):
+                        a.update("abc")
+                a.hexdigest()
+
+                # Test bad input
+                self.assertRaises(TypeError, sha512_t.SHA512_t, 8)
+                self.assertRaises(ValueError, sha512_t.SHA512_t, t=160)
+                self.assertRaises(TypeError, sha512_t.SHA512_t.update, 8)
+
+
+if __name__ == "__main__":
+        unittest.main()
--- a/src/tests/api/t_smf.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/api/t_smf.py	Fri Apr 18 10:14:06 2014 +0530
@@ -32,6 +32,8 @@
 
 import pkg.smf as smf
 
+from pkg.client.debugvalues import DebugValues
+
 class TestSMF(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
         persistent_setup = True
@@ -113,7 +115,13 @@
 esac
 echo $FMRI
 exit $RETURN
-"""
+""",
+                "bin_zlogin" : \
+"""#!/bin/ksh
+zone_name=$1
+shift
+echo "zlogin $zone_name" >> $PKG_TEST_DIR/zlogin_arguments
+($*)""",
 }
         misc_files = { \
                 "svcprop_enabled" :
@@ -337,9 +345,9 @@
 stop/timeout_seconds count 0
 stop/type astring method""",
 
-
                 "empty": "",
 }
+
         def setUp(self):
                 pkg5unittest.SingleDepotTestCase.setUp(self)
                 self.make_misc_files(self.misc_files, prefix="testdata")
@@ -546,3 +554,74 @@
                 self.file_contains(svcadm_output,
                     "svcadm mark degraded svc:/system/test_enable_svc:default foo")
                 os.unlink(svcadm_output)
+
+        def test_zone_actuators(self):
+                """Test that the smf interface for zones performs as
+                expected."""
+
+                testdata_dir = os.path.join(self.test_root, "testdata")
+                svcadm_output = os.path.join(testdata_dir,
+                    "svcadm_arguments")
+                zlogin_output = os.path.join(testdata_dir,
+                    "zlogin_arguments")
+                os.environ["PKG_TEST_DIR"] = testdata_dir
+                DebugValues["bin_zlogin"] = os.path.join(self.test_root,
+                    "smf_cmds", "bin_zlogin")
+
+                zone = "z1"
+
+                smf.restart("svc:/system/test_restart_svc:default", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+                self.file_contains(svcadm_output,
+                    "svcadm restart svc:/system/test_restart_svc:default")
+                os.unlink(svcadm_output)
+
+                smf.refresh("svc:/system/test_refresh_svc:default", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+                self.file_contains(svcadm_output,
+                    "svcadm refresh svc:/system/test_refresh_svc:default")
+                os.unlink(svcadm_output)
+
+                smf.mark("maintenance", "svc:/system/test_mark_svc:default", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+                self.file_contains(svcadm_output,
+                    "svcadm mark maintenance svc:/system/test_mark_svc:default")
+                os.unlink(svcadm_output)
+
+                smf.enable("svc:/system/test_enable_svc:default", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+                self.file_contains(svcadm_output,
+                    "svcadm enable svc:/system/test_enable_svc:default")
+                os.unlink(svcadm_output)
+
+                smf.disable("svc:/system/test_disable_svc:default", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+                self.file_contains(svcadm_output,
+                    "svcadm disable -s svc:/system/test_disable_svc:default")
+                os.unlink(svcadm_output)
+
+                os.environ["PKG_SVCPROP_OUTPUT"] = "svcprop_enabled"
+                smf.get_prop("foo", "start/timeout_seconds", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+
+                smf.is_disabled("foo", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
+
+                smf.get_state("foo", zone=zone)
+                self.file_contains(zlogin_output,
+                    "zlogin "+zone)
+                os.unlink(zlogin_output)
--- a/src/tests/cli/t_actuators.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_actuators.py	Fri Apr 18 10:14:06 2014 +0530
@@ -78,7 +78,23 @@
 esac
 echo $FMRI
 exit $RETURN
-"""
+""",
+                "bin_zlogin" : \
+"""#!/bin/sh
+# print full cmd line, then execute in gz what zlogin would execute in ngz
+echo $0 "$@" >> $PKG_TEST_DIR/zlogin_arguments
+shift
+($*)
+""",
+                "bin_zoneadm" : \
+"""#!/bin/sh
+cat <<-EOF
+0:global:running:/::solaris:shared:-:none:
+1:z1:running:$PKG_TZR1::solaris:excl:-::
+2:z2:installed:$PKG_TZR2::solaris:excl:-::
+EOF
+exit 0"""
+
 }
         misc_files = { \
                 "svcprop_enabled" :
@@ -462,6 +478,69 @@
                     "svc:/system/test_multi_svc2:default")
                 os.unlink(svcadm_output)
 
+        def __create_zone(self, zname, rurl):
+                """Create a fake zone linked image and attach to parent."""
+
+                zone_path = os.path.join(self.img_path(0), zname)
+                os.mkdir(zone_path)
+                # zone images are rooted at <zonepath>/root
+                zimg_path = os.path.join(zone_path, "root")
+                self.image_create(repourl=rurl, img_path=zimg_path)
+                self.pkg("-R %s attach-linked -c system:%s %s" %
+                    (self.img_path(0), zname, zimg_path))
+
+                return zone_path
+                
+        def test_zone_actuators(self):
+                """test zone actuators"""
+
+                rurl = self.dc.get_repo_url()
+                plist = self.pkgsend_bulk(rurl, self.pkg_list)
+                self.image_create(rurl)
+
+                # Create fake zone images.
+                # We have one "running" zone (z1) and one "installed" zone (z2).
+                # The zone actuators should only be run in the running zone.
+
+                # set env variable for fake zoneadm to print correct zonepaths
+                os.environ["PKG_TZR1"] = self.__create_zone("z1", rurl)
+                os.environ["PKG_TZR2"] = self.__create_zone("z2", rurl)
+
+                os.environ["PKG_TEST_DIR"] = self.testdata_dir
+                os.environ["PKG_SVCADM_EXIT_CODE"] = "0"
+                os.environ["PKG_SVCPROP_EXIT_CODE"] = "0"
+
+                # Prepare fake zone and smf cmds.
+                svcadm_output = os.path.join(self.testdata_dir,
+                    "svcadm_arguments")
+                zlogin_output = os.path.join(self.testdata_dir,
+                    "zlogin_arguments")
+                bin_zlogin = os.path.join(self.test_root,
+                    "smf_cmds", "bin_zlogin")
+                bin_zoneadm = os.path.join(self.test_root,
+                    "smf_cmds", "bin_zoneadm")
+
+                # make it look like our test service is enabled
+                os.environ["PKG_SVCPROP_OUTPUT"] = "svcprop_enabled"
+
+                # test to see if our test service is restarted on install
+                self.pkg("--debug bin_zoneadm='%s' "
+                    "--debug bin_zlogin='%s' "
+                    "install -rv [email protected]" % (bin_zoneadm, bin_zlogin))
+                # test that actuator in global zone and z2 is run
+                self.file_contains(svcadm_output,
+                    "svcadm restart svc:/system/test_restart_svc:default",
+                    appearances=2)
+                os.unlink(svcadm_output)
+                # test that actuator in non-global zone is run
+                self.file_contains(zlogin_output,
+                    "zlogin z1")
+                self.file_doesnt_contain(zlogin_output,
+                    "zlogin z2")
+                self.file_contains(zlogin_output,
+                    "svcadm restart svc:/system/test_restart_svc:default")
+                os.unlink(zlogin_output)
+
 class TestPkgReleaseNotes(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
         persistent_setup = True
--- a/src/tests/cli/t_depot_config.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_depot_config.py	Fri Apr 18 10:14:06 2014 +0530
@@ -22,7 +22,7 @@
 #
 
 #
-# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -36,24 +36,16 @@
 import time
 import unittest
 import urllib2
+import certgenerator
+import shutil
 
+from pkg.client.debugvalues import DebugValues
 import pkg.fmri
 
 HTTPDEPOT_USER = "pkg5srv"
 
-class TestHttpDepot(pkg5unittest.ApacheDepotTestCase):
-        """Tests that exercise the pkg.depot-config CLI as well as checking the
-        functionality of the depot-config itself. This test class will
-        fail if not run as root, since many of the tests use 'pkg.depot-config -a'
-        which will attempt to chown a directory to pkg5srv:pkg5srv.
 
-        The default_svcs_conf having an instance name of 'usr' is not a
-        coincidence: we use it there so that we catch RewriteRules that
-        mistakenly try to serve content from the root filesystem ('/') rather
-        than from beneath our DocumentRoot (assuming that test systems always
-        have a /usr directory)
-        """
-
+class _Apache(object):
         # An array that can be used to build our svcs(1) wrapper.
         default_svcs_conf = [
             # FMRI                                   STATE
@@ -111,10 +103,159 @@
 
         misc_files = ["tmp/sample", "tmp/updated", "tmp/another", "tmp/new"]
 
+        _svcs_template = \
+"""#!/usr/bin/ksh93
+#
+# This script produces false svcs(1) output, using
+# a list of space separated strings, with each string
+# of the format <fmri>%%<state>
+#
+# Since the string here is generated from a Python program, we have to escape
+# all 'percent' characters.
+#
+# eg.
+# SERVICE_STATUS="svc:/application/pkg/server:foo%%online svc:/application/pkg/server:default%%offline svc:/application/pkg/server:usr%%online"
+# We expect to be called with 'svcs -H -o fmri <fmri>' but completely ignore
+# the <fmri> argument.
+#
+SERVICE_STATUS="%s"
+
+set -- `getopt o:H $*`
+for i in $* ; do
+    case $i in
+        -H)    minus_h=$i; shift;;
+        -o)    minus_o=$2; shift;;
+        *)     break;;
+    esac
+done
+
+if [ "${minus_o}" ]; then
+    if [ -z "${minus_h}" ]; then
+        echo "FMRI"
+    fi
+    for service in $SERVICE_STATUS ; do
+        echo $service | sed -e 's/%%/ /' | read fmri state
+        echo $fmri
+    done
+    exit 0
+fi
+
+if [ -z "${minus_h}" ]; then
+    printf "%%-14s%%6s    %%s\n" STATE STIME FMRI
+fi
+for service in $SERVICE_STATUS ; do
+    echo $service | sed -e 's/%%/ /' | read fmri state
+    printf "%%-14s%%9s %%s\n" $state 00:00:00 $fmri
+done
+"""
+
+        _svcprop_template = \
+"""#!/usr/bin/ksh93
+#
+# This script produces false svcprop(1) output, using
+# a list of space separated strings, with each string
+# of the format <fmri>%%<state>%%<inst_root>%%<readonly>%%<standalone>%%<writable_root>
+#
+# eg.
+# SERVICE_PROPS="svc:/application/pkg/server:foo%%online%%/space/repo%%true%%false%%/space/writable_root"
+#
+# we expect to be called as "svcprop -c -p <property> <fmri>"
+# which is enough svcprop(1) functionalty for these tests. Any other
+# command line options will cause us to return nonsense.
+#
+
+typeset -A prop_state
+typeset -A prop_readonly
+typeset -A prop_inst_root
+typeset -A prop_standalone
+typeset -A prop_writable_root
+
+SERVICE_PROPS="%s"
+for service in $SERVICE_PROPS ; do
+        echo $service | sed -e 's/%%/ /g' | \
+            read fmri state inst_root readonly standalone writable_root
+        # create a hashable version of the FMRI
+        fmri=$(echo $fmri | sed -e 's/\///g' -e 's/://g')
+        prop_state[$fmri]=$state
+        prop_inst_root[$fmri]=$inst_root
+        prop_readonly[$fmri]=$readonly
+        prop_standalone[$fmri]=$standalone
+        prop_writable_root[$fmri]=$writable_root
+done
+
+
+FMRI=$(echo $4 | sed -e 's/\///g' -e 's/://g')
+case $3 in
+        "pkg/inst_root")
+                echo ${prop_inst_root[$FMRI]}
+                ;;
+        "pkg/readonly")
+                echo ${prop_readonly[$FMRI]}
+                ;;
+        "pkg/standalone")
+                echo ${prop_standalone[$FMRI]}
+                ;;
+        "pkg/writable_root")
+                echo ${prop_writable_root[$FMRI]}
+                ;;
+        "restarter/state")
+                echo ${prop_state[$FMRI]}
+                ;;
+        *)
+                echo "Completely bogus svcprop output. Sorry."
+esac
+"""
+
+        # A very minimal httpd.conf, which contains an Include directive
+        # that we will use to reference our pkg5 depot-config.conf file. We leave
+        # an Alias pointing to /server-status to make this server distinctive
+        # for this test case.
+        _default_httpd_conf = \
+"""ServerRoot "/usr/apache2/2.2"
+PidFile "%(runtime_dir)s/default_httpd.pid"
+Listen %(port)s
+<IfDefine 64bit>
+Include /etc/apache2/2.2/conf.d/modules-64.load
+</IfDefine>
+<IfDefine !64bit>
+Include /etc/apache2/2.2/conf.d/modules-32.load
+</IfDefine>
+
+User webservd
+Group webservd
+ServerAdmin [email protected]
+ServerName 127.0.0.1
+DocumentRoot "/var/apache2/2.2/htdocs"
+<Directory "/var/apache2/2.2/htdocs">
+    Options Indexes FollowSymLinks
+    AllowOverride None
+    Order allow,deny
+    Allow from all
+</Directory>
+<IfModule dir_module>
+    DirectoryIndex index.html
+</IfModule>
+LogFormat \"%%h %%l %%u %%t \\\"%%r\\\" %%>s %%b\" common
+ErrorLog "%(runtime_dir)s/error_log"
+CustomLog "%(runtime_dir)s/access_log" common
+LogLevel debug
+DefaultType text/plain
+# Reference the depot.conf file generated by pkg.depot-config, which makes this
+# web server into something that can serve pkg(5) repositories.
+Include %(depot_conf)s
+SSLRandomSeed startup builtin
+SSLRandomSeed connect builtin
+# We enable server-status here, using /pkg5test-server-status to it to make the
+# URI distinctive.
+<Location /pkg5test-server-status>
+    SetHandler server-status
+</Location>
+"""
+
         def setUp(self):
                 self.sc = None
-                pkg5unittest.ApacheDepotTestCase.setUp(self, ["test1", "test2",
-                    "test3"])
+                pkg5unittest.ApacheDepotTestCase.setUp(self, ["test1",
+                    "test2", "test3"])
                 self.rdir1 = self.dcs[1].get_repodir()
                 self.rdir2 = self.dcs[2].get_repodir()
                 self.rdir3 = self.dcs[3].get_repodir()
@@ -131,9 +272,9 @@
                 self.depot_port = self.next_free_port
                 self.next_free_port += 1
                 self.make_misc_files(self.misc_files)
-                self.__set_smf_state()
+                self._set_smf_state()
 
-        def __set_smf_state(self, svcs_conf=default_svcs_conf,
+        def _set_smf_state(self, svcs_conf=default_svcs_conf,
             svcprop_conf=default_svcprop_conf):
                 """Create wrapper scripts for svcprop and svcs based on the
                 arrays of arrays passed in as arguments. By default, the
@@ -176,8 +317,8 @@
                     for item in _svcprop_conf])
 
                 self.smf_cmds = {
-                    "usr/bin/svcs": self.__svcs_template % _svcs_conf,
-                    "usr/bin/svcprop": self.__svcprop_template % _svcprop_conf
+                    "usr/bin/svcs": self._svcs_template % _svcs_conf,
+                    "usr/bin/svcprop": self._svcprop_template % _svcprop_conf
                 }
                 self.make_misc_files(self.smf_cmds, "smf_cmds", mode=0755)
 
@@ -192,6 +333,21 @@
                         u = urllib2.urlopen(
                             "%s/depot/depot-wait-refresh" % hc.url).close()
 
+
+class TestHttpDepot(_Apache, pkg5unittest.ApacheDepotTestCase):
+        """Tests that exercise the pkg.depot-config CLI as well as checking the
+        functionality of the depot-config itself for configuring http service.
+        This test class will fail if not run as root, since many of the tests
+        use 'pkg.depot-config -a' which will attempt to chown a directory to
+        pkg5srv:pkg5srv.
+
+        The default_svcs_conf having an instance name of 'usr' is not a
+        coincidence: we use it there so that we catch RewriteRules that
+        mistakenly try to serve content from the root filesystem ('/') rather
+        than from beneath our DocumentRoot (assuming that test systems always
+        have a /usr directory)
+        """
+
         def test_0_htdepot(self):
                 """A basic test to see that we can start the depot,
                 as part of this, by starting the depot, ApacheController will
@@ -244,7 +400,7 @@
                 # ensure we also catch invalid SMF inst_roots
                 svcs_conf = [["svc:/application/pkg/server:default", "online" ]]
                 svcprop_conf = [["/tmp", "true", "false"]]
-                self.__set_smf_state(svcs_conf, svcprop_conf)
+                self._set_smf_state(svcs_conf, svcprop_conf)
                 ret, output, err = self.depotconfig("", out=True, stderr=True,
                     exit=1)
                 self.assert_("/tmp" in err, "error message did not contain "
@@ -331,15 +487,6 @@
                         self.assert_(invalid_tmp in err, "error message "
                             "did not contain %s: %s" % (invalid_tmp, err))
 
-        def test_9_invalid_httemplates_dir(self):
-                """We return an error given an invalid templates_dir"""
-
-                for invalid_tmp in ["/dev/null", "/etc/passwd", "/proc"]:
-                        ret, output, err = self.depotconfig("-T %s" % invalid_tmp,
-                            out=True, stderr=True, exit=1)
-                        self.assert_(invalid_tmp in err, "error message "
-                            "did not contain %s: %s" % (invalid_tmp, err))
-
         def test_10_httype(self):
                 """We return an error given an invalid type option."""
 
@@ -723,7 +870,6 @@
                     "-P testpkg5" %
                     (self.default_depot_runtime, self.rdir1, self.rdir2,
                     self.index_dir), exit=2)
-
                 self.depotconfig("-l %s -F -d usr=%s -d spaghetti=%s "
                     "-P testpkg5" %
                     (self.default_depot_runtime, self.rdir1, self.rdir2))
@@ -731,7 +877,7 @@
                 default_httpd_conf_path = os.path.join(self.test_root,
                     "default_httpd.conf")
                 httpd_conf = open(default_httpd_conf_path, "w")
-                httpd_conf.write(self.__default_httpd_conf %
+                httpd_conf.write(self._default_httpd_conf %
                     {"port": self.depot_port,
                     "depot_conf": self.depot_conf_fragment,
                     "runtime_dir": self.default_depot_runtime})
@@ -766,154 +912,331 @@
                 self.pkgrepo("-s %s/testpkg5/usr refresh" %
                     self.ac.url, exit=1)
 
-        __svcs_template = \
-"""#!/usr/bin/ksh93
-#
-# This script produces false svcs(1) output, using
-# a list of space separated strings, with each string
-# of the format <fmri>%%<state>
-#
-# Since the string here is generated from a Python program, we have to escape
-# all 'percent' characters.
-#
-# eg.
-# SERVICE_STATUS="svc:/application/pkg/server:foo%%online svc:/application/pkg/server:default%%offline svc:/application/pkg/server:usr%%online"
-# We expect to be called with 'svcs -H -o fmri <fmri>' but completely ignore
-# the <fmri> argument.
-#
-SERVICE_STATUS="%s"
+
+class TestHttpsDepot(_Apache, pkg5unittest.HTTPSTestClass):
+        """Tests that exercise the pkg.depot-config CLI as well as checking the
+        functionality of the depot-config itself for configuring https service.
+        This test class will fail if not run as root, since many of the tests
+        use 'pkg.depot-config -a' which will attempt to chown a directory to
+        pkg5srv:pkg5srv.
+        """
+
+        def test_0_invalid_option_combo(self):
+                """We return an error given an invalid option combo."""
+
+                cert = os.path.join(self.test_root, "tmp",
+                    "ido_exist_cert")
+                key = os.path.join(self.test_root, "tmp",
+                    "ido_exist_key")
+                self.make_misc_files(["tmp/ido_exist_cert",
+                    "tmp/ido_exist_key"])
+
+                # Test that without --https, providing certs or keys will fail.
+                dummy_ret, dummy_output, err = self.depotconfig(
+                    "--cert %s --key %s" % (cert, key),
+                    out=True, stderr=True, exit=2)
+                self.assert_(len(err), "error message: Without --https, "
+                            "providing cert or key should fail but succeeded "
+                            "instead.")
+
+                dummy_ret, dummy_output, err = self.depotconfig(
+                    "--ca-cert %s --ca-key %s" % (cert, key),
+                    out=True, stderr=True, exit=2)
+                self.assert_(len(err), "error message: Without --https, "
+                            "providing cert or key should fail but succeeded "
+                            "instead.")
 
-set -- `getopt o:H $*`
-for i in $* ; do
-    case $i in
-        -H)    minus_h=$i; shift;;
-        -o)    minus_o=$2; shift;;
-        *)     break;;
-    esac
-done
+                dummy_ret, dummy_output, err = self.depotconfig(
+                    "--cert-chain %s" % cert, out=True, stderr=True, exit=2)
+                self.assert_(len(err), "error message: Without --https, "
+                            "providing cert or key should fail but succeeded "
+                            "instead.")
+
+                # Checking that HTTPS is not supported in fragment mode.
+                dummy_ret, dummy_output, err = self.depotconfig(
+                    "--https -F", out=True, stderr=True, exit=2)
+                self.assert_(len(err), "error message: Without --https, "
+                            "providing cert or key should fail but succeeded "
+                            "instead.")
+
+        def test_1_missing_combo_options(self):
+                """We return errors if the option in a combo is not specified
+                at the same time."""
+
+                cert = os.path.join(self.test_root, "tmp",
+                    "ido_exist_cert")
+                key = os.path.join(self.test_root, "tmp",
+                    "ido_exist_key")
+                self.make_misc_files(["tmp/ido_exist_cert",
+                    "tmp/ido_exist_key"])
+
+                self.depotconfig("--https --cert %s" % cert, exit=2)
+                self.depotconfig("--https --key %s" % key, exit=2)
+                self.depotconfig("--https --ca-cert %s" % cert, exit=2)
+                self.depotconfig("--https --ca-key %s" % key, exit=2)
+                self.depotconfig("--https --cert-chain %s" % cert, exit=2)
+
+        def test_2_invalid_cert_key_dir(self):
+                """We return an error given an invalid cert_key_dir."""
+
+                for invalid_certkey_dir in ["/dev/null", "/etc/passwd"]:
+                        ret, output, err = self.depotconfig("--https "
+                            "--cert-key-dir %s" %
+                            invalid_certkey_dir, out=True, stderr=True, exit=1)
+                        self.assert_(invalid_certkey_dir in err, "error message "
+                           "did not contain %s: %s" % (invalid_certkey_dir, err))
+
+        def test_3_non_exist_cert_key(self):
+                """We return an error given a non-existent cert or key."""
 
-if [ "${minus_o}" ]; then
-    if [ -z "${minus_h}" ]; then
-        echo "FMRI"
-    fi
-    for service in $SERVICE_STATUS ; do
-        echo $service | sed -e 's/%%/ /' | read fmri state
-        echo $fmri
-    done
-    exit 0
-fi
+                non_exist_cert = os.path.join(self.test_root,
+                    "idonot_exist_cert")
+                non_exist_key = os.path.join(self.test_root,
+                    "idonot_exist_key")
+                exist_cert = os.path.join(self.test_root, "tmp",
+                    "ido_exist_cert")
+                exist_key = os.path.join(self.test_root, "tmp",
+                    "ido_exist_key")
+                self.make_misc_files(["tmp/ido_exist_cert",
+                    "tmp/ido_exist_key"])
+
+                # Test checking user provided server cert works.
+                dummy_ret, dummy_output, err = self.depotconfig("--https "
+                    "--cert %s --key %s" % (non_exist_cert, exist_key),
+                    out=True, stderr=True, exit=1)
+                self.assert_(non_exist_cert in err, "error message "
+                    "did not contain %s: %s" % (non_exist_cert, err))
 
-if [ -z "${minus_h}" ]; then
-    printf "%%-14s%%6s    %%s\n" STATE STIME FMRI
-fi
-for service in $SERVICE_STATUS ; do
-    echo $service | sed -e 's/%%/ /' | read fmri state
-    printf "%%-14s%%9s %%s\n" $state 00:00:00 $fmri
-done
-"""
+                # Test checking user provided server key works.
+                dummy_ret, dummy_output, err = self.depotconfig("--https "
+                    "--cert %s --key %s" % (exist_cert, non_exist_key),
+                    out=True, stderr=True, exit=1)
+                self.assert_(non_exist_key in err, "error message "
+                    "did not contain %s: %s" % (non_exist_key, err))
+
+                # Test checking user provided cert chain file works.
+                dummy_ret, dummy_output, err = self.depotconfig("--https "
+                    "--cert %s --key %s --cert-chain %s" %
+                    (exist_cert, exist_key, non_exist_cert),
+                    out=True, stderr=True, exit=1)
+                self.assert_(non_exist_cert in err, "error message "
+                    "did not contain %s: %s" % (non_exist_cert, err))
+
+                # Test checking user provided CA cert file works.
+                tmp_dir = os.path.join(self.test_root, "tmp")
+                dummy_ret, dummy_output, err = self.depotconfig("--https "
+                    "--ca-cert %s --ca-key %s --cert-key-dir %s" %
+                    (non_exist_cert, exist_key, tmp_dir),
+                    out=True, stderr=True, exit=1)
+                self.assert_(non_exist_cert in err, "error message "
+                    "did not contain %s: %s" % (non_exist_cert, err))
 
-        __svcprop_template = \
-"""#!/usr/bin/ksh93
-#
-# This script produces false svcprop(1) output, using
-# a list of space separated strings, with each string
-# of the format <fmri>%%<state>%%<inst_root>%%<readonly>%%<standalone>%%<writable_root>
-#
-# eg.
-# SERVICE_PROPS="svc:/application/pkg/server:foo%%online%%/space/repo%%true%%false%%/space/writable_root"
-#
-# we expect to be called as "svcprop -c -p <property> <fmri>"
-# which is enough svcprop(1) functionalty for these tests. Any other
-# command line options will cause us to return nonsense.
-#
+                # Test checking user provided CA key file works.
+                dummy_ret, dummy_output, err = self.depotconfig("--https "
+                    "--ca-cert %s --ca-key %s --cert-key-dir %s" %
+                    (exist_cert, non_exist_key, tmp_dir),
+                    out=True, stderr=True, exit=1)
+                self.assert_(non_exist_key in err, "error message "
+                    "did not contain %s: %s" % (non_exist_key, err))
+
+        def test_4_invalid_smf_fmri(self):
+                """We return an error given an invalid pkg/depot smf fmri."""
 
-typeset -A prop_state
-typeset -A prop_readonly
-typeset -A prop_inst_root
-typeset -A prop_standalone
-typeset -A prop_writable_root
+                some_fake_file = os.path.join(self.test_root, "tmp",
+                    "some_fake_file")
+                self.make_misc_files(["tmp/some_fake_file"])
+                tmp_dir = os.path.join(self.test_root, "tmp")
+                # Test with invalid fmri.
+                for invalid_fmri in ["svc:", "svc://notexist", some_fake_file]:
+                        dummy_ret, dummy_output, err = self.depotconfig(
+                            "--https --cert-key-dir %s --smf-fmri %s" %
+                            (tmp_dir, invalid_fmri), out=True, stderr=True)
+                        self.assert_(len(err), "error message: SMF FMRI "
+                            "setting should fail but succeeded instead.")
+
+                # Test with wrong fmri.
+                wrong_fmri = "pkg/server:default"
+                dummy_ret, dummy_output, err = self.depotconfig(
+                    "--https --cert-key-dir %s --smf-fmri %s" %
+                    (tmp_dir, wrong_fmri), out=True, stderr=True, exit=1)
+                self.assert_(len(err), "error message: SMF FMRI "
+                    "setting should fail but succeeded instead.")
+
+        def test_5_https_gen_cert(self):
+                """Test that https functionality works as expected."""
+
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.sample_pkg)
+                self.pkgrepo("-s %s add-publisher carrots" %
+                    self.dcs[1].get_repo_url())
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.carrots_pkg)
+                self.pkgsend_bulk(self.dcs[2].get_repo_url(),
+                    self.new_pkg)
 
-SERVICE_PROPS="%s"
-for service in $SERVICE_PROPS ; do
-        echo $service | sed -e 's/%%/ /g' | \
-            read fmri state inst_root readonly standalone writable_root
-        # create a hashable version of the FMRI
-        fmri=$(echo $fmri | sed -e 's/\///g' -e 's/://g')
-        prop_state[$fmri]=$state
-        prop_inst_root[$fmri]=$inst_root
-        prop_readonly[$fmri]=$readonly
-        prop_standalone[$fmri]=$standalone
-        prop_writable_root[$fmri]=$writable_root
-done
+                cert_key_dir = os.path.join(self.default_depot_runtime,
+                    "cert_key")
+                if os.path.isdir(cert_key_dir):
+                        shutil.rmtree(cert_key_dir)
 
+                cache_dir = os.path.join(self.test_root, "cache_test_dir")
+                self.depotconfig("-l %s -r %s -c %s -d usr=%s -d spa=%s -p %s "
+                    "--https -T %s -h localhost --cert-key-dir %s" %
+                    (self.default_depot_runtime, self.default_depot_runtime,
+                    cache_dir, self.rdir1, self.rdir2, self.depot_port,
+                    self.depot_template_dir, cert_key_dir))
+                server_id = "localhost_%s" % self.depot_port
+                ca_cert_file = os.path.join(cert_key_dir, "ca_%s_cert.pem" %
+                    server_id)
+                DebugValues["ssl_ca_file"] = ca_cert_file
+
+                # Start an Apache instance
+                self.default_depot_conf = os.path.join(
+                    self.default_depot_runtime, "depot_httpd.conf")
+                ac = pkg5unittest.HttpDepotController(self.default_depot_conf,
+                    self.depot_port, self.default_depot_runtime, testcase=self,
+                    https=True)
+                self.register_apache_controller("depot", ac)
+                ac.start()
+                self.image_create()
 
-FMRI=$(echo $4 | sed -e 's/\///g' -e 's/://g')
-case $3 in
-        "pkg/inst_root")
-                echo ${prop_inst_root[$FMRI]}
-                ;;
-        "pkg/readonly")
-                echo ${prop_readonly[$FMRI]}
-                ;;
-        "pkg/standalone")
-                echo ${prop_standalone[$FMRI]}
-                ;;
-        "pkg/writable_root")
-                echo ${prop_writable_root[$FMRI]}
-                ;;
-        "restarter/state")
-                echo ${prop_state[$FMRI]}
-                ;;
-        *)
-                echo "Completely bogus svcprop output. Sorry."
-esac
-"""
+                # add publishers for the two repositories being served by this
+                # Apache instance.
+                self.pkg("set-publisher -p %s/usr" % self.ac.url)
+                self.pkg("set-publisher -p %s/spa" % self.ac.url)
+                # install packages from the two different publishers in the
+                # first repository
+                self.pkg("install sample")
+                self.pkg("install carrots")
+                # install a package from the second repository
+                self.pkg("install new")
+                # we can't perform remote search or admin operations, since
+                # we've no supporting mod_wsgi process.
+                self.pkg("search -r new", exit=1)
+                self.pkgrepo("-s %s/testpkg5/usr refresh" %
+                    self.ac.url, exit=1)
+
+        def test_6_https_cert_chain(self):
+                """Test that https functionality with cert chain works as
+                expected."""
+
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.sample_pkg)
+                self.pkgrepo("-s %s add-publisher carrots" %
+                    self.dcs[1].get_repo_url())
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.carrots_pkg)
+                self.pkgsend_bulk(self.dcs[2].get_repo_url(),
+                    self.new_pkg)
+
+                cert_key_dir = os.path.join(self.default_depot_runtime,
+                    "cert_key_dir")
+                if os.path.isdir(cert_key_dir):
+                        shutil.rmtree(cert_key_dir)
+                os.makedirs(cert_key_dir)
+                cg = certgenerator.CertGenerator(base_dir=cert_key_dir)
+                cg.make_trust_anchor("ta", https=True)
+                cg.make_ca_cert("ca_ta", "ta", https=True)
+                cg.make_cs_cert("cs_ta", "ca_ta", parent_loc="chain_certs",
+                    https=True)
+
+                ta_cert_file = os.path.join(cg.raw_trust_anchor_dir,
+                    "ta_cert.pem")
+                ca_cert_file = os.path.join(cg.chain_certs_dir,
+                    "ca_ta_cert.pem")
+                cs_cert_file = os.path.join(cg.cs_dir, "cs_ta_cert.pem")
+                cs_key_file = os.path.join(cg.keys_dir, "cs_ta_key.pem")
+
+                cache_dir = os.path.join(self.test_root, "cache_test_dir")
+                self.depotconfig("-l %s -r %s -c %s -d usr=%s -d spa=%s -p %s "
+                    "--https -T %s -h localhost --cert %s --key %s "
+                    "--cert-chain %s" %
+                    (self.default_depot_runtime, self.default_depot_runtime,
+                    cache_dir, self.rdir1, self.rdir2, self.depot_port,
+                    self.depot_template_dir, cs_cert_file, cs_key_file,
+                    ca_cert_file))
 
-        # A very minimal httpd.conf, which contains an Include directive
-        # that we will use to reference our pkg5 depot-config.conf file. We leave
-        # an Alias pointing to /server-status to make this server distinctive
-        # for this test case.
-        __default_httpd_conf = \
-"""ServerRoot "/usr/apache2/2.2"
-PidFile "%(runtime_dir)s/default_httpd.pid"
-Listen %(port)s
-<IfDefine 64bit>
-Include /etc/apache2/2.2/conf.d/modules-64.load
-</IfDefine>
-<IfDefine !64bit>
-Include /etc/apache2/2.2/conf.d/modules-32.load
-</IfDefine>
+                DebugValues["ssl_ca_file"] = ta_cert_file
+
+                # Start an Apache instance
+                self.default_depot_conf = os.path.join(
+                    self.default_depot_runtime, "depot_httpd.conf")
+                ac = pkg5unittest.HttpDepotController(self.default_depot_conf,
+                    self.depot_port, self.default_depot_runtime, testcase=self,
+                    https=True)
+                self.register_apache_controller("depot", ac)
+                ac.start()
+                self.image_create()
+
+                # add publishers for the two repositories being served by this
+                # Apache instance.
+                self.pkg("set-publisher -p %s/usr" % self.ac.url)
+                self.pkg("set-publisher -p %s/spa" % self.ac.url)
+                # install packages from the two different publishers in the
+                # first repository
+                self.pkg("install sample")
+                self.pkg("install carrots")
+                # install a package from the second repository
+                self.pkg("install new")
+
+        def test_7_https_provided_ca(self):
+                """Test that pkg.depot-config functionality with provided
+                ca certificate and key works as expected."""
+
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.sample_pkg)
+                self.pkgrepo("-s %s add-publisher carrots" %
+                    self.dcs[1].get_repo_url())
+                self.pkgsend_bulk(self.dcs[1].get_repo_url(),
+                    self.carrots_pkg)
+                self.pkgsend_bulk(self.dcs[2].get_repo_url(),
+                    self.new_pkg)
 
-User webservd
-Group webservd
-ServerAdmin [email protected]
-ServerName 127.0.0.1
-DocumentRoot "/var/apache2/2.2/htdocs"
-<Directory "/var/apache2/2.2/htdocs">
-    Options Indexes FollowSymLinks
-    AllowOverride None
-    Order allow,deny
-    Allow from all
-</Directory>
-<IfModule dir_module>
-    DirectoryIndex index.html
-</IfModule>
-LogFormat \"%%h %%l %%u %%t \\\"%%r\\\" %%>s %%b\" common
-ErrorLog "%(runtime_dir)s/error_log"
-CustomLog "%(runtime_dir)s/access_log" common
-LogLevel debug
-DefaultType text/plain
-# Reference the depot.conf file generated by pkg.depot-config, which makes this
-# web server into something that can serve pkg(5) repositories.
-Include %(depot_conf)s
-SSLRandomSeed startup builtin
-SSLRandomSeed connect builtin
-# We enable server-status here, using /pkg5test-server-status to it to make the
-# URI distinctive.
-<Location /pkg5test-server-status>
-    SetHandler server-status
-</Location>
-"""
+                cert_key_dir = os.path.join(self.default_depot_runtime,
+                    "cert_key_dir")
+                if os.path.isdir(cert_key_dir):
+                        shutil.rmtree(cert_key_dir)
+                os.makedirs(cert_key_dir)
+
+                cg = certgenerator.CertGenerator(base_dir=cert_key_dir)
+                cg.make_trust_anchor("ta", https=True)
+
+                ta_cert_file = os.path.join(cg.raw_trust_anchor_dir,
+                    "ta_cert.pem")
+                ta_key_file = os.path.join(cg.keys_dir, "ta_key.pem")
+
+                cache_dir = os.path.join(self.test_root, "cache_test_dir")
+                self.depotconfig("-l %s -r %s -c %s -d usr=%s -d spa=%s -p %s "
+                    "--https -T %s -h localhost --ca-cert %s --ca-key %s "
+                    "--cert-key-dir %s" %
+                    (self.default_depot_runtime, self.default_depot_runtime,
+                    cache_dir, self.rdir1, self.rdir2, self.depot_port,
+                    self.depot_template_dir, ta_cert_file, ta_key_file,
+                    cert_key_dir))
+
+                DebugValues["ssl_ca_file"] = ta_cert_file
+
+                # Start an Apache instance
+                self.default_depot_conf = os.path.join(
+                    self.default_depot_runtime, "depot_httpd.conf")
+                ac = pkg5unittest.HttpDepotController(self.default_depot_conf,
+                    self.depot_port, self.default_depot_runtime, testcase=self,
+                    https=True)
+                self.register_apache_controller("depot", ac)
+                ac.start()
+                self.image_create()
+
+                # add publishers for the two repositories being served by this
+                # Apache instance.
+                self.pkg("set-publisher -p %s/usr" % self.ac.url)
+                self.pkg("set-publisher -p %s/spa" % self.ac.url)
+                # install packages from the two different publishers in the
+                # first repository
+                self.pkg("install sample")
+                self.pkg("install carrots")
+                # install a package from the second repository
+                self.pkg("install new")
+
 
 if __name__ == "__main__":
         unittest.main()
--- a/src/tests/cli/t_pkg_image_update.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkg_image_update.py	Fri Apr 18 10:14:06 2014 +0530
@@ -29,14 +29,17 @@
 
 from pkg.client.pkgdefs import *
 
+import hashlib
 import os
 import random
 import unittest
 
+import pkg.misc as misc
 
 class TestImageUpdate(pkg5unittest.ManyDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
         persistent_setup = True
+        need_ro_data = True
 
         foo10 = """
             open [email protected],5.11-0
@@ -116,6 +119,16 @@
             add set name=pkg.depend.install-hold value=test
             close """
 
+        elftest1 = """
+            open [email protected]
+            add file %s mode=0755 owner=root group=bin path=/bin/true
+            close """
+
+        elftest2 = """
+            open [email protected]
+            add file %s mode=0755 owner=root group=bin path=/bin/true
+            close """
+
         def setUp(self):
                 # Two repositories are created for test2.
                 pkg5unittest.ManyDepotTestCase.setUp(self, ["test1", "test2",
@@ -302,6 +315,82 @@
                 self.pkg("update missing", exit=1)
                 self.pkg("update --ignore-missing missing", exit=4)
 
+        def test_content_policy(self):
+                """ Test the content-update-policy property. When set to
+                'when-required' content should only be updated if the content
+                hash has changed, if set to 'always' content should be updated
+                if there is any file change at all."""
+
+                def get_test_sum(fname=None):
+                        """ Helper to get sha256 sum of installed test file."""
+                        if fname is None:
+                                fname = os.path.join(self.get_img_path(),
+                                    "bin/true")
+                        fsum , data = misc.get_data_digest(fname,
+                            hash_func=hashlib.sha256)
+                        return fsum
+
+                # Elftest1 and elftest2 have the same content and the same size,
+                # just different entries in the comment section. The content
+                # hash for both is the same, however the file hash is different.
+                elftest1 = self.elftest1 % os.path.join("ro_data",
+                    "elftest.so.1")
+                elftest2 = self.elftest2 % os.path.join("ro_data",
+                    "elftest.so.2")
+
+                # get the sha256 sums from the original files to distinguish
+                # what actually got installed
+                elf1sum = get_test_sum(fname=os.path.join(self.ro_data_root,
+                    "elftest.so.1"))
+                elf2sum = get_test_sum(fname=os.path.join(self.ro_data_root,
+                    "elftest.so.2"))
+
+                elf1, elf2 = self.pkgsend_bulk(self.rurl1, (elftest1, elftest2))
+
+                # prepare image, install [email protected] and verify
+                self.image_create(self.rurl1)
+                self.pkg("install -v %s" % elf1)
+                self.pkg("contents -m %s" % elf1)
+                self.assertEqual(elf1sum, get_test_sum())
+
+                # test default behavior (always update)
+                self.pkg("update -v elftest")
+                self.pkg("contents -m %s" % elf2)
+                self.assertEqual(elf2sum, get_test_sum())
+                # reset and start over
+                self.pkg("uninstall elftest")
+                self.pkg("install -v %s" % elf1)
+
+                # set policy to when-required, file shouldn't be updated
+                self.pkg("set-property content-update-policy when-required")
+                self.pkg("update -v elftest")
+                self.pkg("list %s" % elf2)
+                self.assertEqual(elf1sum, get_test_sum())
+                # reset and start over
+                self.pkg("uninstall elftest")
+                self.pkg("install -v %s" % elf1)
+
+                # set policy to always, file should be updated now
+                self.pkg("set-property content-update-policy always")
+                self.pkg("update -v elftest")
+                self.pkg("list %s" % elf2)
+                self.assertEqual(elf2sum, get_test_sum())
+
+                # do tests again for downgrading, test file shouldn't change
+                self.pkg("set-property content-update-policy when-required")
+                self.pkg("update -v %s" % elf1)
+                self.pkg("list %s" % elf1)
+                self.assertEqual(elf2sum, get_test_sum())
+                # reset and start over
+                self.pkg("uninstall elftest")
+                self.pkg("install -v %s" % elf2)
+
+                # set policy to always, file should be updated now
+                self.pkg("set-property content-update-policy always")
+                self.pkg("update -v %s" % elf1)
+                self.pkg("list %s" % elf1)
+                self.assertEqual(elf1sum, get_test_sum())
+
 
 class TestPkgUpdateOverlappingPatterns(pkg5unittest.SingleDepotTestCase):
 
--- a/src/tests/cli/t_pkg_install.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkg_install.py	Fri Apr 18 10:14:06 2014 +0530
@@ -43,12 +43,38 @@
 import urllib2
 
 import pkg.actions
+import pkg.digest as digest
 import pkg.fmri as fmri
 import pkg.manifest as manifest
 import pkg.portable as portable
 
 from pkg.client.pkgdefs import EXIT_OOPS
 
+class _TestHelper(object):
+        """Private helper class for shared functionality between test
+        classes."""
+
+        def _assertEditables(self, moved=[], removed=[], installed=[],
+            updated=[]):
+                """Private helper function that verifies that expected editables
+                are listed in parsable output.  If no editable of a given type
+                is specified, then no editable files are expected."""
+
+                changed = []
+                if moved:
+                        changed.append(['moved', moved])
+                if removed:
+                        changed.append(['removed', removed])
+                if installed:
+                        changed.append(['installed', installed])
+                if updated:
+                        changed.append(['updated', updated])
+
+                self.assertEqualParsable(self.output,
+                        include=["change-editables"],
+                        change_editables=changed)
+
+
 class TestPkgInstallBasics(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
         persistent_setup = True
@@ -994,14 +1020,10 @@
                 their paths can be installed or exact-installed, updated, and
                 uninstalled."""
 
-                self.install_fuzz_helper("install")
-                self.install_fuzz_helper("exact-install")
-
-        def install_fuzz_helper(self, install_cmd):
                 self.pkgsend_bulk(self.rurl, self.fuzzy)
                 self.image_create(self.rurl)
 
-                self.pkg("%s fuzzy@1" % install_cmd)
+                self.pkg("install fuzzy@1")
                 self.pkg("verify -v")
                 self.pkg("update -vvv fuzzy@2")
                 self.pkg("verify -v")
@@ -2056,9 +2078,10 @@
                 self.pkg("verify -v")
 
 
-class TestPkgInstallUpgrade(pkg5unittest.SingleDepotTestCase):
+class TestPkgInstallUpgrade(_TestHelper, pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
         persistent_setup = True
+        need_ro_data = True
 
         incorp10 = """
             open [email protected],5.11-0
@@ -2440,7 +2463,7 @@
             add depend type=require [email protected]
             close
             open [email protected]
-            add file tmp/preserve3 path=foo2 mode=0644 owner=root group=root original_name=orig_pkg:foo1 preserve=true
+            add file tmp/foo2 path=foo2 mode=0644 owner=root group=root original_name=orig_pkg:foo1 preserve=true
             add file tmp/bronze1 path=bronze1 mode=0644 owner=root group=root preserve=true
             close
         """
@@ -2495,7 +2518,7 @@
             "tmp/gold-shadow", "tmp/gold-ftpusers", "tmp/gold-silly",
             "tmp/silver-silly", "tmp/preserve1", "tmp/preserve2",
             "tmp/preserve3", "tmp/renold1", "tmp/renold3", "tmp/rennew1",
-            "tmp/rennew3", "tmp/liveroot1", "tmp/liveroot2",
+            "tmp/rennew3", "tmp/liveroot1", "tmp/liveroot2", "tmp/foo2",
         ]
 
         misc_files2 = {
@@ -2707,10 +2730,7 @@
                 """Test for editable files moving between packages or locations
                 or both."""
 
-                self.upgrade3_helper("install")
-                self.upgrade3_helper("exact-install")
-
-        def upgrade3_helper(self, install_cmd):
+                install_cmd = "install"
                 self.pkgsend_bulk(self.rurl, (self.silver10, self.silver20,
                     self.silver30, self.gold10, self.gold20, self.gold30,
                     self.golduser10, self.golduser20, self.silveruser))
@@ -2718,7 +2738,15 @@
                 self.image_create(self.rurl)
 
                 # test 1: move an editable file between packages
-                self.pkg("%s [email protected] [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected]" % install_cmd)
+                self._assertEditables(
+                    installed=[
+                        'etc/ftpd/ftpusers',
+                        'etc/group',
+                        'etc/passwd',
+                        'etc/shadow',
+                    ]
+                )
                 self.pkg("verify -v")
 
                 # modify config file
@@ -2730,17 +2758,27 @@
                 self.file_contains(file_path, test_str)
 
                 # update packages
-                self.pkg("%s [email protected] [email protected]" % install_cmd)
+                self.pkg("%s -nvv [email protected] [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected]" % install_cmd)
+                self._assertEditables()
                 self.pkg("verify -v")
 
                 # make sure /etc/passwd contains still correct string
                 self.file_contains(file_path, test_str)
 
-                self.pkg("uninstall silver gold")
+                self.pkg("uninstall --parsable=0 silver gold")
+                self._assertEditables(
+                    removed=[
+                        'etc/ftpd/ftpusers',
+                        'etc/group',
+                        'etc/passwd',
+                        'etc/shadow',
+                    ],
+                )
 
 
                 # test 2: change an editable file's path within a package
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 self.pkg("verify -v")
 
                 # modify config file
@@ -2748,19 +2786,30 @@
                 file_path = "etc/passwd"
                 self.file_append(file_path, test_str)
 
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
+                self._assertEditables(
+                    moved=[['etc/passwd', 'etc/config2']],
+                    removed=[
+                        'etc/ftpd/ftpusers',
+                        'etc/group',
+                        'etc/shadow',
+                    ],
+                )
                 self.pkg("verify -v")
 
                 # make sure /etc/config2 contains correct string
                 file_path = "etc/config2"
                 self.file_contains(file_path, test_str)
 
-                self.pkg("uninstall gold")
+                self.pkg("uninstall --parsable=0 gold")
+                self._assertEditables(
+                    removed=['etc/config2'],
+                )
                 self.pkg("verify -v")
 
 
                 # test 3: move an editable file between packages and change its path
-                self.pkg("%s [email protected] [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected]" % install_cmd)
                 self.pkg("verify -v")
 
                 # modify config file
@@ -2770,19 +2819,27 @@
 
                 self.file_contains(file_path, test_str)
 
-                self.pkg("%s [email protected] [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected]" % install_cmd)
+                self._assertEditables(
+                    moved=[['etc/passwd', 'etc/config2']],
+                    removed=[
+                        'etc/ftpd/ftpusers',
+                        'etc/group',
+                        'etc/shadow',
+                    ],
+                )
                 self.pkg("verify -v")
 
                 # make sure /etc/config2 now contains correct string
                 file_path = "etc/config2"
                 self.file_contains(file_path, test_str)
 
-                self.pkg("uninstall gold silver")
+                self.pkg("uninstall --parsable=0 gold silver")
 
 
                 # test 4: move /etc/passwd between packages and ensure that we
                 # can still uninstall a user at the same time.
-                self.pkg("%s [email protected] [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected]" % install_cmd)
                 self.pkg("verify -v")
 
                 # add a user
@@ -2805,8 +2862,9 @@
                 silly_inode = os.stat(silly_path).st_ino
 
                 # update packages
-                self.pkg("%s [email protected] [email protected] [email protected] silveruser"
-                    % install_cmd)
+                self.pkg("%s --parsable=0 [email protected] [email protected] [email protected] "
+                    "silveruser" % install_cmd)
+                self._assertEditables()
 
                 # make sure Kermie is still installed and still has our local
                 # changes
@@ -2821,18 +2879,13 @@
                 """Test to make sure hardlinks are correctly restored when file
                 they point to is updated."""
 
-                self.upgrade4_helper("install")
-                self.upgrade4_helper("exact-install")
-
-        def upgrade4_helper(self, install_cmd):
                 self.pkgsend_bulk(self.rurl, (self.iron10, self.iron20))
-
-                self.image_create(self.rurl)
-
-                self.pkg("%s [email protected]" % install_cmd)
+                self.image_create(self.rurl)
+
+                self.pkg("install [email protected]")
                 self.pkg("verify -v")
 
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.pkg("verify -v")
 
         def test_upgrade_liveroot(self):
@@ -2923,57 +2976,78 @@
                 """Verify that file preserve=true works as expected during
                 package install, update, upgrade, and removal."""
 
-                self.file_preserve("install")
-                self.file_preserve("exact-install")
-
-        def file_preserve(self, install_cmd):
+                install_cmd = "install"
                 self.pkgsend_bulk(self.rurl, (self.preserve1, self.preserve2,
                     self.preserve3, self.renpreserve))
                 self.image_create(self.rurl)
 
                 # If there are no local modifications, no preservation should be
                 # done.  First with no content change ...
-                self.pkg("%s preserve@1" % install_cmd)
-                self.pkg("%s preserve@2" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@1" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
+                self.pkg("%s --parsable=0 preserve@2" % install_cmd)
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "preserve1")
                 self.pkg("verify preserve")
 
-                self.pkg("update preserve@1")
+                self.pkg("update --parsable=0 preserve@1")
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "preserve1")
                 self.pkg("verify preserve")
 
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
 
                 # ... and again with content change.
-                self.pkg("install preserve@1")
-                self.pkg("install preserve@3")
+                self.pkg("install --parsable=0 preserve@1")
+                self._assertEditables(
+                    installed=['testme'],
+                )
+                self.pkg("install --parsable=0 preserve@3")
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "preserve3")
 
-                self.pkg("update preserve@1")
+                self.pkg("update --parsable=0 preserve@1")
+                self._assertEditables(
+                    updated=['testme'],
+                )
+
                 self.file_contains("testme", "preserve1")
 
                 self.pkg("verify preserve")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
 
                 # Modify the file locally and update to a version where the
                 # content changes.
-                self.pkg("%s preserve@1" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@1" % install_cmd)
                 self.file_append("testme", "junk")
                 self.file_contains("testme", "preserve1")
-                self.pkg("%s preserve@3" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@3" % install_cmd)
+                self._assertEditables()
                 self.file_contains("testme", "preserve1")
                 self.file_contains("testme", "junk")
                 self.file_doesnt_exist("testme.old")
                 self.file_doesnt_exist("testme.new")
                 self.pkg("verify preserve")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
 
                 # Modify the file locally and downgrade to a version where
                 # the content changes.
-                self.pkg("%s preserve@3" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@3" % install_cmd)
                 self.file_append("testme", "junk")
                 self.file_contains("testme", "preserve3")
-                self.pkg("update preserve@1")
+                self.pkg("update --parsable=0 preserve@1")
+                self._assertEditables(
+                    moved=[['testme', 'testme.update']],
+                    installed=['testme'],
+                )
                 self.file_doesnt_contain("testme", "preserve3")
                 self.file_doesnt_contain("testme", "junk")
                 self.file_doesnt_exist("testme.old")
@@ -2981,61 +3055,74 @@
                 self.file_exists("testme.update")
                 self.file_remove("testme.update")
                 self.pkg("verify preserve")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
 
                 # Modify the file locally and update to a version where just the
                 # mode changes.
-                self.pkg("%s preserve@1" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@1" % install_cmd)
                 self.file_append("testme", "junk")
 
-                self.pkg("%s preserve@2" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@2" % install_cmd)
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "preserve1")
                 self.file_contains("testme", "junk")
                 self.file_doesnt_exist("testme.old")
                 self.file_doesnt_exist("testme.new")
 
-                self.pkg("update preserve@1")
+                self.pkg("update --parsable=0 preserve@1")
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "preserve1")
                 self.file_contains("testme", "junk")
                 self.file_doesnt_exist("testme.old")
                 self.file_doesnt_exist("testme.new")
                 self.file_doesnt_exist("testme.update")
 
-                self.pkg("%s preserve@2" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@2" % install_cmd)
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_doesnt_exist("testme.old")
                 self.file_doesnt_exist("testme.new")
 
                 # Remove the file locally and update the package; this should
                 # simply replace the missing file.
                 self.file_remove("testme")
-                self.pkg("%s preserve@3" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@3" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.pkg("verify preserve")
                 self.file_exists("testme")
 
                 # Remove the file locally and downgrade the package; this should
                 # simply replace the missing file.
                 self.file_remove("testme")
-                self.pkg("update preserve@2")
+                self.pkg("update --parsable=0 preserve@2")
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.pkg("verify preserve")
                 self.file_exists("testme")
                 self.pkg("uninstall preserve@2")
 
-                # Preserved files don't get their mode changed, and verify will
-                # still balk, so fix up the mode.
-                self.pkg("%s preserve@1" % install_cmd)
-                self.pkg("%s preserve@2" % install_cmd)
-                self.file_chmod("testme", 0640)
+                # Verify preserved files will have their mode changed on update.
+                self.pkg("%s --parsable=0 preserve@1" % install_cmd)
+                self.pkg("%s --parsable=0 preserve@2" % install_cmd)
                 self.pkg("verify preserve")
 
                 # Verify that a package with a missing file that is marked with
                 # the preserve=true won't cause uninstall failure.
                 self.file_remove("testme")
                 self.file_doesnt_exist("testme")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
 
                 # Verify preserve works across package rename with and without
                 # original_name use and even when the original file is missing.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 foo1_path = os.path.join(self.get_img_path(), "foo1")
                 self.assert_(os.path.isfile(foo1_path))
                 bronze1_path = os.path.join(self.get_img_path(), "bronze1")
@@ -3044,39 +3131,53 @@
                 # Update across the rename boundary, then verify that the files
                 # were installed with their new name and the old ones were
                 # removed.
-                self.pkg("update orig_pkg")
+                self.pkg("update -nvv orig_pkg")
+                self.pkg("update --parsable=0 orig_pkg")
+                self._assertEditables(
+                    moved=[['foo1', 'foo2']],
+                )
+
                 foo2_path = os.path.join(self.get_img_path(), "foo2")
                 self.assert_(not os.path.exists(foo1_path))
                 self.assert_(os.path.isfile(foo2_path))
                 self.assert_(os.path.isfile(bronze1_path))
-                self.pkg("uninstall \*")
+                self.pkg("uninstall --parsable=0 \*")
 
                 # Update across the rename boundary, then truncate each of the
                 # preserved files.  They should remain empty even though one is
                 # changing names and the other is simply being preserved across
                 # a package rename.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 open(foo1_path, "wb").close()
                 open(bronze1_path, "wb").close()
-                self.pkg("update orig_pkg")
+                self.pkg("update --parsable=0 orig_pkg")
+                self._assertEditables(
+                    moved=[['foo1', 'foo2']],
+                )
                 self.assert_(not os.path.exists(foo1_path))
                 self.assert_(os.path.isfile(foo2_path))
                 self.assertEqual(os.stat(foo2_path).st_size, 0)
                 self.assert_(os.path.isfile(bronze1_path))
                 self.assertEqual(os.stat(bronze1_path).st_size, 0)
-                self.pkg("uninstall \*")
+                self.pkg("uninstall --parsable=0 \*")
+                self._assertEditables(
+                    removed=['bronze1', 'foo2'],
+                )
 
                 # Update across the rename boundary, then verify that a change
                 # in file name will cause re-delivery of preserved files, but
                 # unchanged, preserved files will not be re-delivered.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 os.unlink(foo1_path)
                 os.unlink(bronze1_path)
-                self.pkg("update orig_pkg")
+                self.pkg("update --parsable=0 orig_pkg")
+                self._assertEditables(
+                    moved=[['foo1', 'foo2']],
+                )
                 self.assert_(not os.path.exists(foo1_path))
                 self.assert_(os.path.isfile(foo2_path))
                 self.assert_(not os.path.exists(bronze1_path))
-                self.pkg("uninstall \*")
+                self.pkg("uninstall --parsable=0 \*")
 
                 # Ensure directory is empty before testing.
                 api_inst = self.get_img_api_obj()
@@ -3086,9 +3187,9 @@
 
                 # Verify that unmodified, preserved files will not be salvaged
                 # on uninstall.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 self.file_contains("testme", "preserve1")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
                 salvaged = [
                     n for n in os.listdir(sroot)
                     if n.startswith("testme-")
@@ -3097,19 +3198,16 @@
 
                 # Verify that modified, preserved files will be salvaged
                 # on uninstall.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
                 self.file_contains("testme", "preserve1")
                 self.file_append("testme", "junk")
-                self.pkg("uninstall preserve")
+                self.pkg("uninstall --parsable=0 preserve")
                 self.__salvage_file_contains(sroot, "testme", "junk")
 
         def test_file_preserve_renameold(self):
                 """Make sure that file upgrade with preserve=renameold works."""
 
-                self.file_preserve_renameold_helper("install")
-                self.file_preserve_renameold_helper("exact-install")
-
-        def file_preserve_renameold_helper(self, install_cmd):
+                install_cmd = "install"
                 plist = self.pkgsend_bulk(self.rurl, (self.renameold1,
                     self.renameold2, self.renameold3))
                 self.image_create(self.rurl)
@@ -3137,7 +3235,11 @@
                 # content changes.
                 self.pkg("%s renold@1" % install_cmd)
                 self.file_append("testme", "junk")
-                self.pkg("%s renold@3" % install_cmd)
+                self.pkg("%s --parsable=0 renold@3" % install_cmd)
+                self._assertEditables(
+                    moved=[['testme', 'testme.old']],
+                    installed=['testme'],
+                )
                 self.file_contains("testme.old", "junk")
                 self.file_doesnt_contain("testme", "junk")
                 self.file_contains("testme", "renold3")
@@ -3150,7 +3252,11 @@
                 # mode changes.
                 self.pkg("%s renold@1" % install_cmd)
                 self.file_append("testme", "junk")
-                self.pkg("%s renold@2" % install_cmd)
+                self.pkg("%s --parsable=0 renold@2" % install_cmd)
+                self._assertEditables(
+                    moved=[['testme', 'testme.old']],
+                    installed=['testme'],
+                )
                 self.file_contains("testme.old", "junk")
                 self.file_doesnt_contain("testme", "junk")
                 self.file_contains("testme", "renold1")
@@ -3162,17 +3268,17 @@
                 # simply replace the missing file.
                 self.pkg("%s renold@1" % install_cmd)
                 self.file_remove("testme")
-                self.pkg("%s renold@2" % install_cmd)
+                self.pkg("%s --parsable=0 renold@2" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.pkg("verify renold")
                 self.pkg("uninstall renold")
 
         def test_file_preserve_renamenew(self):
                 """Make sure that file ugprade with preserve=renamenew works."""
 
-                self.file_preserve_renamenew_helper("install")
-                self.file_preserve_renamenew_helper("exact-install")
-
-        def file_preserve_renamenew_helper(self, install_cmd):
+                install_cmd = "install"
                 plist = self.pkgsend_bulk(self.rurl, (self.renamenew1,
                     self.renamenew2, self.renamenew3))
                 self.image_create(self.rurl)
@@ -3180,7 +3286,10 @@
                 # If there are no local modifications, no preservation should be
                 # done.  First with no content change ...
                 self.pkg("%s rennew@1" % install_cmd)
-                self.pkg("%s rennew@2" % install_cmd)
+                self.pkg("%s --parsable=0 rennew@2" % install_cmd)
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "rennew1")
                 self.file_doesnt_exist("testme.new")
                 self.file_doesnt_exist("testme.old")
@@ -3189,7 +3298,10 @@
 
                 # ... and again with content change
                 self.pkg("%s rennew@1" % install_cmd)
-                self.pkg("%s rennew@3" % install_cmd)
+                self.pkg("%s --parsable=0 rennew@3" % install_cmd)
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme", "rennew3")
                 self.file_doesnt_exist("testme.new")
                 self.file_doesnt_exist("testme.old")
@@ -3200,7 +3312,10 @@
                 # content changes.
                 self.pkg("%s rennew@1" % install_cmd)
                 self.file_append("testme", "junk")
-                self.pkg("%s rennew@3" % install_cmd)
+                self.pkg("%s --parsable=0 rennew@3" % install_cmd)
+                self._assertEditables(
+                    installed=['testme.new'],
+                )
                 self.file_contains("testme", "junk")
                 self.file_doesnt_contain("testme.new", "junk")
                 self.file_contains("testme.new", "rennew3")
@@ -3214,14 +3329,19 @@
                 # mode changes.
                 self.pkg("%s rennew@1" % install_cmd)
                 self.file_append("testme", "junk")
-                self.pkg("%s rennew@2" % install_cmd)
+                self.pkg("%s --parsable=0 rennew@2" % install_cmd)
+                self._assertEditables(
+                    installed=['testme.new'],
+                )
                 self.file_contains("testme", "junk")
                 self.file_doesnt_contain("testme.new", "junk")
                 self.file_contains("testme.new", "rennew1")
                 self.file_doesnt_exist("testme.old")
 
-                # Preserved files don't get their mode changed, and verify will
-                # still balk, so fix up the mode.
+                # The original file won't be touched on update, so verify fails.
+                self.pkg("verify rennew", exit=1)
+
+                # Ensure that after fixing mode, verify passes.
                 self.file_chmod("testme", 0640)
                 self.pkg("verify rennew")
                 self.pkg("uninstall rennew")
@@ -3231,7 +3351,10 @@
                 # simply replace the missing file.
                 self.pkg("%s rennew@1" % install_cmd)
                 self.file_remove("testme")
-                self.pkg("%s rennew@2" % install_cmd)
+                self.pkg("%s --parsable=0 rennew@2" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.file_doesnt_exist("testme.new")
                 self.file_doesnt_exist("testme.old")
                 self.pkg("verify rennew")
@@ -3240,10 +3363,7 @@
         def test_file_preserve_legacy(self):
                 """Verify that preserve=legacy works as expected."""
 
-                self.file_preserve_legacy_helper("install")
-                self.file_preserve_legacy_helper("exact-install")
-
-        def file_preserve_legacy_helper(self, install_cmd):
+                install_cmd = "install"
                 self.pkgsend_bulk(self.rurl, (self.preslegacy,
                     self.renpreslegacy))
                 self.image_create(self.rurl)
@@ -3258,7 +3378,10 @@
                 # install if a package being installed delivers the same file
                 # and that the new file will be installed.
                 self.file_append("testme", "unpackaged")
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.file_contains("testme", "preserve1")
                 self.__salvage_file_contains(sroot, "testme", "unpackaged")
                 shutil.rmtree(sroot)
@@ -3266,14 +3389,21 @@
                 # Verify that a package transitioning to preserve=legacy from
                 # some other state will have the existing file renamed using
                 # .legacy as an extension.
-                self.pkg("update [email protected]")
+                self.pkg("update --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['testme', 'testme.legacy']],
+                    installed=['testme'],
+                )
                 self.file_contains("testme.legacy", "preserve1")
                 self.file_contains("testme", "preserve2")
 
                 # Verify that if an action with preserve=legacy is upgraded
                 # and its payload changes that the new payload is delivered
                 # but the old .legacy file is not modified.
-                self.pkg("update [email protected]")
+                self.pkg("update --parsable=0 [email protected]")
+                self._assertEditables(
+                    updated=['testme'],
+                )
                 self.file_contains("testme.legacy", "preserve1")
                 self.file_contains("testme", "preserve3")
 
@@ -3291,37 +3421,47 @@
                 # Verify that an initial install of an action with
                 # preserve=legacy will not install the payload of the action.
                 self.pkg("uninstall preslegacy")
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
+                self._assertEditables()
                 self.file_doesnt_exist("testme")
 
                 # Verify that if the original preserved file is missing during
                 # a transition to preserve=legacy from some other state that
                 # the new action is still delivered and the operation succeeds.
                 self.pkg("uninstall preslegacy")
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.file_remove("testme")
-                self.pkg("update")
+                self.pkg("update --parsable=0")
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.file_contains("testme", "preserve3")
 
                 # Verify that a preserved file can be moved from one package to
                 # another and transition to preserve=legacy at the same time.
                 self.pkg("uninstall preslegacy")
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("%s --parsable=0 [email protected]" % install_cmd)
+                self._assertEditables(
+                    installed=['testme'],
+                )
                 self.file_exists("testme")
-                self.pkg("update")
+                self.pkg("update --parsable=0")
+                self._assertEditables(
+                    moved=[['testme', 'newme.legacy']],
+                    installed=['newme'],
+                )
                 self.file_contains("testme.legacy", "preserve1")
                 self.file_contains("newme", "preserve2")
 
         def test_directory_salvage(self):
                 """Make sure basic directory salvage works as expected"""
 
-                self.directory_salvage_helper("install")
-                self.directory_salvage_helper("exact-install")
-
-        def directory_salvage_helper(self, install_cmd):
                 self.pkgsend_bulk(self.rurl, self.salvage)
                 self.image_create(self.rurl)
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.file_append("var/mail/foo", "foo's mail")
                 self.file_append("var/mail/bar", "bar's mail")
                 self.file_append("var/mail/baz", "baz's mail")
@@ -3334,16 +3474,12 @@
                 """Make sure directory salvage works as expected when salvaging
                 content to an existing packaged directory."""
 
-                self.directory_salvage_persistent_helper("install")
-                self.directory_salvage_persistent_helper("exact-install")
-
-        def directory_salvage_persistent_helper(self, install_cmd):
                 # we salvage content from two directories,
                 # var/noodles and var/spaghetti each of which disappear over
                 # subsequent updates.
                 self.pkgsend_bulk(self.rurl, self.salvage)
                 self.image_create(self.rurl)
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.file_append("var/mail/foo", "foo's mail")
                 self.file_append("var/noodles/noodles.txt", "yum")
                 self.pkg("update [email protected]")
@@ -3356,7 +3492,7 @@
 
                 # ensure that we can jump from 1.0 to 3.0 directly.
                 self.image_create(self.rurl)
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.file_append("var/noodles/noodles.txt", "yum")
                 self.pkg("update  [email protected]")
                 self.file_exists("var/persistent/noodles.txt")
@@ -3365,14 +3501,10 @@
                 """Make sure salvaging directories with special files works as
                 expected."""
 
-                self.special_salvage_helper("install")
-                self.special_salvage_helper("exact-install")
-
-        def special_salvage_helper(self, install_cmd):
                 self.pkgsend_bulk(self.rurl, self.salvage_special)
                 self.image_create(self.rurl, destroy=True, fs=("var",))
 
-                self.pkg("%s salvage-special" % install_cmd)
+                self.pkg("install salvage-special")
 
                 os.mkfifo(os.path.join(self.img_path(), "salvage", "fifo"))
                 sock = socket.socket(socket.AF_UNIX)
@@ -3416,15 +3548,11 @@
                 """Ensure that files transitioning to a link still follow
                 original_name preservation rules."""
 
-                self.link_preserve_helper("install")
-                self.link_preserve_helper("exact-install")
-
-        def link_preserve_helper(self, install_cmd):
                 self.pkgsend_bulk(self.rurl, (self.linkpreserve))
                 self.image_create(self.rurl, destroy=True, fs=("var",))
 
                 # Install package with original config file location.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install --parsable=0 [email protected]")
                 cfg_path = os.path.join("etc", "ssh", "sshd_config")
                 abs_path = os.path.join(self.get_img_path(), cfg_path)
 
@@ -3437,19 +3565,25 @@
                 # Install new package version, verify file replaced with link
                 # and modified version was moved to new location.
                 new_cfg_path = os.path.join("etc", "sunssh", "sshd_config")
-                self.pkg("update [email protected]")
+                self.pkg("update --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/ssh/sshd_config', 'etc/sunssh/sshd_config']]
+                )
                 self.assert_(os.path.islink(abs_path))
                 self.file_exists(new_cfg_path)
                 self.file_contains(new_cfg_path, "modified")
 
                 # Uninstall, then install original version again.
                 self.pkg("uninstall linkpreserve")
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.file_contains(cfg_path, "preserve1")
 
                 # Install new package version and verify that unmodified file is
                 # replaced with new configuration file.
-                self.pkg("update [email protected]")
+                self.pkg("update --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/ssh/sshd_config', 'etc/sunssh/sshd_config']]
+                )
                 self.file_contains(new_cfg_path, "preserve2")
 
         def test_many_hashalgs(self):
@@ -3457,20 +3591,22 @@
                 contains more hash attributes than the old action, that the
                 upgrade works."""
 
-                self.many_hashalgs_helper("install")
-                self.many_hashalgs_helper("exact-install")
-
-        def many_hashalgs_helper(self, install_cmd):
+                self.many_hashalgs_helper("install", "sha256")
+                self.many_hashalgs_helper("install", "sha512_256")
+                self.many_hashalgs_helper("exact-install", "sha256")
+                self.many_hashalgs_helper("exact-install", "sha512_256")
+
+        def many_hashalgs_helper(self, install_cmd, hash_alg):
                 self.pkgsend_bulk(self.rurl, (self.iron10))
                 self.image_create(self.rurl, destroy=True)
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.pkg("contents -m iron")
                 # We have not enabled SHA2 hash publication yet.
-                self.assert_("pkg.hash.sha256" not in self.output)
+                self.assert_(("pkg.hash.%s" % hash_alg) not in self.output)
 
                 # publish with SHA1 and SHA2 hashes
                 self.pkgsend_bulk(self.rurl, self.iron20,
-                    debug_hash="sha1+sha256")
+                    debug_hash="sha1+%s" % hash_alg)
 
                 # verify that a non-SHA2 aware client can install these bits
                 self.pkg("-D hash=sha1 update")
@@ -3479,12 +3615,13 @@
                 # This also tests package retrieval: we always retrieve packages
                 # with the least-preferred hash, but verify with the
                 # most-preferred hash.
-                self.pkg("%s [email protected]" % install_cmd)
+                self.pkg("install [email protected]")
                 self.pkg("contents -m iron")
-                self.assert_("pkg.hash.sha256" in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg in self.output)
 
                 # publish with only SHA-2 hashes
-                self.pkgsend_bulk(self.rurl, self.iron20, debug_hash="sha256")
+                self.pkgsend_bulk(self.rurl, self.iron20,
+                    debug_hash="%s" % hash_alg)
 
                 # verify that a non-SHA2 aware client cannot install these bits
                 # since there are no SHA1 hashes present
@@ -3493,16 +3630,57 @@
                     "No file could be found for the specified hash name: "
                     "'NOHASH'" in self.errout)
 
-                # Make sure we've been publishing only with sha256 by removing
+                # Make sure we've been publishing only with SHA2 by removing
                 # those known attributes, then checking for the presence of
                 # the SHA-1 attributes.
-                self.pkg("-D hash=sha256 update")
+                self.pkg("-D hash=%s update" % hash_alg)
                 self.pkg("contents -m iron")
-                for attr in ["pkg.hash.sha256", "pkg.chash.sha256"]:
+                for attr in ["pkg.hash.%s" % hash_alg,
+                    "pkg.chash.%s" % hash_alg]:
                         self.output = self.output.replace(attr, "")
                 self.assert_("hash" not in self.output)
                 self.assert_("chash" not in self.output)
 
+        def test_content_hash_ignore(self):
+                """Test that pkgs with content-hash attributes are ignored for
+                install and verify by default."""
+
+                elfpkg_1 = """
+                    open [email protected]
+                    add file %s mode=0755 owner=root group=bin path=/bin/true
+                    close """
+                elfpkg = elfpkg_1 % os.path.join("ro_data", "elftest.so.1")
+                elf1 = self.pkgsend_bulk(self.rurl, (elfpkg,))[0]
+
+                repo_dir = self.dcs[1].get_repodir()
+                f = fmri.PkgFmri(elf1, None)
+                repo = self.get_repo(repo_dir)
+                mpath = repo.manifest(f)
+                # load manifest, add content-hash attr and store back to disk
+                mani = manifest.Manifest()
+                mani.set_content(pathname=mpath)
+                for a in mani.gen_actions():
+                        if "bin/true" in str(a):
+                                a.attrs["pkg.content-hash.sha256"] = "foo"
+                mani.store(mpath)
+                # rebuild repo catalog since manifest digest changed
+                repo.rebuild()
+
+                # assert that the current pkg gate has the correct hash ranking
+                self.assertTrue(len(digest.RANKED_CONTENT_HASH_ATTRS) > 0)
+                self.assertEqual(digest.RANKED_CONTENT_HASH_ATTRS[0], "elfhash")
+
+                # test that pkgrecv, pkgrepo verify, pkg install and pkg verify
+                # do not complain about unknown hash
+                self.pkgrecv("%s -a -d %s '*'" % (repo_dir,
+                    os.path.join(self.test_root, "x.p5p")))
+                self.pkgrepo("verify -s %s" % repo_dir)
+                self.image_create(self.rurl, destroy=True)
+                self.pkg("install -v %s" % elf1)
+                # Note that we pass verification if any of the hashes match, but
+                # we require by default that the content hash matches.
+                self.pkg("verify")
+
 
 class TestPkgInstallActions(pkg5unittest.SingleDepotTestCase):
         # Only start/stop the depot once (instead of for every test)
@@ -4032,79 +4210,6 @@
                 self.assert_("gonzo\n" not in file(fpath).readlines())
                 self.pkg("verify ftpuserimp")
 
-        def test_ftpuser_exact_install(self):
-                """Make sure we correctly handle /etc/ftpd/ftpusers."""
-
-                notftpuser = """
-                open notftpuser@1
-                add user username=animal group=root ftpuser=false
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-                ftpuserexp = """
-                open ftpuserexp@1
-                add user username=fozzie group=root ftpuser=true
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-                ftpuserimp = """
-                open ftpuserimp@1
-                add user username=gonzo group=root
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-                self.pkgsend_bulk(self.rurl, (self.basics0, notftpuser,
-                    ftpuserexp, ftpuserimp))
-                self.image_create(self.rurl)
-
-                self.pkg("install basics")
-
-                # Add a user with ftpuser=false.  Make sure the user is added to
-                # the file, and that the user verifies.
-                self.pkg("exact-install notftpuser")
-                fpath = self.get_img_path() + "/etc/ftpd/ftpusers"
-                self.assert_("animal\n" in file(fpath).readlines())
-                self.pkg("verify notftpuser")
-
-                # Put a user into the ftpusers file as shipped, then add that
-                # user, with ftpuser=false.  Make sure the user remains in the
-                # file, and that the user verifies.
-                self.pkg("uninstall notftpuser")
-                file(fpath, "a").write("animal\n")
-                self.pkg("exact-install notftpuser")
-                self.assert_("animal\n" in file(fpath).readlines())
-                self.pkg("verify notftpuser")
-
-                # Add a user with an explicit ftpuser=true.  Make sure the user
-                # is not added to the file, and that the user verifies.
-                self.pkg("exact-install ftpuserexp")
-                self.assert_("fozzie\n" not in file(fpath).readlines())
-                self.pkg("verify ftpuserexp")
-
-                # Put a user into the ftpusers file as shipped, then add that
-                # user, with an explicit ftpuser=true.  Make sure the user is
-                # stripped from the file, and that the user verifies.
-                self.pkg("uninstall ftpuserexp")
-                file(fpath, "a").write("fozzie\n")
-                self.pkg("exact-install ftpuserexp")
-                self.assert_("fozzie\n" not in file(fpath).readlines())
-                self.pkg("verify ftpuserexp")
-
-                # Add a user with an implicit ftpuser=true.  Make sure the user
-                # is not added to the file, and that the user verifies.
-                self.pkg("exact-install ftpuserimp")
-                self.assert_("gonzo\n" not in file(fpath).readlines())
-                self.pkg("verify ftpuserimp")
-
-                # Put a user into the ftpusers file as shipped, then add that
-                # user, with an implicit ftpuser=true.  Make sure the user is
-                # stripped from the file, and that the user verifies.
-                self.pkg("uninstall ftpuserimp")
-                file(fpath, "a").write("gonzo\n")
-                self.pkg("exact-install ftpuserimp")
-                self.assert_("gonzo\n" not in file(fpath).readlines())
-                self.pkg("verify ftpuserimp")
-
         def test_groupverify_install(self):
                 """Make sure we correctly verify group actions when users have
                 been added."""
@@ -4142,45 +4247,6 @@
                 gdata = file(gpath).readlines()
                 self.assert_(gdata[-1].find("muppets2") == 0)
 
-        def test_groupverify_exact_install(self):
-                """Make sure we correctly verify group actions when users have
-                been added."""
-
-                simplegroups = """
-                open simplegroup@1
-                add group groupname=muppets gid=100
-                add depend fmri=pkg:/[email protected] type=require
-                close
-                open simplegroup2@1
-                add group groupname=muppets2 gid=101
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-                self.pkgsend_bulk(self.rurl, (self.basics0, simplegroups))
-                self.image_create(self.rurl)
-
-                self.pkg("install basics")
-                self.pkg("exact-install simplegroup")
-                self.pkg("verify simplegroup")
-
-                # add additional members to group & verify
-                gpath = self.get_img_file_path("etc/group")
-                gdata = file(gpath).readlines()
-                gdata[-1] = gdata[-1].rstrip() + "kermit,misspiggy\n"
-                file(gpath, "w").writelines(gdata)
-                self.pkg("verify simplegroup")
-                self.pkg("uninstall simplegroup")
-
-                # verify that groups appear in gid order.
-                self.pkg("exact-install simplegroup simplegroup2")
-                self.pkg("verify")
-                gdata = file(gpath).readlines()
-                self.assert_(gdata[-1].find("muppets2") == 0)
-                self.pkg("uninstall simple*")
-                self.pkg("exact-install simplegroup2 simplegroup")
-                gdata = file(gpath).readlines()
-                self.assert_(gdata[-1].find("muppets2") == 0)
-
         def test_preexisting_group_install(self):
                 """Make sure we correct any errors in pre-existing group actions"""
                 simplegroup = """
@@ -4212,39 +4278,6 @@
                 self.pkg("update simplegroup")
                 self.pkg("verify simplegroup")
 
-        def test_preexisting_group_exact_install(self):
-                """Make sure we correct any errors in pre-existing group actions"""
-                simplegroup = """
-                open simplegroup@1
-                add group groupname=muppets gid=70
-                add depend fmri=pkg:/[email protected] type=require
-                close
-                open simplegroup@2
-                add dir path=/etc/muppet owner=root group=muppets mode=755
-                add group groupname=muppets gid=70
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-                self.pkgsend_bulk(self.rurl, (self.basics0, simplegroup))
-                self.image_create(self.rurl)
-
-                self.pkg("install basics")
-                gpath = self.get_img_file_path("etc/group")
-                gdata = file(gpath).readlines()
-                gdata = ["muppets::1010:\n"] + gdata
-                file(gpath, "w").writelines(gdata)
-                self.pkg("verify")
-                self.pkg("exact-install simplegroup@1")
-                self.pkg("verify simplegroup")
-                # check # lines beginning w/ 'muppets' in group file
-                gdata = file(gpath).readlines()
-                self.assert_(
-                    len([a for a in gdata if a.find("muppets") == 0]) == 1)
-
-                # make sure we can add new version of same package
-                self.pkg("update simplegroup")
-                self.pkg("verify simplegroup")
-
         def test_missing_ownergroup_install(self):
                 """test what happens when a owner or group is missing"""
                 missing = """
@@ -4303,69 +4336,6 @@
                 self.pkg("install missing_owner@1")
                 self.pkg("verify muppetsgroup muppetsuser missing*")
 
-        def test_missing_ownergroup_exact_install(self):
-                """Test what happens when a owner or group is missing."""
-
-                missing = """
-                open missing_group@1
-                add dir path=etc/muppet1 owner=root group=muppets mode=755
-                 add depend fmri=pkg:/[email protected] type=require
-                close
-                open missing_owner@1
-                add dir path=etc/muppet2 owner=muppets group=root mode=755
-                 add depend fmri=pkg:/[email protected] type=require
-                close
-                open muppetsuser@1
-                add user username=muppets group=bozomuppets uid=777
-                add depend fmri=pkg:/[email protected] type=require
-                close
-                open muppetsuser@2
-                add user username=muppets group=muppets uid=777
-                add depend fmri=pkg:/[email protected] type=require
-                add depend fmri=pkg:/muppetsgroup@1 type=require
-                close
-                open muppetsgroup@1
-                add group groupname=muppets gid=777
-                close
-                """
-
-                self.pkgsend_bulk(self.rurl, (self.basics0, missing))
-                self.image_create(self.rurl)
-                self.pkg("install basics")
-
-                # try exact-installing directory w/ a non-existing group
-                self.pkg("exact-install missing_group@1", exit=1)
-                # try exact-installing directory w/ a non-existing owner
-                self.pkg("exact-install missing_owner@1", exit=1)
-                # try exact-installing user w/ unknown group
-                self.pkg("exact-install muppetsuser@1", exit=1)
-
-                # install group
-                self.pkg("install muppetsgroup")
-                # install working user & see if it all works.
-                self.pkg("install muppetsuser@2")
-                self.pkg("exact-install muppetsgroup missing_group@1")
-                self.pkg("exact-install muppetsuser@2 missing_owner@1")
-                self.pkg("verify")
-                # edit group file to remove muppets group
-                gpath = self.get_img_file_path("etc/group")
-                gdata = file(gpath).readlines()
-                file(gpath, "w").writelines(gdata[0:-1])
-                # verify that we catch missing group
-                # in both group and user actions
-                self.pkg("verify muppetsgroup", 1)
-                self.pkg("verify muppetsuser", 1)
-                self.pkg("fix muppetsgroup", 0)
-                self.pkg("verify muppetsgroup muppetsuser missing*")
-                self.pkg("uninstall missing*")
-                # try installing w/ broken group
-                file(gpath, "w").writelines(gdata[0:-1])
-                self.pkg("exact-install missing_group@1", 1)
-                self.pkg("fix muppetsgroup")
-                self.pkg("exact-install muppetsgroup missing_group@1")
-                self.pkg("exact-install muppetsuser@2 missing_owner@1")
-                self.pkg("verify muppetsgroup muppetsuser missing*")
-
         def test_userverify_install(self):
                 """Make sure we correctly verify user actions when the on-disk
                 databases have been modified."""
@@ -4524,167 +4494,6 @@
                 pdata = file(ppath).readlines()
                 pdata[-1].index("kermit")
 
-        def test_userverify_exact_install(self):
-                """Make sure we correctly verify user actions when the on-disk
-                databases have been modified."""
-
-                simpleusers = """
-                open simpleuser@1
-                add user username=misspiggy group=root gcos-field="& loves Kermie" login-shell=/bin/sh uid=5
-                add depend fmri=pkg:/[email protected] type=require
-                close
-                open simpleuser2@1
-                add user username=kermit group=root gcos-field="& loves mspiggy" login-shell=/bin/sh password=UP uid=6
-                add depend fmri=pkg:/[email protected] type=require
-                close
-                open simpleuser2@2
-                add user username=kermit group=root gcos-field="& loves mspiggy" login-shell=/bin/sh uid=6
-                add depend fmri=pkg:/[email protected] type=require
-                close"""
-
-
-                self.pkgsend_bulk(self.rurl, (self.basics0, simpleusers))
-                self.image_create(self.rurl)
-
-                self.pkg("install basics")
-                self.pkg("exact-install simpleuser")
-                self.pkg("verify simpleuser")
-
-                ppath = self.get_img_path() + "/etc/passwd"
-                pdata = file(ppath).readlines()
-                spath = self.get_img_path() + "/etc/shadow"
-                sdata = file(spath).readlines()
-
-                def finderr(err):
-                        self.assert_("\t\t" + err in self.output)
-
-                # change a provided, empty-default field to something else
-                pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:/bin/zsh"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("login-shell: '/bin/zsh' should be '/bin/sh'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # change a provided, non-empty-default field to the default
-                pdata[-1] = "misspiggy:x:5:0:& User:/:/bin/sh"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("gcos-field: '& User' should be '& loves Kermie'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # change a non-provided, non-empty-default field to something
-                # other than the default
-                pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/misspiggy:/bin/sh"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("home-dir: '/misspiggy' should be '/'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # add a non-provided, empty-default field
-                pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:/bin/sh"
-                sdata[-1] = "misspiggy:*LK*:14579:7:::::"
-                file(ppath, "w").writelines(pdata)
-                os.chmod(spath,
-                    stat.S_IMODE(os.stat(spath).st_mode)|stat.S_IWUSR)
-                file(spath, "w").writelines(sdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("min: '7' should be '<empty>'")
-                # fails fix since we don't repair shadow entries on purpose
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser", exit=1)
-                finderr("min: '7' should be '<empty>'")
-
-                # remove a non-provided, non-empty-default field
-                pdata[-1] = "misspiggy:x:5:0:& loves Kermie::/bin/sh"
-                sdata[-1] = "misspiggy:*LK*:14579::::::"
-                file(ppath, "w").writelines(pdata)
-                file(spath, "w").writelines(sdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("home-dir: '' should be '/'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # remove a provided, non-empty-default field
-                pdata[-1] = "misspiggy:x:5:0::/:/bin/sh"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("gcos-field: '' should be '& loves Kermie'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # remove a provided, empty-default field
-                pdata[-1] = "misspiggy:x:5:0:& loves Kermie:/:"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("login-shell: '' should be '/bin/sh'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # remove the user from /etc/passwd
-                pdata[-1] = "misswiggy:x:5:0:& loves Kermie:/:"
-                file(ppath, "w").writelines(pdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("login-shell: '<missing>' should be '/bin/sh'")
-                finderr("gcos-field: '<missing>' should be '& loves Kermie'")
-                finderr("group: '<missing>' should be 'root'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # remove the user completely
-                pdata[-1] = "misswiggy:x:5:0:& loves Kermie:/:"
-                sdata[-1] = "misswiggy:*LK*:14579::::::"
-                file(ppath, "w").writelines(pdata)
-                file(spath, "w").writelines(sdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("username: '<missing>' should be 'misspiggy'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # change the password and show an error
-                self.pkg("verify simpleuser")
-                sdata[-1] = "misspiggy:NP:14579::::::"
-                file(spath, "w").writelines(sdata)
-                self.pkg("verify simpleuser", exit=1)
-                finderr("password: 'NP' should be '*LK*'")
-                self.pkg("fix simpleuser")
-                self.pkg("verify simpleuser")
-
-                # verify that passwords set to anything
-        # other than '*LK*" or 'NP' in manifest
-                # do not cause verify errors if changed.
-                self.pkg("exact-install --reject simpleuser simpleuser2@1")
-                self.pkg("verify simpleuser2")
-                pdata = file(ppath).readlines()
-                sdata = file(spath).readlines()
-                sdata[-1] = "kermit:$5$pWPEsjm2$GXjBRTjGeeWmJ81ytw3q1ah7QTaI7yJeRYZeyvB.Rp1:14579::::::"
-                file(spath, "w").writelines(sdata)
-                self.pkg("verify simpleuser2")
-
-                # verify that upgrading package to version that implicitly
-                # uses *LK* default causes password to change and that it
-                # verifies correctly
-                self.pkg("update simpleuser2@2")
-                self.pkg("verify simpleuser2")
-                sdata = file(spath).readlines()
-                sdata[-1].index("*LK*")
-
-                # ascertain that users are added in uid order when
-                # installed at the same time.
-                self.pkg("uninstall simpleuser2")
-                self.pkg("exact-install simpleuser simpleuser2")
-
-                pdata = file(ppath).readlines()
-                pdata[-1].index("kermit")
-
-                self.pkg("uninstall simpleuser simpleuser2")
-                self.pkg("exact-install simpleuser2 simpleuser")
-
-                pdata = file(ppath).readlines()
-                pdata[-1].index("kermit")
-
         def test_minugid(self):
                 """Ensure that an unspecified uid/gid results in the first
                 unused."""
@@ -8356,7 +8165,7 @@
                 self.pkg("update", exit=1)
 
 
-class TestConflictingActions(pkg5unittest.SingleDepotTestCase):
+class TestConflictingActions(_TestHelper, pkg5unittest.SingleDepotTestCase):
         """This set of tests verifies that packages which deliver conflicting
         actions into the same name in a namespace cannot be installed
         simultaneously."""
@@ -9188,119 +8997,6 @@
                 self.pkg("uninstall pkg2", exit=1)
                 self.pkg("verify pkg2")
 
-        def test_multiple_files_exact_install(self):
-                """Test the behavior of pkg(1) when multiple file actions
-                deliver to the same pathname."""
-
-                self.image_create(self.rurl)
-
-                # Duplicate files in the same package.
-                self.pkg("exact-install dupfiles", exit=1)
-
-                # Duplicate files in different packages, but in the same
-                # transaction.
-                self.pkg("exact-install dupfilesp1 dupfilesp2@0", exit=1)
-
-                # Duplicate files in different packages, in different
-                # transactions. This should succeed because exact-install will
-                # uninstall dupfilesp1 first.
-                self.pkg("exact-install dupfilesp1")
-                self.pkg("exact-install dupfilesp2@0")
-
-                # Test that being in a duplicate file situation doesn't break
-                # you completely and allows you to add and remove other
-                # packages.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 dupfilesp2@0")
-                self.pkg("exact-install implicitdirs2")
-                self.pkg("uninstall implicitdirs2")
-
-                # If the packages involved get upgraded by exact-install, that
-                # means the old actions has been removed. So we should be okay.
-                self.pkg("exact-install dupfilesp2 dupfilesp3")
-                self.pkg("verify")
-
-                # Test that removing one of two offending actions reverts the
-                # system to a clean state.
-                self.pkg("uninstall dupfilesp3")
-                self.pkg("verify")
-
-                # You should be able to upgrade to a fixed set of packages in
-                # order to move past the problem, too.
-                self.pkg("uninstall dupfilesp2")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp2@0")
-                self.pkg("update")
-                self.pkg("verify")
-
-                # If we upgrade to a version of a conflicting package that no
-                # longer has the conflict, amd at the same time introduce a new
-                # file action at the path with different contents, we should
-                # succeed with exact-install.
-                self.pkg("uninstall dupfilesp2")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp2@0")
-                self.pkg("exact-install dupfilesp2 dupfilesp4")
-
-                # Removing one of more than two offending actions can't do much
-                # of anything, but should leave the system alone.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 dupfilesp2@0 dupfilesp3")
-                # XXX The checks here rely on verify failing due to hashes being
-                # wrong; they should probably report a duplicate action instead.
-                self.pkg("verify", exit=1)
-                out1, err1 = self.output, self.errout
-                self.pkg("uninstall dupfilesp3")
-                # Because we removed dupfilesp3, the error output in this verify
-                # won't exactly match that from the previous one, but the one
-                # remaining failing package should give the same output since we
-                # didn't modify the FS, so search for the current output in the
-                # old.
-                self.pkg("verify", exit=1)
-                out2 = self.output
-                # Strip the first (header) line; this error might not have been
-                # first in the previous output.
-                out2 = out2[out2.index("\n") + 1:]
-                self.assert_(out2 in out1)
-
-                # Removing all but one of the offending actions should get us
-                # back to sanity.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 dupfilesp2@0 dupfilesp3")
-                self.pkg("uninstall dupfilesp3 dupfilesp2")
-                self.pkg("verify")
-
-                # Make sure we handle cleaning up multiple files properly.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 dupfilesp2@0 dupotherfilesp1 "
-                    "dupotherfilesp2")
-                self.pkg("uninstall dupfilesp2 dupotherfilesp2")
-                self.pkg("verify")
-
-                # Make sure we get rid of all implicit directories.
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install implicitdirs3 implicitdirs4")
-                self.pkg("uninstall implicitdirs3 implicitdirs4")
-
-                if os.path.isdir(os.path.join(self.get_img_path(), "usr/bin")):
-                        self.assert_(False, "Directory 'usr/bin' should not exist")
-
-                if os.path.isdir(os.path.join(self.get_img_path(), "usr")):
-                        self.assert_(False, "Directory 'usr' should not exist")
-
-                # Make sure identical actions don't cause problems.
-                self.pkg("exact-install -nv identicalfiles", exit=1)
-
-                # Trigger a bug similar to 17943 via duplicate files.
-                self.pkg("publisher")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1@0 dupfilesp2@0 dupfilesp3@0 "
-                    "dupotherfilesp1@0 dupotherfilesp2@0 dupotherfilesp3@0")
-                self.pkg("update")
-
         def test_overlay_files_install(self):
                 """Test the behaviour of pkg(1) when actions for editable files
                 overlay other actions."""
@@ -9336,7 +9032,10 @@
                 # overlaying action declares its intent to overlay.
                 self.pkg("contents -m overlaid")
                 self.pkg("contents -mr overlayer")
-                self.pkg("install overlayer")
+                self.pkg("install --parsable=0 overlayer")
+                self._assertEditables(
+                    installed=["etc/pam.conf"],
+                )
                 self.file_contains("etc/pam.conf", "file2")
 
                 # Should fail because multiple actions are not allowed to
@@ -9356,38 +9055,54 @@
                 # Verify that the file isn't touched on uninstall of the
                 # overlaying package if package being overlaid is still
                 # installed.
-                self.pkg("uninstall -vvv overlayer")
+                self.pkg("uninstall --parsable=0 overlayer")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "zigit")
                 self.file_contains("etc/pam.conf", "file2")
 
                 # Verify that removing the last package delivering an overlaid
                 # file removes the file.
-                self.pkg("uninstall overlaid")
+                self.pkg("uninstall --parsable=0 overlaid")
+                self._assertEditables(
+                    removed=["etc/pam.conf"],
+                )
                 self.file_doesnt_exist("etc/pam.conf")
 
                 # Verify that installing both packages at the same time results
                 # in only the overlaying file being delivered.
-                self.pkg("install overlaid@0 overlayer")
+                self.pkg("install --parsable=0 overlaid@0 overlayer")
+                self._assertEditables(
+                    installed=["etc/pam.conf"],
+                )
                 self.file_contains("etc/pam.conf", "file2")
 
                 # Verify that the file isn't touched on uninstall of the
                 # overlaid package if overlaying package is still installed.
                 self.file_append("etc/pam.conf", "zigit")
-                self.pkg("uninstall overlaid")
+                self.pkg("uninstall --parsable=0 overlaid")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "file2")
                 self.file_contains("etc/pam.conf", "zigit")
 
                 # Re-install overlaid package and verify that file content
                 # does not change.
-                self.pkg("install overlaid@0")
+                self.pkg("install --parsable=0 overlaid@0")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "file2")
                 self.file_contains("etc/pam.conf", "zigit")
-                self.pkg("uninstall overlaid overlayer")
+                self.pkg("uninstall --parsable=0 overlaid overlayer")
+                self._assertEditables(
+                    removed=["etc/pam.conf"],
+                )
 
                 # Should succeed because one action is overlayable and
                 # overlaying action declares its intent to overlay even
                 # though the overlaying action isn't marked with preserve.
-                self.pkg("install overlaid@0 unpreserved-overlayer")
+                self.pkg("install -nvv overlaid@0 unpreserved-overlayer")
+                self.pkg("install --parsable=0 overlaid@0 unpreserved-overlayer")
+                self._assertEditables(
+                    installed=["etc/pam.conf"],
+                )
                 self.file_contains("etc/pam.conf", "unpreserved")
 
                 # Should succeed because overlaid action permits modification
@@ -9413,7 +9128,8 @@
                 self.pkg("revert /etc/pam.conf")
                 self.file_contains("etc/pam.conf", "unpreserved")
                 self.file_doesnt_contain("etc/pam.conf", "zigit")
-                self.pkg("uninstall unpreserved-overlayer")
+                self.pkg("uninstall --parsable=0 unpreserved-overlayer")
+                self._assertEditables()
 
                 # Should revert to content delivered by overlaid action.
                 self.file_contains("etc/pam.conf", "unpreserved")
@@ -9423,21 +9139,28 @@
                 # Install overlaying package, then update overlaid package and
                 # verify that file content does not change if only preserve
                 # attribute changes.
-                self.pkg("install -vvv unpreserved-overlayer")
+                self.pkg("install --parsable=0 unpreserved-overlayer")
+                self._assertEditables(
+                    installed=["etc/pam.conf"],
+                )
                 self.file_contains("etc/pam.conf", "unpreserved")
-                self.pkg("install overlaid@1")
+                self.pkg("install --parsable=0 overlaid@1")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "unpreserved")
-                self.pkg("uninstall -vvv overlaid")
+                self.pkg("uninstall --parsable=0 overlaid")
+                self._assertEditables()
 
                 # Now update overlaid package again, and verify that file
                 # content does not change even though overlaid content has.
-                self.pkg("install -vvv overlaid@2")
+                self.pkg("install --parsable=0 overlaid@2")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "unpreserved")
 
                 # Now update overlaid package again this time as part of a
                 # rename, and verify that file content does not change even
                 # though file has moved between packages.
-                self.pkg("install -vvv overlaid@3")
+                self.pkg("install --parsable=0 overlaid@3")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "unpreserved")
 
                 # Verify that unpreserved overlay is not salvaged when both
@@ -9452,7 +9175,11 @@
                 shutil.rmtree(sroot)
 
                 # Verify etc directory not found after uninstall.
-                self.pkg("uninstall -vvv overlaid-renamed unpreserved-overlayer")
+                self.pkg("uninstall --parsable=0 overlaid-renamed "
+                    "unpreserved-overlayer")
+                self._assertEditables(
+                    removed=['etc/pam.conf'],
+                )
                 salvaged = [
                     n for n in os.listdir(sroot)
                     if n.startswith("etc")
@@ -9464,15 +9191,28 @@
                 # the new location and the old location, that the content has
                 # not changed in either, and that the new configuration exists
                 # as expected as ".new".
-                self.pkg("install -vvv overlaid-renamed@3 unpreserved-overlayer")
-                self.pkg("install -vvv [email protected]")
+                self.pkg("install --parsable=0 overlaid-renamed@3 "
+                    "unpreserved-overlayer")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
+                self.pkg("install -nvv [email protected]")
+                self.pkg("install --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam/pam.conf']],
+                    installed=['etc/pam/pam.conf.new'],
+                )
                 self.file_contains("etc/pam.conf", "unpreserved")
                 self.file_contains("etc/pam/pam.conf", "unpreserved")
                 self.file_contains("etc/pam/pam.conf.new", "file4")
 
                 # Verify etc/pam.conf not salvaged after uninstall as overlay
                 # file has not been changed.
-                self.pkg("uninstall -vvv overlaid-renamed unpreserved-overlayer")
+                self.pkg("uninstall --parsable=0 overlaid-renamed "
+                    "unpreserved-overlayer")
+                self._assertEditables(
+                    removed=['etc/pam.conf', 'etc/pam/pam.conf'],
+                )
                 salvaged = [
                     n for n in os.listdir(os.path.join(sroot, "etc"))
                     if n.startswith("pam.conf")
@@ -9485,22 +9225,29 @@
                 # Install overlaying package, then update overlaid package and
                 # verify that file content does not change if only preserve
                 # attribute changes.
-                self.pkg("install -vvv overlayer")
+                self.pkg("install --parsable=0 overlayer")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
                 self.file_contains("etc/pam.conf", "file2")
                 self.file_append("etc/pam.conf", "zigit")
-                self.pkg("install overlaid@1")
+                self.pkg("install --parsable=0 overlaid@1")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "zigit")
-                self.pkg("uninstall -vvv overlaid")
+                self.pkg("uninstall --parsable=0 overlaid")
+                self._assertEditables()
 
                 # Now update overlaid package again, and verify that file
                 # content does not change even though overlaid content has.
-                self.pkg("install -vvv overlaid@2")
+                self.pkg("install --parsable=0 overlaid@2")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "zigit")
 
                 # Now update overlaid package again this time as part of a
                 # rename, and verify that file content does not change even
                 # though file has moved between packages.
-                self.pkg("install -vvv overlaid@3")
+                self.pkg("install --parsable=0 overlaid@3")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "zigit")
 
                 # Verify that preserved overlay is salvaged when both overlaid
@@ -9515,7 +9262,10 @@
                 shutil.rmtree(sroot)
 
                 # Verify etc directory found after uninstall.
-                self.pkg("uninstall -vvv overlaid-renamed overlayer")
+                self.pkg("uninstall --parsable=0 overlaid-renamed overlayer")
+                self._assertEditables(
+                    removed=['etc/pam.conf'],
+                )
                 salvaged = [
                     n for n in os.listdir(sroot)
                     if n.startswith("etc")
@@ -9528,22 +9278,39 @@
                 # and the old location, that the content has not changed in
                 # either, and that the new configuration exists as expected as
                 # ".new".
-                self.pkg("install -vvv overlaid-renamed@3 overlayer")
+                self.pkg("install --parsable=0 overlaid-renamed@3 overlayer")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
                 self.file_append("etc/pam.conf", "zigit")
-                self.pkg("install -vvv [email protected]")
+                self.pkg("install --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam/pam.conf']],
+                    installed=['etc/pam/pam.conf.new'],
+                )
                 self.file_contains("etc/pam.conf", "zigit")
                 self.file_contains("etc/pam/pam.conf", "zigit")
                 self.file_contains("etc/pam/pam.conf.new", "file4")
-                self.pkg("uninstall -vvv overlaid-renamed overlayer")
+                self.pkg("uninstall --parsable=0 overlaid-renamed overlayer")
+                self._assertEditables(
+                    removed=['etc/pam.conf', 'etc/pam/pam.conf'],
+                )
 
                 # Next, update overlaid package again, this time as part of a
                 # file move.  Verify that the configuration file exists at both
                 # the new location and the old location, that the content has
                 # not changed in either, and that the new configuration exists
                 # as expected as ".new".
-                self.pkg("install -vvv overlaid-renamed@3 overlayer")
+                self.pkg("install --parsable=0 overlaid-renamed@3 overlayer")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
                 self.file_append("etc/pam.conf", "zigit")
-                self.pkg("install -vvv [email protected]")
+                self.pkg("install --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam/pam.conf']],
+                    installed=['etc/pam/pam.conf.new'],
+                )
                 self.file_contains("etc/pam.conf", "zigit")
                 self.file_contains("etc/pam/pam.conf", "zigit")
                 self.file_contains("etc/pam/pam.conf.new", "file4")
@@ -9551,17 +9318,27 @@
                 # Next, downgrade the package and verify that if an overlaid
                 # file moves back to its original location, the content of the
                 # overlay file will not change.
-                self.pkg("update -vvv overlaid-renamed@3")
+                self.pkg("update --parsable=0 overlaid-renamed@3")
+                self._assertEditables(
+                    removed=['etc/pam/pam.conf'],
+                )
                 self.file_contains("etc/pam.conf", "zigit")
 
                 # Now upgrade again for remaining tests.
-                self.pkg("install -vvv [email protected]")
+                self.pkg("install --parsable=0 [email protected]")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam/pam.conf']],
+                    installed=['etc/pam/pam.conf.new'],
+                )
 
                 # Verify etc/pam.conf and etc/pam/pam.conf salvaged after
                 # uninstall as overlay file and overlaid file is different from
                 # packaged.
                 shutil.rmtree(sroot)
-                self.pkg("uninstall -vvv overlaid-renamed overlayer")
+                self.pkg("uninstall --parsable=0 overlaid-renamed overlayer")
+                self._assertEditables(
+                    removed=['etc/pam.conf', 'etc/pam/pam.conf'],
+                )
                 salvaged = sorted(
                     n for n in os.listdir(os.path.join(sroot, "etc"))
                     if n.startswith("pam")
@@ -9579,348 +9356,66 @@
                 # Next, install overlaid package and overlaying package, then
                 # upgrade each to a version where the file has changed
                 # locations and verify that the content remains intact.
-                self.pkg("install -vvv overlaid@0 overlayer-move@0")
+                self.pkg("install --parsable=0 overlaid@0 overlayer-move@0")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
                 self.file_append("etc/pam.conf", "zigit")
-                self.pkg("install -vvv overlaid@3")
+                self.pkg("install --parsable=0 overlaid@3")
+                self._assertEditables()
                 self.file_contains("etc/pam.conf", "zigit")
-                self.pkg("install -vvv [email protected] overlayer-move@1")
+                self.pkg("install --parsable=0 [email protected] "
+                    "overlayer-move@1")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam/pam.conf']],
+                )
                 self.file_contains("etc/pam/pam.conf", "zigit")
 
                 # Next, downgrade overlaid-renamed and overlaying package to
                 # versions where the file is restored to its original location
                 # and verify that the content is reverted to the original
                 # overlay version since this is a downgrade.
-                self.pkg("update -vvv overlaid-renamed@3 overlayer-move@0")
+                self.pkg("update --parsable=0 overlaid-renamed@3 "
+                    "overlayer-move@0")
+                self._assertEditables(
+                    removed=['etc/pam/pam.conf'],
+                    installed=['etc/pam.conf'],
+                )
                 self.file_contains("etc/pam.conf", "file2")
-                self.pkg("uninstall overlaid-renamed overlayer-move")
+                self.pkg("uninstall --parsable=0 overlaid-renamed overlayer-move")
+                self._assertEditables(
+                    removed=['etc/pam.conf'],
+                )
 
                 # Next, install overlaid package and overlaying package and
                 # verify preserve acts as expected for overlay package as it is
                 # updated.
-                self.pkg("install -vvv overlaid@2 overlayer-update@0")
+                self.pkg("install --parsable=0 overlaid@2 overlayer-update@0")
+                self._assertEditables(
+                    installed=['etc/pam.conf'],
+                )
                 self.file_contains("etc/pam.conf", "file1")
                 # unpreserved -> preserved
-                self.pkg("install -vvv overlayer-update@1")
+                self.pkg("install --parsable=0 overlayer-update@1")
+                self._assertEditables(
+                    updated=['etc/pam.conf'],
+                )
                 self.file_contains("etc/pam.conf", "file2")
                 self.file_append("etc/pam.conf", "zigit")
                 # preserved -> renameold
-                self.pkg("install -vvv overlayer-update@2")
+                self.pkg("install --parsable=0 overlayer-update@2")
+                self._assertEditables(
+                    moved=[['etc/pam.conf', 'etc/pam.conf.old']],
+                    installed=['etc/pam.conf'],
+                )
                 self.file_doesnt_contain("etc/pam.conf", "zigit")
                 self.file_contains("etc/pam.conf.old", "zigit")
                 self.file_append("etc/pam.conf", "zagat")
                 # renameold -> renamenew
-                self.pkg("install -vvv overlayer-update@3")
-                self.file_contains("etc/pam.conf", "zagat")
-                self.file_contains("etc/pam.conf.new", "file4")
-
-        def test_overlay_files_exact_install(self):
-                """Test the behaviour of pkg(1) when actions for editable files
-                overlay other actions."""
-
-                # Ensure that overlay is allowed for file actions when one
-                # action has specified preserve attribute and overlay=allow,
-                # and *one* (only) other action has specified overlay=true
-                # (preserve does not have to be set).
-                self.image_create(self.rurl)
-
-                # With exact-install, this should succeed because exact-install
-                # will remove the previously installed package first.
-                # but not preserve (it isn't editable).
-                self.pkg("exact-install invalid-overlaid")
-                self.pkg("exact-install overlayer")
-                self.pkg("uninstall overlayer")
-
-                self.pkg("exact-install overlaid@0")
-                self.file_contains("etc/pam.conf", "file1")
-                # Should succeed with exact-install, because the previous
-                # package is actually removed.
-                self.pkg("contents -m overlaid")
-                self.pkg("contents -mr overlayer")
-                self.pkg("exact-install overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-
-                # install back overlaid@0
-                self.pkg("exact-install overlaid@0")
-                self.pkg("verify overlaid")
-                self.file_contains("etc/pam.conf", "file1")
-                # This should also succeed with exact-install, because the
-                # previous package is removed.
-                self.pkg("exact-install invalid-overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-                self.pkg("verify invalid-overlayer")
-                # This should also succeed with exact-install. Again.
-                self.pkg("exact-install overlaid@0")
-                self.pkg("exact-install mismatch-overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-                self.pkg("verify mismatch-overlayer")
-                # This should alos succeed with exact-install, same reason.
-                self.pkg("exact-install overlaid@0")
-                self.pkg("exact-install multi-overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-                self.pkg("verify multi-overlayer")
-                # Verify that removing the last package delivering an overlaid
-                # file removes the file.
-                self.pkg("uninstall multi-overlayer")
-                self.file_doesnt_exist("etc/pam.conf")
-
-                # Verify that exact-installing both packages at the same time
-                # results in only the overlaying file being delivered.
-                self.pkg("exact-install overlaid@0 overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-
-                # Verify that the file isn't touched on uninstall of the
-                # overlaid package if overlaying package is still installed.
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("uninstall overlaid")
-                self.file_contains("etc/pam.conf", "file2")
-                self.file_contains("etc/pam.conf", "zigit")
-
-                # Re-install overlaid package and verify that file content
-                # does not change.
-                self.pkg("exact-install overlaid@0")
-                self.file_contains("etc/pam.conf", "file2")
-                self.file_contains("etc/pam.conf", "zigit")
-                self.pkg("uninstall '*'")
-
-                # Should succeed because one action is overlayable and
-                # overlaying action declares its intent to overlay even
-                # though the overlaying action isn't marked with preserve.
-                self.pkg("exact-install overlaid@0 unpreserved-overlayer")
-                self.file_contains("etc/pam.conf", "unpreserved")
-
-                # Should succeed because overlaid action permits modification
-                # and contents matches overlaying action.
-                self.pkg("verify overlaid unpreserved-overlayer")
-
-                # Should succeed even though file has been modified since
-                # overlaid action permits modification.
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("verify overlaid")
-
-                # Should fail because overlaying action does not permit
-                # modification.
-                self.pkg("verify unpreserved-overlayer", exit=1)
-
-                # Should revert to content delivered by overlaying action.
-                self.pkg("fix unpreserved-overlayer")
-                self.file_contains("etc/pam.conf", "unpreserved")
-                self.file_doesnt_contain("etc/pam.conf", "zigit")
-
-                # Should revert to content delivered by overlaying action.
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("revert /etc/pam.conf")
-                self.file_contains("etc/pam.conf", "unpreserved")
-                self.file_doesnt_contain("etc/pam.conf", "zigit")
-                self.pkg("uninstall unpreserved-overlayer")
-
-                # Should revert to content delivered by overlaid action.
-                self.file_contains("etc/pam.conf", "unpreserved")
-                self.pkg("revert /etc/pam.conf")
-                self.file_contains("etc/pam.conf", "file1")
-
-                # Here if we use exact-install, we actually removed the
-                # unpreserved-overlayer and then unpreserved should disappear
-                # from etc/pam.conf.
-                self.pkg("exact-install -vvv unpreserved-overlayer")
-                self.file_contains("etc/pam.conf", "unpreserved")
-                self.pkg("exact-install overlaid@1")
-                self.file_contains("etc/pam.conf", "file1")
-                self.file_doesnt_contain("etc/pam.conf", "unpreserved")
-                self.pkg("uninstall -vvv overlaid")
-
-                # Now update overlaid package, and verify that it deliver the
-                # correct files
-                self.pkg("exact-install -vvv overlaid@2")
-                self.file_contains("etc/pam.conf", "file3")
-
-                self.pkg("exact-install -vvv overlaid@3")
-                self.file_contains("etc/pam.conf", "file3")
-
-                # Verify that unpreserved overlay is not salvaged when both
-                # overlaid and overlaying package are removed at the same time.
-                # (Preserved files are salvaged if they have been modified on
-                # uninstall.)
-
-                # Ensure directory is empty before testing.
-                api_inst = self.get_img_api_obj()
-                img_inst = api_inst.img
-                sroot = os.path.join(img_inst.imgdir, "lost+found")
-                shutil.rmtree(sroot)
-
-                # Verify etc directory not found after uninstall.
-                self.pkg("uninstall -vvv overlaid-renamed")
-                salvaged = [
-                    n for n in os.listdir(sroot)
-                    if n.startswith("etc")
-                ]
-                self.assertEqualDiff(salvaged, [])
-
-                # Next, update overlaid package again this time as part of a
-                # file move.  Verify that the old configuration file should
-                # be removed.
-                self.pkg("exact-install -vvv overlaid-renamed@3 "
-                    "unpreserved-overlayer")
-                self.pkg("exact-install -vvv [email protected]")
-                self.assert_(not os.path.exists(os.path.join(
-                    self.get_img_path(), "etc/pam.conf")))
-                self.file_contains("etc/pam/pam.conf.new", "file4")
-
-                # Verify etc/pam/pam.conf is salvaged after uninstall as
-                # overlay file has been changed.
-                self.pkg("uninstall -vvv overlaid-renamed")
-                salvaged = [
-                    n for n in os.listdir(os.path.join(sroot, "etc/pam"))
-                    if n.startswith("pam.conf")
-                ]
-                self.assert_(salvaged[0].startswith("pam.conf-"),
-                    msg=str(salvaged))
-
-                # Next, repeat the same set of tests performed above for
-                # renames and moves with an overlaying, preserved file.
-                #
-                # Exact-install overlaying package, then update overlaid
-                # package and verify that file content does not change if only
-                # preserve attribute changes.
-                self.pkg("exact-install -vvv overlayer")
-                self.file_contains("etc/pam.conf", "file2")
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("exact-install -vvv overlaid@1")
-                self.file_contains("etc/pam.conf", "file2")
-                self.file_contains("etc/pam.conf", "zigit")
-
-                # Now update overlaid package again, and verify that file
-                # content does not change even though overlaid content has.
-                self.pkg("exact-install -vvv overlaid@2")
-                self.file_contains("etc/pam.conf", "zigit")
-                self.file_contains("etc/pam.conf", "file2")
-
-                # Now update overlaid package again this time as part of a
-                # rename, and verify that file content does not change even
-                # though file has moved between packages.
-                self.pkg("exact-install -vvv overlaid@3")
-                self.file_contains("etc/pam.conf", "zigit")
-
-                # Verify that preserved overlay is salvaged.
-                # (Preserved files are salvaged if they have been modified on
-                # uninstall.)
-
-                # Ensure directory is empty before testing.
-                api_inst = self.get_img_api_obj()
-                img_inst = api_inst.img
-                sroot = os.path.join(img_inst.imgdir, "lost+found")
-                shutil.rmtree(sroot)
-
-                # Verify etc directory found after uninstall.
-                self.pkg("uninstall -vvv overlaid-renamed")
-                salvaged = [
-                    n for n in os.listdir(sroot)
-                    if n.startswith("etc")
-                ]
-                self.assert_(salvaged[0].startswith("etc"),
-                    msg=str(salvaged))
-                self.assert_(salvaged[1].startswith("etc-"),
-                    msg=str(salvaged))
-
-                # Next, update overlaid package again, this time as part of a
-                # file move where the overlay attribute was dropped. Verify
-                # that the content has not changed after move to new location
-                # and that the new configuration exists as expected as
-                # ".new".
-                self.pkg("exact-install -vvv overlaid-renamed@3 overlayer")
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("exact-install -vvv [email protected]")
-                self.file_contains("etc/pam/pam.conf", "zigit")
-                self.file_contains("etc/pam/pam.conf.new", "file4")
-                self.pkg("uninstall -vvv overlaid-renamed")
-
-                # Next, update overlaid package again, this time as part of a
-                # file move where the overlay attribute was dropped. Verify
-                # that the content has not changed after move to new location
-                # and that the new configuration exists as expected as
-                # ".new".
-                self.pkg("exact-install -vvv overlaid-renamed@3 overlayer")
-                self.file_append("etc/pam.conf", "zigit")
-                self.file_contains("etc/pam.conf", "file2")
-                self.pkg("exact-install -vvv [email protected]")
-                self.file_contains("etc/pam/pam.conf", "zigit")
-                self.file_contains("etc/pam/pam.conf.new", "file4")
-
-                # Next, downgrade the package and verify that if an overlaid
-                # file moves back to its original location. Because the
-                # previous etc/pam.conf for overlayer was removed by
-                # exact-install, the content of the overlay file will be a new
-                # one.
-                self.pkg("update -vvv overlaid-renamed@3")
-                self.file_contains("etc/pam.conf", "file3")
-                self.file_doesnt_contain("etc/pam.conf", "zigit")
-                self.file_append("etc/pam.conf", "zigit")
-                # Now upgrade again for remaining tests.
-                self.pkg("exact-install -vvv [email protected]")
-
-                # Verify etc/pam.conf and etc/pam/pam.conf salvaged after
-                # uninstall as overlay file and overlaid file is different from
-                # packaged.
-                shutil.rmtree(sroot)
-                self.pkg("uninstall -vvv overlaid-renamed")
-                salvaged = sorted(
-                    n for n in os.listdir(os.path.join(sroot, "etc"))
-                    if n.startswith("pam")
+                self.pkg("install --parsable=0 overlayer-update@3")
+                self._assertEditables(
+                    installed=['etc/pam.conf.new'],
                 )
-                # Should have three entries; one should be 'pam' directory
-                # (presumably containing pam.conf-X...), a 'pam-XXX' directory,
-                # and a etc-XXX directory.
-                self.assertEqualDiff(salvaged[0], "pam")
-                self.assert_(salvaged[1].startswith("pam-"),
-                    msg=str(salvaged))
-
-                salvaged = sorted(
-                    n for n in os.listdir(os.path.join(sroot, "etc/pam"))
-                    if n.startswith("pam")
-                )
-                self.assert_(salvaged[0].startswith("pam.conf-"),
-                    msg=str(salvaged))
-                salvaged = sorted(
-                    n for n in os.listdir(sroot)
-                    if n.startswith("etc-")
-                )
-                self.assert_(salvaged[0].startswith("etc-"),
-                    msg=str(salvaged))
-
-                # Next, exact-install overlaid package and overlaying package,
-                # then upgrade each to a version where the file has changed
-                # locations and verify that the content remains intact.
-                self.pkg("exact-install -vvv overlaid@0 overlayer-move@0")
-                self.file_append("etc/pam.conf", "zigit")
-                self.pkg("exact-install -vvv overlaid@3 overlayer-move@0")
-                self.file_contains("etc/pam.conf", "zigit")
-                self.pkg("exact-install -vvv [email protected] "
-                    "overlayer-move@1")
-                self.file_contains("etc/pam/pam.conf", "zigit")
-
-                # Next, downgrade overlaid-renamed and overlaying package to
-                # versions where the file is restored to its original location
-                # and verify that the content is reverted to the original
-                # overlay version since this is a downgrade.
-                self.pkg("update -vvv overlaid-renamed@3 overlayer-move@0")
-                self.file_contains("etc/pam.conf", "file2")
-                self.pkg("uninstall overlaid-renamed overlayer-move")
-
-                # Next, exact-install overlaid package and overlaying package
-                # and verify preserve acts as expected for overlay package as
-                # it is updated.
-                self.pkg("exact-install -vvv overlaid@2 overlayer-update@0")
-                self.file_contains("etc/pam.conf", "file1")
-                # unpreserved -> preserved
-                self.pkg("exact-install -vvv overlayer-update@1")
-                self.file_contains("etc/pam.conf", "file2")
-                self.file_append("etc/pam.conf", "zigit")
-                # preserved -> renameold
-                self.pkg("exact-install -vvv overlayer-update@2")
-                self.file_doesnt_contain("etc/pam.conf", "zigit")
-                self.file_contains("etc/pam.conf.old", "zigit")
-                self.file_append("etc/pam.conf", "zagat")
-                # renameold -> renamenew
-                self.pkg("exact-install -vvv overlayer-update@3")
                 self.file_contains("etc/pam.conf", "zagat")
                 self.file_contains("etc/pam.conf.new", "file4")
 
@@ -10069,152 +9564,6 @@
                 self.pkg("install dupmultitypes3@0")
                 self.pkg("update")
 
-        def test_different_types_exact_install(self):
-                """Test the behavior of pkg(1) when multiple actions of
-                different types deliver to the same pathname."""
-
-                self.image_create(self.rurl)
-
-                # In the same package.
-                self.pkg("exact-install duppath-filelink", exit=1)
-
-                # In different packages, in the same transaction.
-                self.pkg("exact-install dupfilesp1 duplink", exit=1)
-
-                # In different packages, in different transactions. This should
-                # succeed because exact-install will first uninstall
-                # dupfilesp1.
-                self.pkg("exact-install dupfilesp1")
-                self.pkg("exact-install duplink")
-
-                # Does removal of one of the busted packages get us out of the
-                # situation?
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 duplink")
-                self.pkg("verify", exit=1)
-                self.pkg("uninstall dupfilesp1")
-                self.pkg("verify")
-
-                # Implicit directory conflicts with a file
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install implicitdirs", exit=1)
-
-                # Implicit directory coincides with a delivered directory.
-                self.pkg("exact-install implicitdirs2")
-
-                # Make sure that we don't die trying to fixup a directory using
-                # an implicit directory action.
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install implicitdirs4")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "install implicitdirs7")
-                self.pkg("uninstall implicitdirs7")
-                # XXX We don't currently fix up anything beneath a directory
-                # that was restored, so we have to do it by hand.
-                os.mkdir("%s/usr/bin" % self.img_path())
-                shutil.copy("%s/tmp/file1" % self.test_root,
-                    "%s/usr/bin/something" % self.img_path())
-                owner = portable.get_user_by_name("root", self.img_path(), True)
-                group = portable.get_group_by_name("bin", self.img_path(), True)
-                os.chown("%s/usr/bin/something" % self.img_path(), owner, group)
-                os.chmod("%s/usr/bin/something" % self.img_path(), 0755)
-                self.pkg("verify")
-
-                # Removing one of more than two offending actions can't do much
-                # of anything, but should leave the system alone.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 duplink dupdir@0")
-                tmap = {
-                    stat.S_IFIFO: "fifo",
-                    stat.S_IFCHR: "character device",
-                    stat.S_IFDIR: "directory",
-                    stat.S_IFBLK: "block device",
-                    stat.S_IFREG: "regular file",
-                    stat.S_IFLNK: "symbolic link",
-                    stat.S_IFSOCK: "socket",
-                }
-                thepath = "%s/dir/pathname" % self.img_path()
-                fmt = stat.S_IFMT(os.lstat(thepath).st_mode)
-                # XXX The checks here rely on verify failing due to action types
-                # not matching what's on the system; they should probably report
-                # duplicate actions instead.  Checking the output text is a bit
-                # ugly, too, but we do need to make sure that the two problems
-                # become one.
-                self.pkg("verify", exit=1)
-                verify_type_re = "File Type: '(.*?)' should be '(.*?)'"
-                matches = re.findall(verify_type_re, self.output)
-                # We make sure that what got reported is correct -- two actions
-                # of different types in conflict with whatever actually got laid
-                # down.
-                self.assert_(len(matches) == 2)
-                whatis = matches[0][0]
-                self.assert_(matches[1][0] == whatis)
-                self.assert_(whatis == tmap[fmt])
-                shouldbe = set(["symbolic link", "regular file", "directory"]) - \
-                    set([whatis])
-                self.assert_(set([matches[0][1], matches[1][1]]) == shouldbe)
-                # Now we uninstall one of the packages delivering a type which
-                # isn't what's on the filesystem.  The filesystem should remain
-                # unchanged, but one of the errors should go away.
-                if whatis == "directory":
-                        self.pkg("uninstall duplink")
-                else:
-                        self.pkg("uninstall dupdir")
-                self.pkg("verify", exit=1)
-                matches = re.findall(verify_type_re, self.output)
-                self.assert_(len(matches) == 1)
-                nfmt = stat.S_IFMT(os.lstat(thepath).st_mode)
-                self.assert_(nfmt == fmt)
-
-                # Now we do the same thing, but we uninstall the package
-                # delivering the type which *is* what's on the filesystem.  This
-                # should also leave the filesystem alone, even though what's
-                # there will match *neither* of the remaining installed
-                # packages.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupfilesp1 duplink dupdir@0")
-                fmt = stat.S_IFMT(os.lstat(thepath).st_mode)
-                self.pkg("verify", exit=1)
-                matches = re.findall(verify_type_re, self.output)
-                self.assert_(len(matches) == 2)
-                whatis = matches[0][0]
-                self.assert_(matches[1][0] == whatis)
-                self.assert_(whatis == tmap[fmt])
-                shouldbe = set(["symbolic link", "regular file", "directory"]) - \
-                    set([whatis])
-                self.assert_(set([matches[0][1], matches[1][1]]) == shouldbe)
-                if whatis == "directory":
-                        self.pkg("uninstall dupdir")
-                elif whatis == "symbolic link":
-                        self.pkg("uninstall duplink")
-                elif whatis == "regular file":
-                        self.pkg("uninstall dupfilesp1")
-                self.pkg("verify", exit=1)
-                matches = re.findall(verify_type_re, self.output)
-                self.assert_(len(matches) == 2)
-                nfmt = stat.S_IFMT(os.lstat(thepath).st_mode)
-                self.assert_(nfmt == fmt)
-
-                # Go from multiple conflicting types down to just one type.
-                # This also tests the case where a package version being newly
-                # installed gets fixed at the same time.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupmultitypes@1")
-                self.pkg("exact-install dupmultitypes")
-                self.pkg("verify")
-
-                # Upgrading from multiple instances of one refcounted type to
-                # multiple instances of another (here, link to directory)
-                # should succeed.
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install dupmultitypes3@0")
-                self.pkg("update")
-
-
         def test_conflicting_attrs_fs_install(self):
                 """Test the behavior of pkg(1) when multiple non-file actions of
                 the same type deliver to the same pathname, but whose other
@@ -10379,180 +9728,12 @@
                     "dupdirp12")
                 self.pkg("uninstall dupdirp12")
 
-        def test_conflicting_attrs_fs_exact_install(self):
-                """Test the behavior of pkg(1) when multiple non-file actions of
-                the same type deliver to the same pathname, but whose other
-                attributes differ."""
-
-                self.image_create(self.rurl)
-
-                # One package, two links with different targets
-                self.pkg("exact-install duppath-nonidenticallinks", exit=1)
-
-                # One package, two directories with different perms
-                self.pkg("exact-install duppath-nonidenticaldirs", exit=1)
-
-                # One package, two dirs with same modes expressed two ways
-                self.pkg("exact-install duppath-almostidenticaldirs")
-
-                # One package delivers a directory explicitly, another
-                # implicitly.
-                self.pkg("exact-install implicitdirs2 implicitdirs3")
-                self.pkg("verify")
-
-                self.pkg("uninstall '*'")
-
-                # Make sure that we don't die trying to fixup a directory using
-                # an implicit directory action.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install implicitdirs4 implicitdirs5 implicitdirs6")
-                self.pkg("uninstall implicitdirs5")
-                self.pkg("verify")
-
-                self.pkg("uninstall '*'")
-
-                # Make sure that we don't die trying to fixup a directory using
-                # an implicit directory action when that's all that's left.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install implicitdirs4 implicitdirs5 implicitdirs6")
-                self.pkg("uninstall implicitdirs5 implicitdirs6")
-                self.pkg("verify")
-
-                self.pkg("uninstall '*'")
-
-                # If two packages deliver conflicting directories and another
-                # package delivers that directory implicitly, make sure the
-                # third package isn't blamed.
-                self.pkg("exact-install implicitdirs4 implicitdirs5 "
-                    "implicitdirs6", exit=1)
-                self.assert_("implicitdirs4" not in self.errout)
-
-                # Two packages, two links with different targets, installed at
-                # once
-                self.pkg("exact-install duppath-nonidenticallinksp1 "
-                    "duppath-nonidenticallinksp2@0", exit=1)
-
-                # Two packages, two links with different targets, installed
-                # separately. again with exact-install the operation will
-                # will succeed becauseit will remove the
-                # duppath-nonidenticallinksp1 pkg first.
-                self.pkg("exact-install duppath-nonidenticallinksp1")
-                self.pkg("exact-install duppath-nonidenticallinksp2@0")
-
-                self.pkg("uninstall '*'")
-
-                # If we get into a broken state, can we get out of it?
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install duppath-nonidenticallinksp1 "
-                    "duppath-nonidenticallinksp2@0")
-                self.pkg("verify", exit=1)
-                self.pkg("exact-install duppath-nonidenticallinksp2")
-                self.pkg("verify")
-
-                # If we get into a broken state, can we make it a little bit
-                # better by uninstalling one of the packages?  Removing dupdir5
-                # here won't reduce the number of different groups under which
-                # dir is delivered, but does reduce the number of actions
-                # delivering it.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp1 dupdirp2@1 dupdirp5 dupdirp6")
-                self.pkg("uninstall dupdirp5")
-                self.pkg("verify", exit=1)
-
-                self.pkg("-D broken-conflicting-action-handling=1 install "
-                    "dupdirp5")
-                # Make sure we can exact-install a package delivering an
-                # implicit directory that's currently in conflict.
-                self.pkg("exact-install dupdirp7")
-                # And make sure we can uninstall it again.
-                self.pkg("uninstall dupdirp7")
-                self.pkg("list", exit=1)
-
-                # Add everything back in, remove everything but one variant of
-                # the directory and an implicit directory, and verify.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp2@1 dupdirp5 dupdirp6 dupdirp7")
-                self.pkg("uninstall dupdirp2 dupdirp5 dupdirp6")
-                self.pkg("verify")
-
-                # Get us into a saner state by upgrading.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp2@1 dupdirp5 dupdirp6 dupdirp7")
-                self.pkg("update dupdirp2@2")
-
-                # Get us into a sane state by upgrading.
-                self.pkg("uninstall dupdirp2 dupdirp5 dupdirp6")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp2@1 dupdirp7")
-                self.pkg("update dupdirp2@2")
-                self.pkg("verify")
-
-                # We start in a sane state, but the update would result in
-                # conflict, though no more actions deliver the path in
-                # question.
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install dupdirp1 dupdirp8@1")
-                self.pkg("update", exit=1)
-
-                # How about removing one of the conflicting packages?  We'll
-                # remove the package which doesn't match the state on disk.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install duppath-nonidenticallinksp1 "
-                    "duppath-nonidenticallinksp2@0")
-                link = os.readlink("%s/dir/pathname" % self.img_path())
-                if link == "dir/something":
-                        self.pkg("uninstall duppath-nonidenticallinksp2")
-                else:
-                        self.pkg("uninstall duppath-nonidenticallinksp1")
-                self.pkg("verify")
-
-                # Now we'll try removing the package which *does* match the
-                # state on disk.  The code should clean up after us.
-                self.pkg("uninstall '*'")
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install duppath-nonidenticallinksp1 "
-                    "duppath-nonidenticallinksp2@0")
-                link = os.readlink("%s/dir/pathname" % self.img_path())
-                if link == "dir/something":
-                        self.pkg("uninstall duppath-nonidenticallinksp1")
-                else:
-                        self.pkg("uninstall duppath-nonidenticallinksp2")
-                self.pkg("verify")
-
-                # Let's try a duplicate directory delivered with all sorts of
-                # crazy conflicts!
-                self.pkg("uninstall '*'")
-                self.pkg("exact-install dupdirp1 dupdirp2@1 dupdirp3 dupdirp4",
-                    exit=1)
-
-                pkgs = " ".join("massivedupdir%d" % x for x in xrange(20))
-                self.pkg("exact-install %s" % pkgs, exit=1)
-
-                # Trigger bug 17943: we install packages with conflicts in two
-                # directories (p9, p10).  We also install a package (p11) which
-                # delivers those directories implicitly.  Then remove the last,
-                # triggering the stack trace associated with the bug.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp9 dupdirp10 dupdirp11")
-                self.pkg("uninstall dupdirp11")
-
-                # Do the same, but with a package that delivers var implicitly
-                # via a legacy action.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install dupdirp9 dupdirp10 dupdirp12")
-                self.pkg("uninstall dupdirp12")
-
         def test_conflicting_attrs_fs_varcets(self):
                 """Test the behavior of pkg(1) when multiple non-file actions of
                 the same type deliver to the same pathname, but differ in their
                 variants or facets."""
 
-                self.conflicting_attrs_fs_varcets_helper("install")
-                self.conflicting_attrs_fs_varcets_helper("exact-install")
-
-        def conflicting_attrs_fs_varcets_helper(self, install_cmd):
+                install_cmd = "install"
                 self.image_create(self.rurl)
 
                 # Two packages delivering the same directory, one under the
@@ -10767,61 +9948,6 @@
                     "otheruser othergroup@0")
                 self.pkg("update othergroup")
 
-        def test_multiple_users_exact_install(self):
-                """Test the behavior of pkg(1) when multiple user
-                actions deliver the same user."""
-
-                # This is largely identical to test_multiple_files; we may want
-                # to commonize in the future.
-
-                self.image_create(self.rurl)
-
-                self.pkg("exact-install userdb")
-
-                # Duplicate users in the same package
-                self.pkg("exact-install dupuser", exit=1)
-                # Make sure userdb is not accedentally removed.
-                self.pkg("list userdb")
-
-                # Duplicate users in different packages, but in the same
-                # transaction
-                self.pkg("exact-install userdb dupuserp1 dupuserp2@0", exit=1)
-
-                # Duplicate users in different packages, in different
-                # transactions. This should succeed with exact-install.
-                self.pkg("exact-install userdb dupuserp1")
-                self.pkg("exact-install userdb dupuserp2@0")
-
-                # Removing one of more than two offending actions can't do much
-                # of anything, but should leave the system alone.
-                self.image_destroy()
-                self.image_create(self.rurl)
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install userdb dupuserp1 dupuserp2@0 dupuserp3")
-                self.pkg("verify", exit=1)
-                out1 = self.output
-                self.pkg("uninstall dupuserp3")
-                self.pkg("verify", exit=1)
-                out2 = self.output
-                out2 = out2[out2.index("\n") + 1:]
-                self.assert_(out2 in out1)
-
-                # Removing all but one of the offending actions should get us
-                # back to sanity.
-                self.image_destroy()
-                self.image_create(self.rurl)
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install userdb dupuserp1 dupuserp2@0 dupuserp3")
-                self.pkg("uninstall dupuserp3 dupuserp2")
-                self.pkg("verify")
-
-                # Make sure we don't get confused when two actions in different
-                # namespace groups but with the same key attribute value are
-                # adjacent in the action cache.
-                self.pkg("-D broken-conflicting-action-handling=1 "
-                    "exact-install userdb otheruser othergroup@0")
-                self.pkg("update othergroup")
-
         def test_multiple_drivers(self):
                 """Test the behavior of pkg(1) when multiple driver actions
                 deliver the same driver."""
--- a/src/tests/cli/t_pkg_revert.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkg_revert.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2014 Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -44,10 +44,7 @@
             add file etc/file1 mode=0555 owner=root group=bin path=etc/file1
             close
             # [email protected] is published as part of pkgs2
-            open [email protected],5.11-0
-            add dir mode=0755 owner=root group=bin path=etc
-            add file etc/file3 mode=0555 owner=root group=bin path=etc/file3 revert-tag=bob revert-tag=ted
-            close
+            # [email protected] is published as part of pkgs3
             open [email protected],5.11-0
             add dir mode=0755 owner=root group=bin path=etc
             add file etc/file4 mode=0555 owner=root group=bin path=etc/file4 revert-tag=bob revert-tag=ted revert-tag=carol
@@ -100,6 +97,14 @@
             close
             """
 
+        # A set of packages that we publish with additional hash attributes
+        pkgs3 = """
+            open [email protected],5.11-0
+            add dir mode=0755 owner=root group=bin path=etc
+            add file etc/file3 mode=0555 owner=root group=bin path=etc/file3 revert-tag=bob revert-tag=ted
+            close
+            """
+
         misc_files = ["etc/file1", "etc/file2", "etc/file3", "etc/file4",
 		      "etc/file5"]
 
@@ -181,6 +186,8 @@
                 self.plist = self.pkgsend_bulk(self.rurl, self.pkgs)
                 self.plist.extend(self.pkgsend_bulk(self.rurl, self.pkgs2,
                     debug_hash="sha1+sha256"))
+                self.plist.extend(self.pkgsend_bulk(self.rurl, self.pkgs3,
+                    debug_hash="sha1+sha512_256"))
 
         def test_revert(self):
                 self.image_create(self.rurl)
@@ -202,6 +209,9 @@
                 self.pkg("verify B", exit=1)
                 self.assert_(sha2 in self.output)
 
+                self.pkg("-D hash=sha1+sha512_256 verify C", exit=1)
+                sha2 = "13729cb7183961b48ce300c2588c86ad123e7c636f38a0f3c8408a75fd079d09"
+                self.assert_(sha2 in self.output, self.output)
                 self.pkg("verify C", exit=1)
                 self.pkg("verify D", exit=1)
 
@@ -233,19 +243,26 @@
                 self.pkg("revert -n --parsable=0 --tagged bob")
                 self.debug("\n".join(self.plist))
                 self.assertEqualParsable(self.output,
-                    affect_packages=[self.plist[10], self.plist[1],
-                    self.plist[2]])
+                    affect_packages=[self.plist[9], self.plist[12],
+                    self.plist[1]])
                 # When reverting damage, we always verify using the
                 # most-preferred hash, but retrieve content with the
-                # least-preferred hash: -D hash=sha1+sha256 should have no
-                # effect here whatsoever, but -D hash=sha256 should fail because
+                # least-preferred hash: -D hash=sha1+sha256 and
+                # -D hash=sha1+sha512_256 should have no effect here whatsoever,
+                # but -D hash=sha256 and -D hash=sha512_256 should fail because
                 # our repository stores its files by the SHA1 hash.
                 self.pkg("-D hash=sha256 revert --parsable=0 --tagged bob",
                     exit=1)
+                self.pkg("-D hash=sha512_256 revert --parsable=0 --tagged ted",
+                    exit=1)
+                self.pkg("-D hash=sha1+512_256 revert -n --parsable=0 \
+                    --tagged ted")
+                self.assertEqualParsable(self.output,
+                    affect_packages=[self.plist[12], self.plist[1]])
                 self.pkg("-D hash=sha1+sha256 revert --parsable=0 --tagged bob")
                 self.assertEqualParsable(self.output,
-                    affect_packages=[self.plist[10], self.plist[1],
-                    self.plist[2]])
+                    affect_packages=[self.plist[9], self.plist[12],
+                    self.plist[1]])
                 self.pkg("verify A", exit=1)
                 self.pkg("verify B")
                 self.pkg("verify C")
--- a/src/tests/cli/t_pkg_search.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkg_search.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -42,6 +42,7 @@
 import pkg.fmri as fmri
 import pkg.indexer as indexer
 import pkg.portable as portable
+import pkg.sha512_t as sha512_t
 
 
 class TestPkgSearchBasics(pkg5unittest.SingleDepotTestCase):
@@ -1046,13 +1047,6 @@
                 self.pkgsend_bulk(self.durl1, self.same_pub1, refresh_index=True)
                 self.durl2 = self.dcs[2].get_depot_url()
                 self.rurl2 = self.dcs[2].get_repo_url()
-                # our 2nd depot gets the package published with multiple hash
-                # attributes, but served from a single-hash-aware depot
-                # (the fact that it's single-hash-aware should make no
-                # difference to the content it serves so long as the index was
-                # generated while we were aware of multiple hashes.
-                self.pkgsend_bulk(self.rurl2, self.same_pub2,
-                    refresh_index=True, debug_hash="sha1+sha256")
 
         def test_7140657(self):
                 """ Check that pkg search with -s works as intended when there are
@@ -1110,7 +1104,7 @@
                 expected = self.reduceSpaces(expected_out2)
                 self.assertEqualDiff(expected, actual)
 
-        def test_search_multi_hash(self):
+        def test_search_multi_hash_1(self):
                 """Check that when searching a repository with multiple
                 hashes, all hash attributes are indexed and we can search
                 against all hash attributes.
@@ -1118,13 +1112,32 @@
                 This test depends on pkg.digest having DebugValue settings
                 that add sha256 hashes to the set of hashes we append to
                 actions at publication time."""
+                self.base_search_multi_hash("sha256", hashlib.sha256)
 
+        def test_search_multi_hash_2(self):
+                """Check that when searching a repository with multiple
+                hashes, all hash attributes are indexed and we can search
+                against all hash attributes.
+
+                This test depends on pkg.digest having DebugValue settings
+                that add sha512/256 hashes to the set of hashes we append to
+                actions at publication time."""
+                self.base_search_multi_hash("sha512_256", sha512_t.SHA512_t)
+
+        def base_search_multi_hash(self, hash_alg, hash_fun):
+                # our 2nd depot gets the package published with multiple hash
+                # attributes, but served from a single-hash-aware depot
+                # (the fact that it's single-hash-aware should make no
+                # difference to the content it serves so long as the index was
+                # generated while we were aware of multiple hashes.
+                self.pkgsend_bulk(self.rurl2, self.same_pub2,
+                    refresh_index=True, debug_hash="sha1+%s" % hash_alg)
                 self.image_create(self.durl2, prefix="samepub")
 
                 # manually calculate the hashes, in case of bugs in
                 # pkg.misc.get_data_digest
                 sha1_hash = hashlib.sha1("magic").hexdigest()
-                sha2_hash = hashlib.sha256("magic").hexdigest()
+                sha2_hash = hash_fun("magic").hexdigest()
 
                 self.pkg("search %s" % sha1_hash)
                 self.pkg("search %s" % sha2_hash)
@@ -1141,7 +1154,7 @@
 
                 self.pkg("search -H -o search.match_type %s" % sha2_hash)
                 self.assertEqualDiff(
-                    self.reduceSpaces(self.output), "pkg.hash.sha256\n")
+                    self.reduceSpaces(self.output), "pkg.hash.%s\n" % hash_alg)
 
                 # check that both searches match the same action
                 self.pkg("search -o action.raw %s" % sha1_hash)
@@ -1154,7 +1167,8 @@
                 # check that the same searches in the non-multihash-aware
                 # repository only return a result for the sha-1 hash
                 # (which checks that we're only setting multiple hashes
-                # on actions when hash=sha1+sha256 is set)
+                # on actions when hash=sha1+sha256 or hash=sha1+sha512_256
+                # is set)
                 self.pkg("search -s %s %s" % (self.durl1, sha1_hash))
                 self.pkg("search -s %s %s" % (self.durl1, sha2_hash), exit=1)
 
--- a/src/tests/cli/t_pkg_sysrepo.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkg_sysrepo.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -93,11 +93,16 @@
 
         baz10 = """
             open [email protected],5.11-0
-            add file tmp/example_three mode=0555 owner=root group=bin path=/usr/bin/another
+            add file tmp/example_three mode=0555 owner=root group=bin path=/usr/bin/another_1
+            close"""
+
+        caz10 = """
+            open [email protected],5.11-0
+            add file tmp/example_four mode=0555 owner=root group=bin path=/usr/bin/another_2
             close"""
 
         misc_files = ["tmp/example_file", "tmp/example_two",
-            "tmp/example_three"]
+            "tmp/example_three", "tmp/example_four"]
 
         expected_all_access =  """\
 PUBLISHER\tSTICKY\tSYSPUB\tENABLED\tTYPE\tSTATUS\tURI\tPROXY
@@ -160,6 +165,8 @@
                     debug_hash="sha1+sha256")
                 self.pkgsend_bulk(self.rurl3, self.baz10,
                     debug_hash="sha1+sha256")
+                self.pkgsend_bulk(self.rurl3, self.caz10,
+                    debug_hash="sha1+sha512_256")
                 self.pkgsend_bulk(self.rurl4, self.bar10)
                 self.pkgsend_bulk(self.rurl5, self.foo11)
 
@@ -614,6 +621,9 @@
                 self.pkg("install baz")
                 self.pkg("contents -m baz")
                 self.assert_("pkg.hash.sha256" in self.output)
+                self.pkg("install caz")
+                self.pkg("contents -m caz")
+                self.assert_("pkg.hash.sha512_256" in self.output)
 
         def test_02_communication(self):
                 """Test that the transport for communicating with the depots is
@@ -949,6 +959,7 @@
                 expected = """\
 bar (test3) 1.0-0 ---
 baz (test3) 1.0-0 ---
+caz (test3) 1.0-0 ---
 example_pkg 1.0-0 ---
 """
                 self.__check_package_lists(expected)
@@ -977,6 +988,7 @@
                 expected = """\
 bar (test3) 1.0-0 ---
 baz (test3) 1.0-0 ---
+caz (test3) 1.0-0 ---
 example_pkg 1.0-0 ---
 """
                 self.__check_package_lists(expected)
--- a/src/tests/cli/t_pkgrecv.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkgrecv.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -912,9 +912,11 @@
                 """Tests that we can recv to and from repositories with
                 multi-hash support, interoperating with repositories without
                 multi-hash support."""
+                self.base_12_multihash("sha256")
+                self.base_12_multihash("sha512_256")
 
+        def base_12_multihash(self, hash_alg):
                 f = fmri.PkgFmri(self.published[3], None)
-
                 # We create an image simply so we can use "contents -g" to
                 # inspect the repository.
                 self.image_create()
@@ -922,39 +924,40 @@
                 # First, recv the package and verify it has no extended hashes
                 self.pkgrecv(self.durl1, "-d %s %s" % (self.durl3, f))
                 self.pkg("contents -g %s -m %s" % (self.durl3, f))
-                self.assert_("pkg.hash.sha256" not in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg not in self.output)
 
                 # Now stop and start the repository as multi-hash aware, and
                 # recv it again, making sure that we do not get multiple hashes
                 # added (because modifying the manifest would break signatures)
                 self.dcs[3].stop()
-                self.dcs[3].set_debug_feature("hash=sha1+sha256")
+                self.dcs[3].set_debug_feature("hash=sha1+%s" % hash_alg)
                 self.dcs[3].start()
                 self.pkgrecv(self.durl1, "-d %s %s" % (self.durl3, f))
                 self.pkg("contents -g %s -m %s" % (self.durl3, f))
-                self.assert_("pkg.hash.sha256" not in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg not in self.output)
 
                 # Now check the reverse - that a package with multiple hashes
                 # can be received into a repository that is not multi-hash aware
                 b = "[email protected],5.11-0"
                 self.pkgsend_bulk(self.durl3, self.bronze10)
                 self.pkg("contents -g %s -m %s" % (self.durl3, b))
-                self.assert_("pkg.hash.sha256" in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg in self.output)
                 self.pkgrecv(self.durl3, "-d %s %s" % (self.durl4, b))
                 self.pkg("contents -g %s -m %s" % (self.durl4, b))
-                self.assert_("pkg.hash.sha256" in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg in self.output)
 
                 # Ensure that we can recv multi-hash packages into p5p files
-                p5p_path = os.path.join(self.test_root, "multi-hash.p5p")
+                p5p_path = os.path.join(self.test_root,
+                    "multi-hash-%s.p5p" % hash_alg)
                 self.pkgrecv(self.durl3, "-ad %s %s" % (p5p_path, b))
                 self.pkg("contents -g %s -m %s" % (p5p_path, b))
-                self.assert_("pkg.hash.sha256" in self.output)
+                self.assert_("pkg.hash.%s" % hash_alg in self.output)
 
                 # Finally, stop and start our scratch repository to clear the
                 # debug feature. If this doesn't happen because we've failed
                 # before now, it's not the end of the world.
                 self.dcs[3].stop()
-                self.dcs[3].unset_debug_feature("hash=sha1+sha256")
+                self.dcs[3].unset_debug_feature("hash=sha1+%s" % hash_alg)
                 self.dcs[3].start()
 
         def test_13_output(self):
--- a/src/tests/cli/t_pkgrepo.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkgrepo.py	Fri Apr 18 10:14:06 2014 +0530
@@ -1384,37 +1384,39 @@
                                 continue
                         self.assert_(not os.listdir(rstore.file_root))
 
-                # Reset the src_repo for the rest of the test.
-                shutil.rmtree(src_repo)
-                self.create_repo(src_repo)
-                self.pkgrepo("set -s %s publisher/prefix=test" % src_repo)
-
-                published = self.pkgsend_bulk(src_repo, (self.tree10),
-                    debug_hash="sha1+sha256")
-
-                # Verify that we only have SHA-1 hashes in the rstore
-                repo = self.get_repo(src_repo)
-                known_hashes = self.fhashes.values()
-                for rstore in repo.rstores:
-                        if not rstore.publisher:
-                                continue
-                        for dir, dnames, fnames in os.walk(rstore.file_root):
-                                for f in fnames:
-                                        if f not in known_hashes:
-                                                self.assert_(False,
-                                                    "Unexpected content in "
-                                                    "repodir: %s" % f)
-
-                # Verify that when a repository has been published with multiple
-                # hashes, on removal, we only attempt to remove files using the
-                # least-preferred hash.
-                self.pkgrepo("remove -s %s tree" % src_repo)
-
-                # Verify repository file_root is empty.
-                for rstore in repo.rstores:
-                        if not rstore.publisher:
-                                continue
-                        self.assert_(not os.listdir(rstore.file_root))
+                for hash_alg in ["sha256", "sha512_256"]:
+                        # Reset the src_repo for the rest of the test.
+                        shutil.rmtree(src_repo)
+                        self.create_repo(src_repo)
+                        self.pkgrepo("set -s %s publisher/prefix=test" %
+                            src_repo)
+                        published = self.pkgsend_bulk(src_repo, (self.tree10),
+                            debug_hash="sha1+%s" % hash_alg)
+
+                        # Verify that we only have SHA-1 hashes in the rstore
+                        repo = self.get_repo(src_repo)
+                        known_hashes = self.fhashes.values()
+                        for rstore in repo.rstores:
+                                if not rstore.publisher:
+                                        continue
+                                for dir, dnames, fnames in \
+                                    os.walk(rstore.file_root):
+                                        for f in fnames:
+                                                if f not in known_hashes:
+                                                        self.assert_(False,
+                                                            "Unexpected content "
+                                                            "in repodir: %s" % f)
+
+                        # Verify that when a repository has been published with
+                        # multiple hashes, on removal, we only attempt to remove
+                        # files using the least-preferred hash.
+                        self.pkgrepo("remove -s %s tree" % src_repo)
+
+                        # Verify repository file_root is empty.
+                        for rstore in repo.rstores:
+                                if not rstore.publisher:
+                                        continue
+                                self.assert_(not os.listdir(rstore.file_root))
 
                 # Cleanup.
                 shutil.rmtree(src_repo)
@@ -1924,36 +1926,40 @@
                 self.assert_(bad_gzip_path in self.output)
 
                 # Check that when verifying content, we always use the most
-                # preferred hash. Remove all existing packages first.
-                self.pkgrepo("-s %s remove %s" % (repo_path, " ".join(fmris)))
-                fmris = self.pkgsend_bulk(repo_path, (self.tree10),
-                    debug_hash="sha1+sha256")
-                self.pkgrepo("-s %s verify" % repo_path, exit=0)
-
-                # break a file in the repository and ensure we spot it.
-                bad_hash_path = self.__inject_badhash("tmp/truck1")
-                bad_basename = os.path.basename(bad_hash_path)
-
-                self.pkgrepo("-s %s verify" % repo_path, exit=1)
-                self.assert_(
-                    self.output.count("ERROR: Invalid file hash") == 1)
-
-                # We should be verifying using the SHA-2 hash, and so we should
-                # only see the SHA-1 value in the output once, when printing
-                # the path to the file in the repository, not when reporting
-                # the computed or expected hash.
-                self.assert_(self.output.count(bad_basename) == 1)
-
-                # Verify that when we publish using SHA-1 only, that we get
-                # the SHA-1 value printed twice: once when printing the path
-                # to the file in the repository, and once when printing the
-                # expected hash.
-                self.pkgrepo("-s %s remove %s" % (repo_path, " ".join(fmris)))
-                fmris = self.pkgsend_bulk(repo_path, (self.tree10))
-                self.__inject_badhash("tmp/truck1")
-
-                self.pkgrepo("-s %s verify" % repo_path, exit=1)
-                self.assert_(self.output.count(bad_basename) == 2)
+                # preferred hash.
+                for hash_alg in ["sha256", "sha512_256"]:
+                        # Remove all existing packages first.
+                        self.pkgrepo("-s %s remove %s" % (repo_path,
+                            " ".join(fmris)))
+                        fmris = self.pkgsend_bulk(repo_path, (self.tree10),
+                            debug_hash="sha1+%s" % hash_alg)
+                        self.pkgrepo("-s %s verify" % repo_path, exit=0)
+
+                        # break a file in the repository and ensure we spot it.
+                        bad_hash_path = self.__inject_badhash("tmp/truck1")
+                        bad_basename = os.path.basename(bad_hash_path)
+
+                        self.pkgrepo("-s %s verify" % repo_path, exit=1)
+                        self.assert_(
+                            self.output.count("ERROR: Invalid file hash") == 1)
+
+                        # We should be verifying using the SHA-2 hash, and so we
+                        # should only see the SHA-1 value in the output once,
+                        # when printing the path to the file in the repository,
+                        # not when reporting the computed or expected hash.
+                        self.assert_(self.output.count(bad_basename) == 1)
+
+                        # Verify that when we publish using SHA-1 only, that we
+                        # get the SHA-1 value printed twice: once when printing
+                        # the path to the file in the repository, and once when
+                        # printing the expected hash.
+                        self.pkgrepo("-s %s remove %s" % (repo_path,
+                            " ".join(fmris)))
+                        fmris = self.pkgsend_bulk(repo_path, (self.tree10))
+                        self.__inject_badhash("tmp/truck1")
+
+                        self.pkgrepo("-s %s verify" % repo_path, exit=1)
+                        self.assert_(self.output.count(bad_basename) == 2)
 
         def test_12_verify_badmanifest(self):
                 """Test that verify finds bad manifests."""
--- a/src/tests/cli/t_pkgsend.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkgsend.py	Fri Apr 18 10:14:06 2014 +0530
@@ -20,7 +20,7 @@
 # CDDL HEADER END
 #
 
-# Copyright (c) 2008, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2008, 2014, Oracle and/or its affiliates. All rights reserved.
 
 import testutils
 if __name__ == "__main__":
@@ -1295,6 +1295,10 @@
                 and only if they match the hash attributes we know how to
                 compute, other attributes are left alone."""
 
+                self.base_26_pkgsend_multihash("sha256")
+                self.base_26_pkgsend_multihash("sha512_256")
+
+        def base_26_pkgsend_multihash(self, hash_alg):
                 # we use a file:// URI rather than the repo URI so we don't have
                 # to worry about starting the depot in SHA-2 mode. Other tests
                 # in the test suite ensure SHA-2 publication is working over
@@ -1306,20 +1310,21 @@
                 with open(mfpath, "wb") as mf:
                         mf.write("""
 set name=pkg.fmri value=pkg:/[email protected]
-file %s path=/foo owner=root group=sys mode=0644 pkg.hash.sha256=spaghetti \
+file %s path=/foo owner=root group=sys mode=0644 pkg.hash.%s=spaghetti \
     pkg.hash.rot13=caesar
-""" % payload)
+""" % (payload, hash_alg))
                 self.pkgsend("", "-s %s publish %s" % (furi, mfpath))
                 self.image_create(furi)
                 self.pkg("contents -rm multihash")
-                self.assert_("pkg.hash.sha256=spaghetti" in self.output)
+                self.assert_("pkg.hash.%s=spaghetti" % hash_alg in self.output)
 
                 self.pkgsend("", "-s %s publish %s" % (furi, mfpath),
-                    debug_hash="sha1+sha256")
+                    debug_hash="sha1+%s" % hash_alg)
                 self.pkg("refresh")
 
                 self.pkg("contents -rm multihash")
-                self.assert_("pkg.hash.sha256=spaghetti" not in self.output)
+                self.assert_("pkg.hash.%s=spaghetti" % hash_alg
+                    not in self.output)
                 self.assert_("pkg.hash.rot13=caesar" in self.output)
 
 
--- a/src/tests/cli/t_pkgsign.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/cli/t_pkgsign.py	Fri Apr 18 10:14:06 2014 +0530
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2010, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 import testutils
@@ -527,6 +527,10 @@
                 """Test that having a package signed with more than one
                 signature doesn't cause anything to break."""
 
+                self.base_multiple_signatures("sha256")
+                self.base_multiple_signatures("sha512_256")
+
+        def base_multiple_signatures(self, hash_alg):
                 plist = self.pkgsend_bulk(self.rurl1, self.example_pkg10)
 
                 sign_args = "-k %(key)s -c %(cert)s -i %(i1)s -i %(i2)s " \
@@ -552,7 +556,8 @@
                             "ch5_ta1_cert.pem"),
                         "pkg": plist[0]
                     }
-                self.pkgsign(self.rurl1, sign_args, debug_hash="sha1+sha256")
+                self.pkgsign(self.rurl1, sign_args,
+                    debug_hash="sha1+%s" % hash_alg)
 
                 sign_args = "-k %(key)s -c %(cert)s %(name)s" % {
                     "name": plist[0],
@@ -569,7 +574,7 @@
 
                 # Make sure we've got exactly 1 signature with SHA2 hashes
                 self.pkg("contents -m")
-                self.assert_(self.output.count("pkg.chain.sha256") == 1)
+                self.assert_(self.output.count("pkg.chain.%s" % hash_alg) == 1)
                 self.assert_(self.output.count("pkg.chain.chashes") == 1)
                 # and SHA1 hashes on both signatures
                 self.assert_(self.output.count("chain=") == 2)
@@ -2375,26 +2380,29 @@
                 with open(mp, "wb") as fh:
                         for l in s:
                                 fh.write(l)
-                # Rebuild the catalog so that hash verification for the manifest
-                # won't cause problems.
-                r.rebuild()
-                # This should fail because the manifest already has identical
-                # signature actions in it.
-                self.pkgsign_simple(self.rurl1, plist[0], exit=1)
-
-                # The addition of SHA-256 hashes should still result in us
-                # believing the signatures are identical
-                self.pkgsign_simple(self.rurl1, plist[0], exit=1,
-                    debug_hash="sha1+sha256")
-
-                self.pkg_image_create(self.rurl1)
-                self.seed_ta_dir("ta3")
-                self.pkg("set-property signature-policy verify")
-
-                # This fails because the manifest contains duplicate signatures.
-                api_obj = self.get_img_api_obj()
-                self.assertRaises(apx.UnverifiedSignature, self._api_install,
-                    api_obj, ["example_pkg"])
+
+                for hash_alg in ["sha256", "sha512_256"]:
+                        # Rebuild the catalog so that hash verification for the
+                        # manifest won't cause problems.
+                        r.rebuild()
+                        # This should fail because the manifest already has
+                        # identical signature actions in it.
+                        self.pkgsign_simple(self.rurl1, plist[0], exit=1)
+
+                        # The addition of SHA-256 hashes should still result in
+                        # us believing the signatures are identical.
+                        self.pkgsign_simple(self.rurl1, plist[0], exit=1,
+                            debug_hash="sha1+%s" % hash_alg)
+
+                        self.pkg_image_create(self.rurl1)
+                        self.seed_ta_dir("ta3")
+                        self.pkg("set-property signature-policy verify")
+
+                        # This fails because the manifest contains duplicate
+                        # signatures.
+                        api_obj = self.get_img_api_obj()
+                        self.assertRaises(apx.UnverifiedSignature,
+                                self._api_install, api_obj, ["example_pkg"])
 
         def test_bug_16867_hashes_1(self):
                 """Test whether signing a package a second time with hashes
--- a/src/tests/pkg5unittest.py	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/tests/pkg5unittest.py	Fri Apr 18 10:14:06 2014 +0530
@@ -133,7 +133,7 @@
 
 # Version test suite is known to work with.
 PKG_CLIENT_NAME = "pkg"
-CLIENT_API_VERSION = 78
+CLIENT_API_VERSION = 79
 
 ELIDABLE_ERRORS = [ TestSkippedException, depotcontroller.DepotStateException ]
 
@@ -960,10 +960,11 @@
         def assertEqualParsable(self, output, activate_be=True,
             add_packages=EmptyI, affect_packages=EmptyI, affect_services=EmptyI,
             backup_be_name=None, be_name=None, boot_archive_rebuild=False,
-            change_facets=EmptyI, change_packages=EmptyI,
-            change_mediators=EmptyI, change_variants=EmptyI,
-            child_images=EmptyI, create_backup_be=False, create_new_be=False,
-            image_name=None, licenses=EmptyI, remove_packages=EmptyI, release_notes=EmptyI,
+            change_editables=EmptyI, change_facets=EmptyI,
+            change_packages=EmptyI, change_mediators=EmptyI,
+            change_variants=EmptyI, child_images=EmptyI, create_backup_be=False,
+            create_new_be=False, image_name=None, licenses=EmptyI,
+            remove_packages=EmptyI, release_notes=EmptyI, include=EmptyI,
             version=0):
                 """Check that the parsable output in 'output' is what is
                 expected."""
@@ -989,8 +990,8 @@
                 # is correct.
                 self.assert_("space-required" in outd)
                 del outd["space-required"]
-                # Add 3 to outd to take account of self, output, and outd.
-                self.assertEqual(len(expected), len(outd) + 3, "Got a "
+                # Add 4 to account for self, output, include, and outd.
+                self.assertEqual(len(expected), len(outd) + 4, "Got a "
                     "different set of keys for expected and outd.  Those in "
                     "expected but not in outd:\n%s\nThose in outd but not in "
                     "expected:\n%s" % (
@@ -998,7 +999,13 @@
                         set(outd)),
                         sorted(set(outd) -
                         set([k.replace("_", "-") for k in expected]))))
+
+                seen = set()
                 for k in sorted(outd):
+                        seen.add(k)
+                        if include and k not in include:
+                                continue
+
                         ek = k.replace("-", "_")
                         ev = expected[ek]
                         if ev == EmptyI:
@@ -1010,6 +1017,11 @@
                             "of %s was expected to be\n%s but was\n%s" %
                             (image_name, k, ev, outd[k]))
 
+                if include:
+                        # Assert all sections explicitly requested were matched.
+                        self.assertEqualDiff(include, list(x for x in (seen &
+                            set(include))))
+
         def configure_rcfile(self, rcfile, config, test_root, section="DEFAULT",
             suffix=""):
                 """Reads the provided rcfile file, setting key/value
@@ -2624,7 +2636,7 @@
 
                 # debug_hash lets us choose the type of hash attributes that
                 # should be added to this package on publication. Valid values
-                # are: sha1, sha1+sha256, sha256
+                # are: sha1, sha256, sha1+sha256, sha512_256, sha1+sha512_256
                 if debug_hash:
                         args.append("-D hash=%s" % debug_hash)
 
@@ -2819,9 +2831,9 @@
                 arguments to point to template, logs, cache and proto areas
                 within our test root."""
 
-                if "-S" not in args and "-d" not in args and fill_missing_args:
+                if "-S" not in args and "-d " not in args and fill_missing_args:
                         args += " -S "
-                if "-c" not in args and fill_missing_args:
+                if "-c " not in args and fill_missing_args:
                         args += " -c %s" % os.path.join(self.test_root,
                             "depot_cache")
                 if "-l" not in args:
@@ -3198,6 +3210,15 @@
 
                 self._api_finish(api_obj, catch_wsie=catch_wsie)
 
+        def _api_revert(self, api_obj, args, catch_wsie=True, noexecute=False,
+            **kwargs):
+                self.debug("revert %s" % " ".join(args))
+                for pd in api_obj.gen_plan_revert(args, **kwargs):
+                        continue
+                if noexecute:
+                        return
+                self._api_finish(api_obj, catch_wsie=catch_wsie)
+
         def _api_uninstall(self, api_obj, pkg_list, catch_wsie=True, **kwargs):
                 self.debug("uninstall %s" % " ".join(pkg_list))
                 for pd in api_obj.gen_plan_uninstall(pkg_list, **kwargs):
@@ -3270,7 +3291,7 @@
                 file_path = os.path.join(self.get_img_path(), path)
                 portable.remove(file_path)
 
-        def file_contains(self, path, string):
+        def file_contains(self, path, string, appearances=1):
                 """Assert the existence of a string in a file in the image."""
 
                 file_path = os.path.join(self.get_img_path(), path)
@@ -3283,8 +3304,10 @@
 
                 for line in f:
                         if string in line:
-                                f.close()
-                                break
+                                appearances -= 1
+                                if appearances == 0:
+                                        f.close()
+                                        break
                 else:
                         f.close()
                         self.assert_(False, "File %s does not contain %s" %
@@ -3576,6 +3599,7 @@
                 return "ta%d" % ta
 
         def setUp(self, publishers, start_depots=True):
+
                 # We only have 5 usable CA certs and there are not many usecases
                 # for setting up more than 5 different SSL-secured depots.
                 assert len(publishers) < 6
@@ -4420,7 +4444,7 @@
 
         def __init__(self, conf, port, work_dir, testcase=None, https=False):
                 ApacheController.__init__(self, conf, port, work_dir,
-                    testcase=testcase, https=False)
+                    testcase=testcase, https=https)
                 self.apachectl = "/usr/apache2/2.2/bin/64/httpd.worker"
 
         def _network_ping(self):
@@ -4439,7 +4463,7 @@
 
         def __init__(self, conf, port, work_dir, testcase=None, https=False):
                 ApacheController.__init__(self, conf, port, work_dir,
-                    testcase=testcase, https=False)
+                    testcase=testcase, https=https)
                 self.apachectl = "/usr/apache2/2.2/bin/64/httpd.worker"
 
         def _network_ping(self):
Binary file src/tests/ro_data/elftest.so.1 has changed
Binary file src/tests/ro_data/elftest.so.2 has changed
--- a/src/util/apache2/depot/depot_httpd.conf.mako	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/util/apache2/depot/depot_httpd.conf.mako	Fri Apr 18 10:14:06 2014 +0530
@@ -19,7 +19,7 @@
 #
 # CDDL HEADER END
 #
-# Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
 #
 
 #
@@ -335,6 +335,34 @@
 CacheMaxFileSize 45690876
 % endif
 
+<%
+        ssl_cert_file_path = context.get("ssl_cert_file", "")
+        ssl_key_file_path = context.get("ssl_key_file", "")
+        if ssl_cert_file_path and ssl_key_file_path:
+                context.write("""
+# DNS domain name of the server
+ServerName %s
+# enable SSL
+SSLEngine On
+# Location of the server certificate and key.
+""" % context.get("host", "localhost"))
+                context.write("SSLCertificateFile %s\n" % ssl_cert_file_path)
+                context.write("SSLCertificateKeyFile %s\n" % ssl_key_file_path)
+                context.write("""
+# Intermediate CA certificate file. Required if your server certificate
+# is not signed by a top-level CA directly but an intermediate authority.
+# Comment out this section if you don't need one or if you are using a
+# test certificate
+""")
+                ssl_cert_chain_file_path = context.get("ssl_cert_chain_file",
+                    "")
+                if ssl_cert_chain_file_path:
+                        context.write("SSLCertificateChainFile %s\n" %
+                            ssl_cert_chain_file_path)
+                else:
+                        context.write("# SSLCertificateChainFile /cert_path\n")
+%>
+
 # Rules to serve static content directly from the file-repositories.
 <%include file="/depot.conf.mako"/>
 # with no URL-path, we show an index of the available repositories.
--- a/src/util/misc/user_attr.d/package:pkg	Wed Apr 09 17:53:35 2014 -0700
+++ b/src/util/misc/user_attr.d/package:pkg	Fri Apr 18 10:14:06 2014 +0530
@@ -1,1 +1,1 @@
-pkg5srv::RO::auths=solaris.smf.manage.pkg-mirror,solaris.smf.value.pkg-mirror
+pkg5srv::RO::auths=solaris.smf.manage.pkg-mirror,solaris.smf.value.pkg-mirror,solaris.smf.value.pkg-depot-config,solaris.smf.manage.pkg-depot