20283125 pkg set-publisher traceback for --add-property-value after --set-property
author Xiaobo Shen <xiaobo.shen@oracle.com>
date Fri, 03 Apr 2015 19:02:53 -0700
changeset 3185 2e55bdb918e4
parent 3184 d3c35a18e7cc
child 3186 9b33142f625c
20283125 pkg set-publisher traceback for --add-property-value after --set-property
15709579 SUNBT7140048 build 64-bit modules
20571365 PKG commands need to be integrated with RAD
src/client.py
src/modules/client/api.py
src/modules/client/api_errors.py
src/modules/client/bootenv.py
src/modules/client/client_api.py
src/modules/client/options.py
src/modules/client/pkg_solver.py
src/modules/client/pkgdefs.py
src/modules/client/progress.py
src/modules/client/publisher.py
src/modules/client/rad_pkg.py
src/modules/misc.py
src/pkg/external_deps.txt
src/pkg/manifests/developer:opensolaris:pkg5.p5m
src/pkg/manifests/package:pkg.p5m
src/po/POTFILES.in
src/rad-invoke.py
src/setup.py
src/tests/cli/t_client_api.py
src/tests/cli/t_pkg_image_create.py
src/tests/cli/t_pkg_install.py
src/tests/cli/t_pkg_publisher.py
--- a/src/client.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/client.py	Fri Apr 03 19:02:53 2015 -0700
@@ -68,6 +68,7 @@
         import pkg.client.api as api
         import pkg.client.api_errors as api_errors
         import pkg.client.bootenv as bootenv
+        import pkg.client.client_api as client_api
         import pkg.client.progress as progress
         import pkg.client.linkedimage as li
         import pkg.client.publisher as publisher
@@ -446,21 +447,6 @@
     verbose):
         """List packages."""
 
-        api_inst.progresstracker.set_purpose(
-            api_inst.progresstracker.PURPOSE_LISTING)
-
-        variants = False
-        pkg_list = api.ImageInterface.LIST_INSTALLED
-        if list_all:
-                variants = True
-                pkg_list = api.ImageInterface.LIST_ALL
-        elif list_installed_newest:
-                pkg_list = api.ImageInterface.LIST_INSTALLED_NEWEST
-        elif list_newest:
-                pkg_list = api.ImageInterface.LIST_NEWEST
-        elif list_upgradable:
-                pkg_list = api.ImageInterface.LIST_UPGRADABLE
-
         if verbose:
                 fmt_str = "{0:76} {1}"
         elif summary:
@@ -468,63 +454,33 @@
         else:
                 fmt_str = "{0:49} {1:26} {2}"
 
-        # Each pattern in pats can be a partial or full FMRI, so
-        # extract the individual components.  These patterns are
-        # transformed here so that partial failure can be detected
-        # when more than one pattern is provided.
-        rval, res = get_fmri_args(api_inst, pargs, cmd=op)
-        if not rval:
-                return EXIT_OOPS
-
-        api_inst.log_operation_start(op)
-        if pkg_list != api_inst.LIST_INSTALLED and refresh_catalogs:
-                # If the user requested packages other than those
-                # installed, ensure that a refresh is performed if
-                # needed since the catalog may be out of date or
-                # invalid as a result of publisher information
-                # changing (such as an origin uri, etc.).
-                try:
-                        api_inst.refresh()
-                except api_errors.PermissionsException:
-                        # Ignore permission exceptions with the
-                        # assumption that an unprivileged user is
-                        # executing this command and that the
-                        # refresh doesn't matter.
-                        pass
-                except api_errors.CatalogRefreshException as e:
-                        succeeded = display_catalog_failures(e,
-                            ignore_perms_failure=True)
-                        if succeeded != e.total:
-                                # If total number of publishers does
-                                # not match 'successful' number
-                                # refreshed, abort.
-                                return EXIT_OOPS
-
-                except:
-                        # Ignore the above error and just use what
-                        # already exists.
-                        pass
-
         state_map = [
-            [(api.PackageInfo.INSTALLED, "i")],
-            [(api.PackageInfo.FROZEN, "f")],
+            [("installed", "i")],
+            [("frozen", "f")],
             [
-                (api.PackageInfo.OBSOLETE, "o"),
-                (api.PackageInfo.RENAMED, "r")
+                ("obsolete", "o"),
+                ("renamed", "r")
             ],
         ]
 
-        # Now get the matching list of packages and display it.
-        found = False
         ppub = api_inst.get_highest_ranked_publisher()
         if ppub:
                 ppub = ppub.prefix
-        try:
-                res = api_inst.get_pkg_list(pkg_list, patterns=pargs,
-                    raise_unmatched=True, repos=origins, variants=variants)
-                for pt, summ, cats, states, attrs in res:
-                        found = True
-
+
+        # Get JSON output from the client API.
+        out_json = client_api._list_inventory(op, api_inst, pargs,
+            li_parent_sync, list_all, list_installed_newest, list_newest,
+            list_upgradable, origins, quiet, refresh_catalogs)
+
+        errors = None
+        if "errors" in out_json:
+                errors = out_json["errors"]
+                errors = _generate_error_messages(out_json["status"], errors,
+                    selected_type=["catalog_refresh", "catalog_refresh_failed"])
+
+        if "data" in out_json:
+                data = out_json["data"]
+                for entry in data:
                         if quiet:
                                 continue
 
@@ -545,14 +501,15 @@
                         status = ""
                         for sentry in state_map:
                                 for s, v in sentry:
-                                        if s in states:
+                                        if s in entry["states"]:
                                                 st = v
                                                 break
                                         else:
                                                 st = "-"
                                 status += st
 
-                        pub, stem, ver = pt
+                        pub, stem, ver = entry["pub"], entry["pkg"], \
+                            entry["version"]
                         if pub == ppub:
                                 spub = ""
                         else:
@@ -571,6 +528,7 @@
                                 continue
 
                         # Display short FMRI + summary.
+                        summ = entry["summary"]
                         pf = stem + spub
                         if summary:
                                 if summ is None:
@@ -581,113 +539,11 @@
                         # Default case; display short FMRI and version info.
                         sver = version.Version.split(ver)[-1]
                         msg(fmt_str.format(pf, sver, status))
-
-                if not found and not pargs:
-                        if pkg_list == api_inst.LIST_INSTALLED:
-                                if not quiet:
-                                        error(_("no packages installed"))
-                                api_inst.log_operation_end(
-                                    result=RESULT_NOTHING_TO_DO)
-                                return EXIT_OOPS
-                        elif pkg_list == api_inst.LIST_INSTALLED_NEWEST:
-                                if not quiet:
-                                        error(_("no packages installed or available "
-                                            "for installation"))
-                                api_inst.log_operation_end(
-                                    result=RESULT_NOTHING_TO_DO)
-                                return EXIT_OOPS
-                        elif pkg_list == api_inst.LIST_UPGRADABLE:
-                                if not quiet:
-                                        img = api_inst._img
-                                        cat = img.get_catalog(
-                                            img.IMG_CATALOG_INSTALLED)
-                                        if cat.package_count > 0:
-                                                error(_("no packages have "
-                                                    "newer versions available"))
-                                        else:
-                                                error(_("no packages are "
-                                                    "installed"))
-                                api_inst.log_operation_end(
-                                    result=RESULT_NOTHING_TO_DO)
-                                return EXIT_OOPS
-                        else:
-                                api_inst.log_operation_end(
-                                    result=RESULT_NOTHING_TO_DO)
-                                return EXIT_OOPS
-
-                api_inst.log_operation_end()
-                return EXIT_OK
-        except (api_errors.InvalidPackageErrors,
-            api_errors.ActionExecutionError,
-            api_errors.PermissionsException) as e:
-                error(e, cmd=op)
-                return EXIT_OOPS
-        except api_errors.CatalogRefreshException as e:
-                display_catalog_failures(e)
-                return EXIT_OOPS
-        except api_errors.InventoryException as e:
-                if e.illegal:
-                        for i in e.illegal:
-                                error(i)
-                        api_inst.log_operation_end(
-                            result=RESULT_FAILED_BAD_REQUEST)
-                        return EXIT_OOPS
-
-                if found and not quiet:
-                        # Ensure a blank line is inserted after list for
-                        # partial failure case.
-                        logger.error(" ")
-
-                if quiet:
-                        # Print nothing.
-                        pass
-                elif pkg_list == api.ImageInterface.LIST_ALL or \
-                    pkg_list == api.ImageInterface.LIST_NEWEST:
-                        error(_("no known packages matching:\n  {0}").format(
-                            "\n  ".join(e.notfound)), cmd=op)
-                elif pkg_list == api.ImageInterface.LIST_INSTALLED_NEWEST:
-                        error(_("no packages matching the following patterns "
-                            "are allowed by installed incorporations, or image "
-                            "variants that are known or installed\n  "
-                            "{0}").format("\n  ".join(e.notfound)), cmd=op)
-                        logger.error("Use -af to allow all versions.")
-                elif pkg_list == api.ImageInterface.LIST_UPGRADABLE:
-                        # Creating a list of packages that are uptodate
-                        # and that are not installed on the system.
-                        no_updates = []
-                        not_installed = []
-                        try:
-                                for entry in api_inst.get_pkg_list(
-                                    api.ImageInterface.LIST_INSTALLED,
-                                    patterns=e.notfound, raise_unmatched=True):
-                                        pub, stem, ver = entry[0]
-                                        no_updates.append(stem)
-                        except api_errors.InventoryException as exc:
-                                not_installed = exc.notfound
-
-                        err_str = ""
-                        if not_installed:
-                                err_str = _("no packages matching the following"
-                                    " patterns are installed:\n  {0}").format(
-                                    "\n  ".join(not_installed))
-
-                        if no_updates:
-                                err_str = err_str + _("no updates are available"
-                                    " for the following packages:\n  "
-                                    "{0}").format("\n  ".join(no_updates))
-                        if err_str:
-                                error(err_str, cmd=op)
-                else:
-                        error(_("no packages matching the following patterns "
-                            "are installed:\n  {0}").format(
-                            "\n  ".join(e.notfound)), cmd=op)
-
-                if found and e.notfound:
-                        # Only some patterns matched.
-                        api_inst.log_operation_end()
-                        return EXIT_PARTIAL
-                api_inst.log_operation_end(result=RESULT_NOTHING_TO_DO)
-                return EXIT_OOPS
+        # Print the remaining errors.
+        if errors:
+                _generate_error_messages(out_json["status"], errors)
+
+        return out_json["status"]
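
For reference, a minimal sketch of the JSON document that the rewritten
list() code above consumes. The field names are taken from the key accesses
in this hunk ("status", "errors", "data", and the per-entry "pub", "pkg",
"version", "states", and "summary"); the concrete values are invented for
illustration:

    # Illustrative _list_inventory() result; all values are hypothetical.
    out_json = {
        "status": 0,        # exit status propagated by list()
        "errors": [],       # entries carry "errtype"/"reason"/"info" keys
        "data": [
            {
                "pub": "solaris",
                "pkg": "web/server/apache-24",
                "version": "2.4.12,5.12-5.12.0.0.0.68.0:20150402T175814Z",
                "states": ["installed"],
                "summary": "Apache Web Server V2.4",
            },
        ],
    }

    # The same state_map walk used above, shown standalone.
    state_map = [
        [("installed", "i")],
        [("frozen", "f")],
        [("obsolete", "o"), ("renamed", "r")],
    ]
    for entry in out_json["data"]:
        status = ""
        for sentry in state_map:
            for s, v in sentry:
                if s in entry["states"]:
                    st = v
                    break
            else:
                st = "-"
            status += st
        print("{0} {1} {2}".format(entry["pkg"], entry["version"], status))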
 
 def get_tracker():
         if global_settings.client_output_parsable_version is not None:
@@ -1232,6 +1088,7 @@
 
 def display_plan(api_inst, child_image_plans, noexecute, omit_headers, op,
     parsable_version, quiet, quiet_plan, show_licenses, stage, verbose):
+        """Display plan function."""
 
         plan = api_inst.describe()
         if not plan:
@@ -1300,7 +1157,87 @@
 
                         msg(msg_text)
 
+def display_plan_cb(api_inst, child_image_plans=None, noexecute=False,
+    omit_headers=False, op=None, parsable_version=None, quiet=False,
+    quiet_plan=False, show_licenses=False, stage=None, verbose=None,
+    get_parsable_plan_cb=None, plan_only=False):
+        """Callback function for displaying plan."""
+
+        if plan_only:
+                __display_plan(api_inst, verbose, noexecute)
+                return
+
+        plan = api_inst.describe()
+        if not plan:
+                return
+
+        if stage not in [API_STAGE_DEFAULT, API_STAGE_PLAN] and not quiet_plan:
+                # we should have displayed licenses earlier so mark all
+                # licenses as having been displayed.
+                display_plan_licenses(api_inst, show_req=False)
+                return
+
+        if not quiet and parsable_version is None and \
+            api_inst.planned_nothingtodo(li_ignore_all=True):
+                if not quiet_plan:
+                        # Nothing to do.
+                        if op == PKG_OP_UPDATE:
+                                s = _("No updates available for this image.")
+                        else:
+                                s = _("No updates necessary for this image.")
+                        if api_inst.ischild():
+                                s += " ({0})".format(api_inst.get_linked_name())
+                        msg(s)
+
+                if op != PKG_OP_FIX or not verbose:
+                        # Even with nothing to do, continue so that INFO
+                        # messages are still displayed when verbose is True.
+                        return
+
+        if parsable_version is None and not quiet_plan:
+                display_plan_licenses(api_inst, show_all=show_licenses)
+
+        if not quiet and not quiet_plan:
+                __display_plan(api_inst, verbose, noexecute, op=op)
+
+        if parsable_version is not None and get_parsable_plan_cb:
+                parsable_plan = get_parsable_plan_cb(api_inst,
+                    parsable_version, child_image_plans)
+                logger.info(json.dumps(parsable_plan))
+        elif not quiet:
+                if not quiet_plan:
+                        # Ensure a blank line is inserted before the message
+                        # output.
+                        msg()
+
+                last_item_id = None
+                for item_id, msg_time, msg_type, msg_text in \
+                    plan.gen_item_messages(ordered=True):
+
+                        if last_item_id is None or last_item_id != item_id:
+                                last_item_id = item_id
+                                if not noexecute and op == PKG_OP_FIX and \
+                                    msg_type == MSG_ERROR:
+                                        msg(_("Repairing: {0:50}").format(
+                                            item_id))
+
+                        if op == PKG_OP_FIX and not verbose and \
+                            msg_type == MSG_INFO:
+                                # If verbose is False, don't display any INFO
+                                # messages.
+                                continue
+
+                        if not omit_headers:
+                                omit_headers = True
+                                msg(_("{pkg_name:70} {result:>7}").format(
+                                    pkg_name=_("PACKAGE"),
+                                    result=_("STATUS")))
+
+                        msg(msg_text)
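
The callback above is handed to the client_api entry points later in this
change (the _install/_update/_uninstall wrappers pass
display_plan_cb=display_plan_cb, logger=logger). A minimal sketch of the
assumed contract follows; the driver function here is invented for
illustration only, and the real logic lives in
src/modules/client/client_api.py:

    # Hypothetical driver, for illustration only.
    def _drive_operation(api_inst, op, display_plan_cb=None, noexecute=False,
        quiet=False, verbose=0):
        # ... generate the plan on api_inst here ...
        if display_plan_cb:
            display_plan_cb(api_inst, op=op, stage=API_STAGE_DEFAULT,
                noexecute=noexecute, quiet=quiet, verbose=verbose)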
+
 def __api_prepare_plan(operation, api_inst):
+        """Prepare plan."""
+
         # Exceptions which happen here are printed in the above level, with
         # or without some extra decoration done here.
         # XXX would be nice to kick the progress tracker.
@@ -1348,6 +1285,8 @@
         return EXIT_OK
 
 def __api_execute_plan(operation, api_inst):
+        """Execute plan."""
+
         rval = None
         try:
                 api_inst.execute_plan()
@@ -1446,6 +1385,7 @@
         return rval
 
 def __api_alloc(imgdir, exact_match, pkg_image_used):
+        """Allocate API instance."""
 
         progresstracker = get_tracker()
         try:
@@ -1471,6 +1411,8 @@
                 return
 
 def __api_plan_exception(op, noexecute, verbose, api_inst):
+        """Handle plan exception."""
+
         e_type, e, e_traceback = sys.exc_info()
 
         if e_type == api_errors.ImageNotFoundException:
@@ -1564,6 +1506,7 @@
     _omit_headers=False, _origins=None, _parsable_version=None, _quiet=False,
     _quiet_plan=False, _review_release_notes=False, _show_licenses=False,
     _stage=API_STAGE_DEFAULT, _verbose=0, **kwargs):
+        """API plan invocation entry."""
 
         # All the api interface functions that we invoke have some
         # common arguments.  Set those up now.
@@ -1833,7 +1776,7 @@
                         _api_inst.reset()
 
                 if "pargs" not in pwargs:
-                       pwargs["pargs"] = []
+                        pwargs["pargs"] = []
 
                 op_func = cmds[op][0]
 
@@ -1982,35 +1925,116 @@
             refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
             update_index=update_index)
 
-def install(op, api_inst, pargs,
-    accept, act_timeout, backup_be, backup_be_name, be_activate, be_name,
-    li_ignore, li_erecurse, li_parent_sync, new_be, noexecute, origins,
-    parsable_version, quiet, refresh_catalogs, reject_pats, show_licenses,
-    stage, update_index, verbose):
-        """Attempt to take package specified to INSTALLED state.  The operands
-        are interpreted as glob patterns."""
-
-        if not pargs:
-                usage(_("at least one package name required"), cmd=op)
-
-        rval, res = get_fmri_args(api_inst, pargs, cmd=op)
-        if not rval:
-                return EXIT_OOPS
-
-        xrval, xres = get_fmri_args(api_inst, reject_pats, cmd=op)
-        if not xrval:
-                return EXIT_OOPS
-
-        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
-            _noexecute=noexecute, _origins=origins,
-            _parsable_version=parsable_version, _quiet=quiet,
-            _show_licenses=show_licenses, _stage=stage, _verbose=verbose,
-            act_timeout=act_timeout, backup_be=backup_be,
-            backup_be_name=backup_be_name, be_activate=be_activate,
-            be_name=be_name, li_erecurse=li_erecurse,
-            li_parent_sync=li_parent_sync, new_be=new_be, pkgs_inst=pargs,
-            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
-            update_index=update_index)
+def __handle_client_json_api_output(out_json, op):
+        """This is the main client_json_api output handling function used for
+        install, update and uninstall and so on."""
+
+        if "errors" in out_json:
+                _generate_error_messages(out_json["status"],
+                    out_json["errors"], cmd=op)
+
+        if "data" in out_json and "release_notes_url" in out_json["data"]:
+                msg("\n" + "-" * 75)
+                msg(_("NOTE: Please review release notes posted at:\n" ))
+                msg(out_json["data"]["release_notes_url"])
+                msg("-" * 75 + "\n")
+        return out_json["status"]
+
+def _emit_error_general_cb(status, err, cmd=None, selected_type=[],
+    add_info=misc.EmptyDict):
+        """Callback for emitting general errors."""
+
+        if status == EXIT_BADOPT:
+                # Usage errors have no specific type; print them only
+                # if no selected type was requested.
+                if not selected_type:
+                        usage(err["reason"], cmd=cmd)
+                else:
+                        return False
+        elif "errtype" in err:
+                if err["errtype"] == "format_update":
+                        # If selected_type is specified and the error type
+                        # is not among them, don't print; return False.
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+                        emsg("\n")
+                        emsg(err["reason"])
+                        emsg(_("To continue, execute 'pkg update-format' as a "
+                            "privileged user and then try again.  Please note "
+                            "that updating the format of the image will render "
+                            "it unusable with older versions of the pkg(5) "
+                            "system."))
+                elif err["errtype"] == "catalog_refresh":
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        if "reason" in err:
+                                emsg(err["reason"])
+                        elif "info" in err:
+                                msg(err["info"])
+                elif err["errtype"] == "catalog_refresh_failed":
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        if "reason" in err:
+                                emsg(" ")
+                                emsg(err["reason"])
+                elif err["errtype"] == "publisher_set":
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        emsg(err["reason"])
+                elif err["errtype"] == "plan_license":
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        emsg(err["reason"])
+                        emsg(_("To indicate that you "
+                            "agree to and accept the terms of the licenses of "
+                            "the packages listed above, use the --accept "
+                            "option. To display all of the related licenses, "
+                            "use the --licenses option."))
+                elif err["errtype"] in ["inventory", "inventory_extra"]:
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        emsg(" ")
+                        emsg(err["reason"])
+                        if err["errtype"] == "inventory_extra":
+                                emsg("Use -af to allow all versions.")
+                elif err["errtype"] == "unsupported_repo_op":
+                        if selected_type and err["errtype"] not in selected_type:
+                                return False
+
+                        emsg(_("""
+To add a publisher using this repository, execute the following command as a
+privileged user:
+
+pkg set-publisher -g {0} <publisher>
+""").format(add_info["repo_uri"]))
+                elif "info" in err:
+                        msg(err["info"])
+                elif "reason" in err:
+                        emsg(err["reason"])
+        else:
+                if selected_type:
+                        return False
+
+                if "reason" in err:
+                        emsg(err["reason"])
+                elif "info" in err:
+                        msg(err["info"])
+        return True
+
+def _generate_error_messages(status, err_list,
+    msg_cb=_emit_error_general_cb, selected_type=[], cmd=None,
+    add_info=misc.EmptyDict):
+        """Generate error messages."""
+
+        errs_left = [err for err in err_list if not msg_cb(status, err,
+            selected_type=selected_type, cmd=cmd, add_info=add_info)]
+        # Return errors not being printed.
+        return errs_left
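
As a rough usage sketch (the error objects below are invented; only the
"errtype", "reason", and "info" keys come from the handler above), the
two-pass pattern used by the list() code earlier in this change looks like:

    # First pass: print only catalog-refresh errors, keep the rest.
    errors = [
        {"errtype": "catalog_refresh_failed",
            "reason": "pkg: 0/1 catalogs successfully updated:"},
        {"reason": "no packages matching 'foo' installed"},
    ]
    errors = _generate_error_messages(EXIT_OOPS, errors,
        selected_type=["catalog_refresh", "catalog_refresh_failed"])

    # Second pass: print whatever the first pass did not consume.
    if errors:
        _generate_error_messages(EXIT_OOPS, errors)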
 
 def exact_install(op, api_inst, pargs,
     accept, backup_be, backup_be_name, be_activate, be_name, li_ignore,
@@ -2019,51 +2043,30 @@
         """Attempt to take package specified to INSTALLED state.
         The operands are interpreted as glob patterns."""
 
-        if not pargs:
-                usage(_("at least one package name required"), cmd=op)
-
-        rval, res = get_fmri_args(api_inst, pargs, cmd=op)
-        if not rval:
-                return EXIT_OOPS
-
-        xrval, xres = get_fmri_args(api_inst, reject_pats, cmd=op)
-        if not xrval:
-                return EXIT_OOPS
-
-        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
-            _noexecute=noexecute, _origins=origins, _quiet=quiet,
-            _show_licenses=show_licenses, _verbose=verbose,
-            backup_be=backup_be, backup_be_name=backup_be_name,
-            be_activate=be_activate, be_name=be_name,
-            li_parent_sync=li_parent_sync, new_be=new_be,
-            _parsable_version=parsable_version, pkgs_inst=pargs,
-            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
-            update_index=update_index)
-
-def uninstall(op, api_inst, pargs,
-    act_timeout, backup_be, backup_be_name, be_activate, be_name,
-    ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be, noexecute,
-    parsable_version, quiet, stage, update_index, verbose):
-        """Attempt to take package specified to DELETED state."""
-
-        if not pargs:
-                usage(_("at least one package name required"), cmd=op)
-
-        if verbose and quiet:
-                usage(_("-v and -q may not be combined"), cmd=op)
-
-        rval, res = get_fmri_args(api_inst, pargs, cmd=op)
-        if not rval:
-                return EXIT_OOPS
-
-        return __api_op(op, api_inst, _li_ignore=li_ignore,
-            _noexecute=noexecute, _parsable_version=parsable_version,
-            _quiet=quiet, _stage=stage, _verbose=verbose,
-            act_timeout=act_timeout, backup_be=backup_be,
-            backup_be_name=backup_be_name, be_activate=be_activate,
-            be_name=be_name, ignore_missing=ignore_missing,
-            li_erecurse=li_erecurse, li_parent_sync=li_parent_sync,
-            new_be=new_be, pkgs_to_uninstall=pargs, update_index=update_index)
+        out_json = client_api._exact_install(op, api_inst, pargs, accept,
+            backup_be, backup_be_name, be_activate, be_name, li_ignore,
+            li_parent_sync, new_be, noexecute, origins, parsable_version,
+            quiet, refresh_catalogs, reject_pats, show_licenses, update_index,
+            verbose, display_plan_cb=display_plan_cb, logger=logger)
+
+        return __handle_client_json_api_output(out_json, op)
+
+def install(op, api_inst, pargs,
+    accept, act_timeout, backup_be, backup_be_name, be_activate, be_name,
+    li_ignore, li_erecurse, li_parent_sync, new_be, noexecute, origins,
+    parsable_version, quiet, refresh_catalogs, reject_pats, show_licenses,
+    stage, update_index, verbose):
+        """Attempt to take package specified to INSTALLED state.  The operands
+        are interpreted as glob patterns."""
+
+        out_json = client_api._install(op, api_inst, pargs,
+            accept, act_timeout, backup_be, backup_be_name, be_activate,
+            be_name, li_ignore, li_erecurse, li_parent_sync, new_be, noexecute,
+            origins, parsable_version, quiet, refresh_catalogs, reject_pats,
+            show_licenses, stage, update_index, verbose,
+            display_plan_cb=display_plan_cb, logger=logger)
+
+        return __handle_client_json_api_output(out_json, op)
 
 def update(op, api_inst, pargs, accept, act_timeout, backup_be, backup_be_name,
     be_activate, be_name, force, ignore_missing, li_ignore, li_erecurse,
@@ -2072,39 +2075,28 @@
         """Attempt to take all installed packages specified to latest
         version."""
 
-        rval, res = get_fmri_args(api_inst, pargs, cmd=op)
-        if not rval:
-                return EXIT_OOPS
-
-        xrval, xres = get_fmri_args(api_inst, reject_pats, cmd=op)
-        if not xrval:
-                return EXIT_OOPS
-
-        if res:
-                # If there are specific installed packages to update,
-                # then take only those packages to the latest version
-                # allowed by the patterns specified.  (The versions
-                # specified can be older than what is installed.)
-                pkgs_update = pargs
-                review_release_notes = False
-        else:
-                # If no packages were specified, attempt to update all installed
-                # packages.
-                pkgs_update = None
-                review_release_notes = True
-
-        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
-            _noexecute=noexecute, _origins=origins,
-            _parsable_version=parsable_version, _quiet=quiet,
-            _review_release_notes=review_release_notes,
-            _show_licenses=show_licenses, _stage=stage, _verbose=verbose,
-            act_timeout=act_timeout, backup_be=backup_be,
-            backup_be_name=backup_be_name, be_activate=be_activate,
-            be_name=be_name, force=force, ignore_missing=ignore_missing,
-            li_erecurse=li_erecurse, li_parent_sync=li_parent_sync,
-            new_be=new_be, pkgs_update=pkgs_update,
-            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
-            update_index=update_index)
+        out_json = client_api._update(op, api_inst, pargs, accept, act_timeout,
+            backup_be, backup_be_name, be_activate, be_name, force,
+            ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be,
+            noexecute, origins, parsable_version, quiet, refresh_catalogs,
+            reject_pats, show_licenses, stage, update_index, verbose,
+            display_plan_cb=display_plan_cb, logger=logger)
+
+        return __handle_client_json_api_output(out_json, op)
+
+def uninstall(op, api_inst, pargs,
+    act_timeout, backup_be, backup_be_name, be_activate, be_name,
+    ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be, noexecute,
+    parsable_version, quiet, stage, update_index, verbose):
+        """Attempt to take package specified to DELETED state."""
+
+        out_json = client_api._uninstall(op, api_inst, pargs,
+            act_timeout, backup_be, backup_be_name, be_activate, be_name,
+            ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be,
+            noexecute, parsable_version, quiet, stage, update_index, verbose,
+            display_plan_cb=display_plan_cb, logger=logger)
+
+        return __handle_client_json_api_output(out_json, op)
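
Each of the thin wrappers above (exact_install, install, update, uninstall)
funnels its result through __handle_client_json_api_output(), which only
inspects "status", "errors", and "data"/"release_notes_url". A hypothetical
successful update result, with an invented URL, for illustration:

    # Hypothetical _update() result.
    out_json = {
        "status": 0,    # EXIT_OK
        "data": {
            "release_notes_url": "file:///tmp/release-notes.txt",
        },
    }

Any "errors" list present is routed through _generate_error_messages()
before the status is returned to the caller.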
 
 def revert(op, api_inst, pargs,
     backup_be, backup_be_name, be_activate, be_name, new_be, noexecute,
@@ -2888,248 +2880,57 @@
                 retcode = EXIT_OK
         return retcode
 
-def info(api_inst, args):
+def info(op, api_inst, pargs, display_license, info_local, info_remote,
+    origins, quiet):
         """Display information about a package or packages.
         """
 
-        display_license = False
-        info_local = False
-        info_remote = False
-        origins = set()
-        quiet = False
-
-        opts, pargs = getopt.getopt(args, "g:lqr", ["license"])
-        for opt, arg in opts:
-                if opt == "-g":
-                        origins.add(misc.parse_uri(arg, cwd=orig_cwd))
-                        info_remote = True
-                elif opt == "-l":
-                        info_local = True
-                elif opt == "-q":
-                        quiet = True
-                        global_settings.client_output_quiet = True
-                elif opt == "-r":
-                        info_remote = True
-                elif opt == "--license":
-                        display_license = True
-
-        if not info_local and not info_remote:
-                info_local = True
-        elif info_local and info_remote:
-                usage(_("-l and -r may not be combined"), cmd="info")
-
-        if info_remote and not pargs:
-                usage(_("must request remote info for specific packages"),
-                    cmd="info")
-
-        err = 0
-
-        # Reset the progress tracker here, because we may have to switch to a
-        # different tracker due to the options parse.
-        _api_inst.progresstracker = get_tracker()
-
-        api_inst.progresstracker.set_purpose(
-            api_inst.progresstracker.PURPOSE_LISTING)
-
-        info_needed = api.PackageInfo.ALL_OPTIONS
-        if not display_license:
-                info_needed = api.PackageInfo.ALL_OPTIONS - \
-                    frozenset([api.PackageInfo.LICENSES])
-        info_needed -= api.PackageInfo.ACTION_OPTIONS
-        info_needed |= frozenset([api.PackageInfo.DEPENDENCIES])
-
-        try:
-                ret = api_inst.info(pargs, info_local, info_needed,
-                    ranked=info_remote, repos=origins)
-        except api_errors.ImageFormatUpdateNeeded as e:
-                format_update_error(e)
-                return EXIT_OOPS
-        except api_errors.NoPackagesInstalledException:
-                error(_("no packages installed"))
-                return EXIT_OOPS
-        except api_errors.ApiException as e:
-                error(e)
-                return EXIT_OOPS
-
-        pis = ret[api.ImageInterface.INFO_FOUND]
-        notfound = ret[api.ImageInterface.INFO_MISSING]
-        illegals = ret[api.ImageInterface.INFO_ILLEGALS]
-
-        if illegals:
-                # No other results will be returned if illegal patterns were
-                # specified.
-                for i in illegals:
-                        logger.error(str(i))
-                return EXIT_OOPS
-
-        no_licenses = []
-        for i, pi in enumerate(pis):
-                if not quiet and i > 0:
-                        msg("")
-
-                if display_license:
-                        if not pi.licenses:
-                                no_licenses.append(pi.fmri)
-                        elif not quiet:
-                                for lic in pi.licenses:
+        ret_json = client_api._info(op, api_inst, pargs, display_license,
+            info_local, info_remote, origins, quiet)
+
+        if "data" in ret_json:
+                # "licenses" is present only if display_license was set.
+                if "licenses" in ret_json["data"]:
+                        data_type = "licenses"
+                elif "package_attrs" in ret_json["data"]:
+                        data_type = "package_attrs"
+
+                for i, pis in enumerate(ret_json["data"][data_type]):
+                        if not quiet and i > 0:
+                                msg("")
+
+                        if display_license and not quiet:
+                                for lic in pis:
                                         msg(lic)
-                        continue
-
-                if quiet:
-                        continue
-
-                state = ""
-                if api.PackageInfo.INSTALLED in pi.states:
-                        state = _("Installed")
-                elif api.PackageInfo.UNSUPPORTED in pi.states:
-                        state = _("Unsupported")
-                else:
-                        state = _("Not installed")
-
-                lparen = False
-                if api.PackageInfo.OBSOLETE in pi.states:
-                        state += " ({0}".format(_("Obsolete"))
-                        lparen = True
-                elif api.PackageInfo.RENAMED in pi.states:
-                        state += " ({0}".format(_("Renamed"))
-                        lparen = True
-                if api.PackageInfo.FROZEN in pi.states:
-                        if lparen:
-                                state += ", {0})".format(_("Frozen"))
-                        else:
-                                state += " ({0})".format(_("Frozen"))
-                elif lparen:
-                        state += ")"
-
-                # XXX-Consider using Python's 2.7 collections.OrderedDict
-                attr_list = []
-                seen = {}
-
-                def __append_attr_tuples(label, values):
-                        """Given arguments label and values, either extend
-                        the existing tuple value or add new one to
-                        attr_list"""
-
-                        if not isinstance(values, list):
-                                values = [values]
-                        if label in seen:
-                                seen[label].extend(values)
-                        else:
-                                attr_list.append((label, values))
-                                seen[label] = values
-
-                __append_attr_tuples(_("Name"), pi.pkg_stem)
-                __append_attr_tuples(_("Summary"), pi.summary)
-                if pi.description:
-                        __append_attr_tuples(_("Description"), pi.description)
-                if pi.category_info_list:
-                        category_info = []
-                        verbose = len(pi.category_info_list) > 1
-                        category_info.append \
-                            (pi.category_info_list[0].__str__(verbose))
-                        if len(pi.category_info_list) > 1:
-                                for ci in pi.category_info_list[1:]:
-                                        category_info.append \
-                                            (ci.__str__(verbose))
-                        __append_attr_tuples(_("Category"), category_info)
-
-                __append_attr_tuples(_("State"), state)
-
-                # Renamed packages have dependencies, but the dependencies
-                # may not apply to this image's variants so won't be
-                # returned.
-                if api.PackageInfo.RENAMED in pi.states:
-                        __append_attr_tuples(_("Renamed to"), pi.dependencies)
-
-                # XXX even more info on the publisher would be nice?
-                __append_attr_tuples(_("Publisher"), pi.publisher)
-                hum_ver = pi.get_attr_values("pkg.human-version")
-                if hum_ver and hum_ver[0] != str(pi.version):
-                        __append_attr_tuples(_("Version"), "{0} ({1})".format(
-                            pi.version, hum_ver[0]))
-                else:
-                        __append_attr_tuples(_("Version"), str(pi.version))
-
-                __append_attr_tuples(_("Branch"), str(pi.branch))
-                __append_attr_tuples(_("Packaging Date"), pi.packaging_date)
-                __append_attr_tuples(_("Size"), misc.bytes_to_str(pi.size))
-                __append_attr_tuples(_("FMRI"),
-                    pi.fmri.get_fmri(include_build=False))
-                # XXX add license/copyright info here?
-
-                addl_attr_list = {
-                    "info.keyword": _("Additional Keywords"),
-                    "info.upstream": _("Project Contact"),
-                    "info.maintainer": _("Project Maintainer"),
-                    "info.maintainer-url": _("Project Maintainer URL"),
-                    "pkg.detailed-url": _("Project URL"),
-                    "info.upstream-url": _("Project URL"),
-                    "info.repository-changeset": _("Repository Changeset"),
-                    "info.repository-url": _("Source URL"),
-                    "info.source-url": _("Source URL")
-                }
-
-                for item in sorted(pi.attrs, key=addl_attr_list.get):
-                        if item in addl_attr_list:
-                                __append_attr_tuples(addl_attr_list[item],
-                                    pi.get_attr_values(item))
-
-                try:
-                        max_width = max(
-                            len(attr[0])
-                            for attr in attr_list
-                        )
-                except ValueError:
-                        # Only display header if there are other attributes to
-                        # show
-                        continue
-
-                for attr, kval in attr_list:
-                        label = "{0}: ".format(attr.rjust(max_width))
-                        res = "\n".join(item for item in kval)
-                        if res:
-                                wrapper = textwrap.TextWrapper(
-                                    initial_indent=label,
-                                    break_on_hyphens=False,
-                                    break_long_words=False,
-                                    subsequent_indent=(max_width + 2) * " ",
-                                    width=80)
-                                msg(wrapper.fill(res))
-
-        if notfound:
-                if pis:
-                        err = EXIT_PARTIAL
-                        if not quiet:
-                                logger.error("")
-                else:
-                        err = EXIT_OOPS
-
-                if not quiet:
-                        if info_local:
-                                logger.error(_("""\
-pkg: info: no packages matching the following patterns you specified are
-installed on the system.  Try specifying -r to query remotely:"""))
-                        elif info_remote:
-                                logger.error(_("""\
-pkg: info: no packages matching the following patterns you specified were
-found in the catalog.  Try relaxing the patterns, refreshing, and/or
-examining the catalogs:"""))
-                        logger.error("")
-                        for p in notfound:
-                                logger.error("        {0}".format(p))
-
-        if no_licenses:
-                if len(no_licenses) == len(pis):
-                        err = EXIT_OOPS
-                else:
-                        err = EXIT_PARTIAL
-
-                if not quiet:
-                        error(_("no license information could be found for the "
-                            "following packages:"))
-                        for pfmri in no_licenses:
-                                logger.error("\t{0}".format(pfmri))
-        return err
+                                continue
+
+                        try:
+                                max_width = max(
+                                    len(attr[0])
+                                    for attr in pis
+                                )
+                        except ValueError:
+                                # Only display header if there are
+                                # other attributes to show.
+                                continue
+                        for attr_l in pis:
+                                attr, kval = tuple(attr_l)
+                                label = "{0}: ".format(attr.rjust(max_width))
+                                res = "\n".join(item for item in kval)
+                                if res:
+                                        wrapper = textwrap.TextWrapper(
+                                            initial_indent=label,
+                                            break_on_hyphens=False,
+                                            break_long_words=False,
+                                            subsequent_indent=(max_width + 2) \
+                                            * " ", width=80)
+                                        msg(wrapper.fill(res))
+
+        if "errors" in ret_json:
+                _generate_error_messages(ret_json["status"], ret_json["errors"],
+                    cmd="info")
+
+        return ret_json["status"]
 
 def calc_widths(lines, attrs, widths=None):
         """Given a set of lines and a set of attributes, calculate the minimum
@@ -3671,7 +3472,12 @@
                 # be printed on the same line as the spinner.
                 return EXIT_OOPS, ("\n" + str(e))
 
-def publisher_set(api_inst, args):
+def publisher_set(op, api_inst, pargs, ssl_key, ssl_cert, origin_uri,
+    reset_uuid, add_mirrors, remove_mirrors, add_origins, remove_origins,
+    refresh_allowed, disable, sticky, search_before, search_after,
+    search_first, approved_ca_certs, revoked_ca_certs, unset_ca_certs,
+    set_props, add_prop_values, remove_prop_values, unset_props, repo_uri,
+    proxy_uri):
         """pkg set-publisher [-Ped] [-k ssl_key] [-c ssl_cert] [--reset-uuid]
             [-g|--add-origin origin to add] [-G|--remove-origin origin to
             remove] [-m|--add-mirror mirror to add] [-M|--remove-mirror mirror
@@ -3688,944 +3494,141 @@
             [--proxy proxy to use]
             [publisher] """
 
-        cmd_name = "set-publisher"
-
-        ssl_key = None
-        ssl_cert = None
-        origin_uri = None
-        reset_uuid = False
-        add_mirrors = set()
-        remove_mirrors = set()
-        add_origins = set()
-        remove_origins = set()
-        refresh_allowed = True
-        disable = None
-        sticky = None
-        search_before = None
-        search_after = None
-        search_first = False
-        repo_uri = None
-        proxy_uri = None
-
-        approved_ca_certs = []
-        revoked_ca_certs = []
-        unset_ca_certs = []
-        set_props = {}
-        add_prop_values = {}
-        remove_prop_values = {}
-        unset_props = set()
-
-        opts, pargs = getopt.getopt(args, "Pedk:c:O:G:g:M:m:p:",
-            ["add-mirror=", "remove-mirror=", "add-origin=", "remove-origin=",
-            "no-refresh", "reset-uuid", "enable", "disable", "sticky",
-            "non-sticky", "search-after=", "search-before=", "search-first",
-            "approve-ca-cert=", "revoke-ca-cert=", "unset-ca-cert=",
-            "set-property=", "add-property-value=", "remove-property-value=",
-            "unset-property=", "proxy="])
-
-        for opt, arg in opts:
-                if opt == "-c":
-                        ssl_cert = arg
-                elif opt == "-d" or opt == "--disable":
-                        disable = True
-                elif opt == "-e" or opt == "--enable":
-                        disable = False
-                elif opt == "-g" or opt == "--add-origin":
-                        add_origins.add(misc.parse_uri(arg, cwd=orig_cwd))
-                elif opt == "-G" or opt == "--remove-origin":
-                        if arg == "*":
-                                # Allow wildcard to support an easy, scriptable
-                                # way of removing all existing entries.
-                                remove_origins.add("*")
-                        else:
-                                remove_origins.add(misc.parse_uri(arg,
-                                    cwd=orig_cwd))
-                elif opt == "-k":
-                        ssl_key = arg
-                elif opt == "-O":
-                        origin_uri = arg
-                elif opt == "-m" or opt == "--add-mirror":
-                        add_mirrors.add(misc.parse_uri(arg, cwd=orig_cwd))
-                elif opt == "-M" or opt == "--remove-mirror":
-                        if arg == "*":
-                                # Allow wildcard to support an easy, scriptable
-                                # way of removing all existing entries.
-                                remove_mirrors.add("*")
-                        else:
-                                remove_mirrors.add(misc.parse_uri(arg,
-                                    cwd=orig_cwd))
-                elif opt == "-p":
-                        if repo_uri:
-                                usage(_("The -p option can be specified only "
-                                    "once."), cmd=cmd_name)
-                        repo_uri = misc.parse_uri(arg, cwd=orig_cwd)
-                elif opt in ("-P", "--search-first"):
-                        search_first = True
-                elif opt == "--reset-uuid":
-                        reset_uuid = True
-                elif opt == "--no-refresh":
-                        refresh_allowed = False
-                elif opt == "--sticky":
-                        sticky = True
-                elif opt == "--non-sticky":
-                        sticky = False
-                elif opt == "--search-before":
-                        search_before = arg
-                elif opt == "--search-after":
-                        search_after = arg
-                elif opt == "--approve-ca-cert":
-                        approved_ca_certs.append(arg)
-                elif opt == "--revoke-ca-cert":
-                        revoked_ca_certs.append(arg)
-                elif opt == "--unset-ca-cert":
-                        unset_ca_certs.append(arg)
-                elif opt == "--set-property":
-                        t = arg.split("=", 1)
-                        if len(t) < 2:
-                                usage(_("properties to be set must be of the "
-                                    "form '<name>=<value>'. This is what was "
-                                    "given: {0}").format(arg), cmd=cmd_name)
-                        if t[0] in set_props:
-                                usage(_("a property may only be set once in a "
-                                    "command. {0} was set twice").format(t[0]),
-                                    cmd=cmd_name)
-                        set_props[t[0]] = t[1]
-                elif opt == "--add-property-value":
-                        t = arg.split("=", 1)
-                        if len(t) < 2:
-                                usage(_("property values to be added must be "
-                                    "of the form '<name>=<value>'. This is "
-                                    "what was given: {0}").format(arg),
-                                    cmd=cmd_name)
-                        add_prop_values.setdefault(t[0], [])
-                        add_prop_values[t[0]].append(t[1])
-                elif opt == "--remove-property-value":
-                        t = arg.split("=", 1)
-                        if len(t) < 2:
-                                usage(_("property values to be removed must be "
-                                    "of the form '<name>=<value>'. This is "
-                                    "what was given: {0}").format(arg),
-                                    cmd=cmd_name)
-                        remove_prop_values.setdefault(t[0], [])
-                        remove_prop_values[t[0]].append(t[1])
-                elif opt == "--unset-property":
-                        unset_props.add(arg)
-                elif opt == "--proxy":
-                        proxy_uri = arg
-
-        name = None
-        if len(pargs) == 0 and not repo_uri:
-                usage(_("requires a publisher name"), cmd="set-publisher")
-        elif len(pargs) > 1:
-                usage(_("only one publisher name may be specified"),
-                    cmd="set-publisher")
-        elif pargs:
-                name = pargs[0]
-
-        if origin_uri and (add_origins or remove_origins):
-                usage(_("the -O and -g, --add-origin, -G, or --remove-origin "
-                    "options may not be combined"), cmd="set-publisher")
-
-        if (search_before and search_after) or \
-            (search_before and search_first) or (search_after and search_first):
-                usage(_("search-before, search-after, and search-first (-P) "
-                    "may not be combined"), cmd="set-publisher")
-
-        if repo_uri and (add_origins or add_mirrors or remove_origins or
-            remove_mirrors or disable != None or not refresh_allowed or
-            reset_uuid):
-                usage(_("the -p option may not be combined with the -g, "
-                    "--add-origin, -G, --remove-origin, -m, --add-mirror, "
-                    "-M, --remove-mirror, --enable, --disable, --no-refresh, "
-                    "or --reset-uuid options"), cmd="set-publisher")
-
-        if proxy_uri and not (add_origins or add_mirrors or repo_uri or
-            remove_origins or remove_mirrors):
-                usage(_("the --proxy argument may only be combined with the -g,"
-                    " --add-origin,  -m, --add-mirror, or -p options"),
-                    cmd="set-publisher")
-
-        # Get sanitized SSL Cert/Key input values.
-        ssl_cert, ssl_key = _get_ssl_cert_key(api_inst.root, api_inst.is_zone,
-            ssl_cert, ssl_key)
-
-        if not repo_uri:
-                # Normal case.
-                ret = _set_pub_error_wrap(_add_update_pub, name, [],
-                    api_inst, name, disable=disable, sticky=sticky,
-                    origin_uri=origin_uri, add_mirrors=add_mirrors,
-                    remove_mirrors=remove_mirrors, add_origins=add_origins,
-                    remove_origins=remove_origins, ssl_cert=ssl_cert,
-                    ssl_key=ssl_key, search_before=search_before,
-                    search_after=search_after, search_first=search_first,
-                    reset_uuid=reset_uuid, refresh_allowed=refresh_allowed,
-                    set_props=set_props, add_prop_values=add_prop_values,
-                    remove_prop_values=remove_prop_values,
-                    unset_props=unset_props, approved_cas=approved_ca_certs,
-                    revoked_cas=revoked_ca_certs, unset_cas=unset_ca_certs,
-                    proxy_uri=proxy_uri)
-
-                rval, rmsg = ret
-                if rmsg:
-                        error(rmsg, cmd="set-publisher")
-                return rval
-
-        pubs = None
-        # Automatic configuration via -p case.
-        def get_pubs():
-                if proxy_uri:
-                        proxies = [publisher.ProxyURI(proxy_uri)]
-                else:
-                        proxies = []
-                repo = publisher.RepositoryURI(repo_uri,
-                    ssl_cert=ssl_cert, ssl_key=ssl_key, proxies=proxies)
-                return EXIT_OK, api_inst.get_publisherdata(repo=repo)
-
-        ret = None
-        try:
-                ret = _set_pub_error_wrap(get_pubs, name,
-                    [api_errors.UnsupportedRepositoryOperation])
-        except api_errors.UnsupportedRepositoryOperation as e:
-                # Fail if the operation can't be done automatically.
-                error(str(e), cmd="set-publisher")
-                logger.error(_("""
-To add a publisher using this repository, execute the following command as a
-privileged user:
-
-  pkg set-publisher -g {0} <publisher>
-""").format(repo_uri))
-                return EXIT_OOPS
-        else:
-                rval, rmsg = ret
-                if rval != EXIT_OK:
-                        error(rmsg, cmd="set-publisher")
-                        return rval
-                pubs = rmsg
-
-        # For the automatic publisher configuration case, update or add
-        # publishers based on whether they exist and if they match any
-        # specified publisher prefix.
-        if not pubs:
-                error(_("""
-The specified repository did not contain any publisher configuration
-information.  This is likely the result of a repository configuration
-error.  Please contact the repository administrator for further
-assistance."""))
-                return EXIT_OOPS
-
-        if name and name not in pubs:
-                known = [p.prefix for p in pubs]
-                unknown = [name]
-                e = api_errors.UnknownRepositoryPublishers(known=known,
-                    unknown=unknown, location=repo_uri)
-                error(str(e))
-                return EXIT_OOPS
-
-        added = []
-        updated = []
-        failed = []
-
-        for src_pub in sorted(pubs):
-                prefix = src_pub.prefix
-                if name and prefix != name:
-                        # User didn't request this one.
-                        continue
-
-                src_repo = src_pub.repository
-                if not api_inst.has_publisher(prefix=prefix):
-                        add_origins = []
-                        if not src_repo or not src_repo.origins:
-                                # If the repository publisher configuration
-                                # didn't include configuration information
-                                # for the publisher's repositories, assume
-                                # that the origin for the new publisher
-                                # matches the URI provided.
-                                add_origins.append(repo_uri)
-
-                        # Any -p origins/mirrors returned from get_pubs() should
-                        # use the proxy we declared, if any.
-                        if proxy_uri and src_repo:
-                                proxies = [publisher.ProxyURI(proxy_uri)]
-                                for repo_uri in src_repo.origins:
-                                        repo_uri.proxies = proxies
-                                for repo_uri in src_repo.mirrors:
-                                        repo_uri.proxies = proxies
-
-                        rval, rmsg = _set_pub_error_wrap(_add_update_pub, name,
-                            [], api_inst, prefix, pub=src_pub,
-                            add_origins=add_origins, ssl_cert=ssl_cert,
-                            ssl_key=ssl_key, sticky=sticky,
-                            search_after=search_after,
-                            search_before=search_before,
-                            search_first=search_first,
-                            set_props=set_props,
-                            add_prop_values=add_prop_values,
-                            remove_prop_values=remove_prop_values,
-                            unset_props=unset_props, proxy_uri=proxy_uri)
-                        if rval == EXIT_OK:
-                                added.append(prefix)
-
-                        # When multiple publishers result from a single -p
-                        # operation, this ensures that the new publishers are
-                        # ordered correctly.
-                        search_first = False
-                        search_after = prefix
-                        search_before = None
-                else:
-                        add_origins = []
-                        add_mirrors = []
-                        dest_pub = api_inst.get_publisher(prefix=prefix,
-                            duplicate=True)
-                        dest_repo = dest_pub.repository
-                        if dest_repo.origins and \
-                            not dest_repo.has_origin(repo_uri):
-                                add_origins = [repo_uri]
-
-                        if not src_repo and not add_origins:
-                                # The repository doesn't have to provide origin
-                                # information for publishers.  If it doesn't,
-                                # the origin of every publisher returned is
-                                # assumed to match the URI that the user
-                                # provided.  Since this is an update case,
-                                # nothing special needs to be done.
-                                if not dest_repo.origins:
-                                        add_origins = [repo_uri]
-                        elif src_repo:
-                                # Avoid duplicates by adding only those mirrors
-                                # or origins not already known.
-                                add_mirrors = [
-                                    u.uri
-                                    for u in src_repo.mirrors
-                                    if u.uri not in dest_repo.mirrors
-                                ]
-                                add_origins = [
-                                    u.uri
-                                    for u in src_repo.origins
-                                    if u.uri not in dest_repo.origins
-                                ]
-
-                                # Special bits to update; for these, take the
-                                # new value as-is (don't attempt to merge).
-                                for prop in ("collection_type", "description",
-                                    "legal_uris", "name", "refresh_seconds",
-                                    "registration_uri", "related_uris"):
-                                        src_val = getattr(src_repo, prop)
-                                        if src_val is not None:
-                                                setattr(dest_repo, prop,
-                                                    src_val)
-
-                        # If an alias doesn't already exist, update it too.
-                        if src_pub.alias and not dest_pub.alias:
-                                dest_pub.alias = src_pub.alias
-
-                        rval, rmsg = _set_pub_error_wrap(_add_update_pub, name,
-                            [], api_inst, prefix, pub=dest_pub,
-                            add_mirrors=add_mirrors, add_origins=add_origins,
-                            set_props=set_props,
-                            add_prop_values=add_prop_values,
-                            remove_prop_values=remove_prop_values,
-                            unset_props=unset_props, proxy_uri=proxy_uri)
-
-                        if rval == EXIT_OK:
-                                updated.append(prefix)
-
-                if rval != EXIT_OK:
-                        failed.append((prefix, rmsg))
-                        continue
-
-        first = True
-        for pub, rmsg in failed:
-                if first:
-                        first = False
-                        error("failed to add or update one or more "
-                            "publishers", cmd="set-publisher")
-                logger.error("  {0}:".format(pub))
-                logger.error(rmsg)
-
-        if added or updated:
-                if first:
-                        logger.info("pkg set-publisher:")
-                if added:
+        out_json = client_api._publisher_set(op, api_inst, pargs, ssl_key,
+            ssl_cert, origin_uri, reset_uuid, add_mirrors, remove_mirrors,
+            add_origins, remove_origins, refresh_allowed, disable, sticky,
+            search_before, search_after, search_first, approved_ca_certs,
+            revoked_ca_certs, unset_ca_certs, set_props, add_prop_values,
+            remove_prop_values, unset_props, repo_uri, proxy_uri)
+
+        errors = None
+        if "errors" in out_json:
+                errors = out_json["errors"]
+                errors = _generate_error_messages(out_json["status"], errors,
+                    selected_type=["publisher_set"])
+
+        if "data" in out_json:
+                if "header" in out_json["data"]:
+                        logger.info(out_json["data"]["header"])
+                if "added" in out_json["data"]:
                         logger.info(_("  Added publisher(s): {0}").format(
-                            ", ".join(added)))
-                if updated:
+                            ", ".join(out_json["data"]["added"])))
+                if "updated" in out_json["data"]:
                         logger.info(_("  Updated publisher(s): {0}").format(
-                            ", ".join(updated)))
-
-        if failed:
-                if len(failed) != len(pubs):
-                        # Not all publishers retrieved could be added or
-                        # updated.
-                        return EXIT_PARTIAL
-                return EXIT_OOPS
-
-        # Now that the configuration was successful, attempt to refresh the
-        # catalog data for all of the configured publishers.  If the refresh
-        # had been allowed earlier while configuring each publisher, then this
-        # wouldn't be necessary and some possibly invalid configuration could
-        # have been eliminated sooner.  However, that would be much slower as
-        # each refresh requires a client image state rebuild.
-        return __refresh(api_inst, added + updated)
-
-def _add_update_pub(api_inst, prefix, pub=None, disable=None, sticky=None,
-    origin_uri=None, add_mirrors=EmptyI, remove_mirrors=EmptyI,
-    add_origins=EmptyI, remove_origins=EmptyI, ssl_cert=None, ssl_key=None,
-    search_before=None, search_after=None, search_first=False,
-    reset_uuid=None, refresh_allowed=False,
-    set_props=EmptyI, add_prop_values=EmptyI,
-    remove_prop_values=EmptyI, unset_props=EmptyI, approved_cas=EmptyI,
-    revoked_cas=EmptyI, unset_cas=EmptyI, proxy_uri=None):
-
-        repo = None
-        new_pub = False
-        if not pub:
-                try:
-                        pub = api_inst.get_publisher(prefix=prefix,
-                            alias=prefix, duplicate=True)
-                        if reset_uuid:
-                                pub.reset_client_uuid()
-                        repo = pub.repository
-                except api_errors.UnknownPublisher as e:
-                        if not origin_uri and not add_origins and \
-                            (remove_origins or remove_mirrors or
-                            remove_prop_values or add_mirrors):
-                                return EXIT_OOPS, str(e)
-
-                        # No pre-existing, so create a new one.
-                        repo = publisher.Repository()
-                        pub = publisher.Publisher(prefix, repository=repo)
-                        new_pub = True
-        elif not api_inst.has_publisher(prefix=pub.prefix):
-                new_pub = True
-
-        if not repo:
-                repo = pub.repository
-                if not repo:
-                        # Could be a new publisher from auto-configuration
-                        # case where no origin was provided in repository
-                        # configuration.
-                        repo = publisher.Repository()
-                        pub.repository = repo
-
-        if disable is not None:
-                # Set disabled property only if provided.
-                pub.disabled = disable
-
-        if sticky is not None:
-                # Set stickiness only if provided
-                pub.sticky = sticky
-
-        if proxy_uri:
-                # we only support a single proxy for now.
-                proxies = [publisher.ProxyURI(proxy_uri)]
-        else:
-                proxies = []
-
-        if origin_uri:
-                # For compatibility with old -O behaviour, treat -O as a wipe
-                # of existing origins and add the new one.
-
-                # Only use existing cert information if the new URI uses
-                # https for transport.
-                if repo.origins and not (ssl_cert or ssl_key) and \
-                    any(origin_uri.startswith(scheme + ":")
-                        for scheme in publisher.SSL_SCHEMES):
-
-                        for uri in repo.origins:
-                                if ssl_cert is None:
-                                        ssl_cert = uri.ssl_cert
-                                if ssl_key is None:
-                                        ssl_key = uri.ssl_key
-                                break
-
-                repo.reset_origins()
-                o = publisher.RepositoryURI(origin_uri, proxies=proxies)
-                repo.add_origin(o)
-
-                # XXX once image configuration supports storing this
-                # information at the uri level, ssl info should be set
-                # here.
-
-        for entry in (("mirror", add_mirrors, remove_mirrors), ("origin",
-            add_origins, remove_origins)):
-                etype, add, remove = entry
-                # XXX once image configuration supports storing this
-                # information at the uri level, ssl info should be set
-                # here.
-                if "*" in remove:
-                        getattr(repo, "reset_{0}s".format(etype))()
-                else:
-                        for u in remove:
-                                getattr(repo, "remove_{0}".format(etype))(u)
-
-                for u in add:
-                        uri = publisher.RepositoryURI(u, proxies=proxies)
-                        getattr(repo, "add_{0}".format(etype))(uri)
-
-        # None is checked for here so that a client can unset a ssl_cert or
-        # ssl_key by using -k "" or -c "".
-        if ssl_cert is not None or ssl_key is not None:
-                # Assume the user wanted to update the ssl_cert or ssl_key
-                # information for *all* of the currently selected
-                # repository's origins and mirrors that use SSL schemes.
-                found_ssl = False
-                for uri in repo.origins:
-                        if uri.scheme not in publisher.SSL_SCHEMES:
-                                continue
-                        found_ssl = True
-                        if ssl_cert is not None:
-                                uri.ssl_cert = ssl_cert
-                        if ssl_key is not None:
-                                uri.ssl_key = ssl_key
-                for uri in repo.mirrors:
-                        if uri.scheme not in publisher.SSL_SCHEMES:
-                                continue
-                        found_ssl = True
-                        if ssl_cert is not None:
-                                uri.ssl_cert = ssl_cert
-                        if ssl_key is not None:
-                                uri.ssl_key = ssl_key
-
-                if (ssl_cert or ssl_key) and not found_ssl:
-                        # None of the origins or mirrors for the publisher
-                        # use SSL schemes so the cert and key information
-                        # won't be retained.
-                        usage(_("Publisher '{0}' does not have any SSL-based "
-                            "origins or mirrors.").format(prefix))
-
-        if set_props or add_prop_values or remove_prop_values or unset_props:
-                pub.update_props(set_props=set_props,
-                    add_prop_values=add_prop_values,
-                    remove_prop_values=remove_prop_values,
-                    unset_props=unset_props)
-
-        if new_pub:
-                api_inst.add_publisher(pub,
-                    refresh_allowed=refresh_allowed, approved_cas=approved_cas,
-                    revoked_cas=revoked_cas, unset_cas=unset_cas,
-                    search_after=search_after, search_before=search_before,
-                    search_first=search_first)
-        else:
-                for ca in approved_cas:
-                        try:
-                                ca = os.path.normpath(
-                                    os.path.join(orig_cwd, ca))
-                                with open(ca, "rb") as fh:
-                                        s = fh.read()
-                        except EnvironmentError as e:
-                                if e.errno == errno.ENOENT:
-                                        raise api_errors.MissingFileArgumentException(
-                                            ca)
-                                elif e.errno == errno.EACCES:
-                                        raise api_errors.PermissionsException(
-                                            ca)
-                                raise
-                        pub.approve_ca_cert(s)
-
-                for hsh in revoked_cas:
-                        pub.revoke_ca_cert(hsh)
-
-                for hsh in unset_cas:
-                        pub.unset_ca_cert(hsh)
-
-                api_inst.update_publisher(pub,
-                    refresh_allowed=refresh_allowed, search_after=search_after,
-                    search_before=search_before, search_first=search_first)
-
-        return EXIT_OK, None
-
-def publisher_unset(api_inst, args):
+                            ", ".join(out_json["data"]["updated"])))
+
+        if errors:
+                _generate_error_messages(out_json["status"], errors,
+                    cmd="set-publisher", add_info={"repo_uri": repo_uri})
+
+        return out_json["status"]
+
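The client_api entry points used above all return a dict with a numeric "status" (the CLI exit code), an optional "errors" list of {"reason", "errtype"} dicts, and an optional "data" payload. A minimal sketch of consuming that envelope follows; the envelope literal is hand-built for illustration, not real pkg output:

        def consume(out_json):
                # Report any error reasons, then the summary data, and
                # return the exit status -- mirroring the flow above.
                for e in out_json.get("errors", []):
                        print("pkg: {0}".format(e.get("reason", "")))
                data = out_json.get("data", {})
                if "added" in data:
                        print("  Added publisher(s): {0}".format(
                            ", ".join(data["added"])))
                return out_json["status"]

        consume({"status": 0, "data": {"added": ["example.com"]}})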
+def publisher_unset(api_inst, pargs):
         """pkg unset-publisher publisher ..."""
 
-        opts, pargs = getopt.getopt(args, "")
-        if not pargs:
-                usage(_("at least one publisher must be specified"),
-                    cmd="unset-publisher")
-
-        errors = []
-        goal = len(args)
-        progtrack = api_inst.progresstracker
-        progtrack.job_start(progtrack.JOB_PKG_CACHE, goal=goal)
-        for name in args:
-
-                try:
-                        api_inst.remove_publisher(prefix=name, alias=name)
-                except api_errors.ImageFormatUpdateNeeded as e:
-                        format_update_error(e)
-                        return EXIT_OOPS
-                except (api_errors.PermissionsException,
-                    api_errors.PublisherError,
-                    api_errors.ModifyingSyspubException) as e:
-                        errors.append((name, e))
-                finally:
-                        progtrack.job_add_progress(progtrack.JOB_PKG_CACHE)
-
-        progtrack.job_done(progtrack.JOB_PKG_CACHE)
-        retcode = EXIT_OK
-        if errors:
-                if len(errors) == len(args):
-                        # If the operation failed for every provided publisher
-                        # prefix or alias, complete failure occurred.
-                        retcode = EXIT_OOPS
-                else:
-                        # If the operation failed for only some of the provided
-                        # publisher prefixes or aliases, then partial failure
-                        # occurred.
-                        retcode = EXIT_PARTIAL
-
-                txt = ""
-                for name, err in errors:
-                        txt += "\n"
-                        txt += _("Removal failed for '{pub}': {msg}").format(
-                            pub=name, msg=err)
-                        txt += "\n"
-                error(txt, cmd="unset-publisher")
-
-        return retcode
-
-def publisher_list(api_inst, args):
-        """pkg publishers"""
-        omit_headers = False
-        preferred_only = False
-        inc_disabled = True
-        valid_formats = ( "tsv", )
-        output_format = "default"
-        field_data = {
-            "publisher" : [("default", "tsv"), _("PUBLISHER"), ""],
-            "attrs" : [("default"), "", ""],
-            "type" : [("default", "tsv"), _("TYPE"), ""],
-            "status" : [("default", "tsv"), _("STATUS"), ""],
-            "repo_loc" : [("default"), _("LOCATION"), ""],
-            "uri": [("tsv"), _("URI"), ""],
-            "sticky" : [("tsv"), _("STICKY"), ""],
-            "enabled" : [("tsv"), _("ENABLED"), ""],
-            "syspub" : [("tsv"), _("SYSPUB"), ""],
-            "proxy"  : [("tsv"), _("PROXY"), ""],
-            "proxied" : [("default"), _("P"), ""]
-        }
-
-        desired_field_order = (_("PUBLISHER"), "", _("STICKY"),
-                               _("SYSPUB"), _("ENABLED"), _("TYPE"),
-                               _("STATUS"), _("P"), _("LOCATION"))
-
-        # Custom sort function for preserving field ordering
-        def sort_fields(one, two):
-                return desired_field_order.index(get_header(one)) - \
-                    desired_field_order.index(get_header(two))
-
-        # Functions for manipulating field_data records
-
-        def filter_default(record):
-                return "default" in record[0]
-
-        def filter_tsv(record):
-                return "tsv" in record[0]
-
-        def get_header(record):
-                return record[1]
-
-        def get_value(record):
-                return record[2]
-
-        def set_value(record, value):
-                record[2] = value
-
-        # 'a' is left over
-        opts, pargs = getopt.getopt(args, "F:HPan")
-        for opt, arg in opts:
-                if opt == "-H":
-                        omit_headers = True
-                if opt == "-P":
-                        preferred_only = True
-                if opt == "-n":
-                        inc_disabled = False
-                if opt == "-F":
-                        output_format = arg
-                        if output_format not in valid_formats:
-                                usage(_("Unrecognized format {format}."
-                                    " Supported formats: {valid}").format(
-                                    format=output_format,
-                                    valid=valid_formats), cmd="publisher")
-                                return EXIT_OOPS
-
-        api_inst.progresstracker.set_purpose(
-            api_inst.progresstracker.PURPOSE_LISTING)
-
-        cert_cache = {}
-        def get_cert_info(ssl_cert):
-                if not ssl_cert:
-                        return None
-                if ssl_cert not in cert_cache:
-                        c = cert_cache[ssl_cert] = {}
-                        errors = c["errors"] = []
-                        times = c["info"] = {
-                            "effective": "",
-                            "expiration": "",
-                        }
-
-                        try:
-                                cert = misc.validate_ssl_cert(ssl_cert)
-                        except (EnvironmentError,
-                            api_errors.CertificateError,
-                            api_errors.PermissionsException) as e:
-                                # If the cert information can't be retrieved,
-                                # add the errors to a list and continue on.
-                                errors.append(e)
-                                c["valid"] = False
-                        else:
-                                nb = cert.get_notBefore()
-                                t = time.strptime(nb, "%Y%m%d%H%M%SZ")
-                                nb = datetime.datetime.utcfromtimestamp(
-                                    calendar.timegm(t))
-                                times["effective"] = nb.strftime("%c")
-
-                                na = cert.get_notAfter()
-                                t = time.strptime(na, "%Y%m%d%H%M%SZ")
-                                na = datetime.datetime.utcfromtimestamp(
-                                    calendar.timegm(t))
-                                times["expiration"] = na.strftime("%c")
-                                c["valid"] = True
-
-                return cert_cache[ssl_cert]
-
-        retcode = EXIT_OK
+        opts, pargs = getopt.getopt(pargs, "")
+        out_json = client_api._publisher_unset("unset-publisher", api_inst,
+            pargs)
+
+        if "errors" in out_json:
+                _generate_error_messages(out_json["status"],
+                    out_json["errors"], cmd="unset-publisher")
+
+        return out_json["status"]
+
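_generate_error_messages is called in two passes above: once with selected_type to handle errors of that type and return the remainder, and once more to print whatever is left with a command prefix. A hypothetical reduction of that helper, inferred only from the call sites (the real one lives elsewhere in client.py):

        def generate_errors(status, errors, cmd=None, selected_type=None,
            add_info=None):
                # Print errors matching selected_type (all of them when
                # no filter is given); return the unhandled remainder.
                unhandled = []
                for e in errors:
                        if selected_type and \
                            e.get("errtype") not in selected_type:
                                unhandled.append(e)
                                continue
                        reason = e.get("reason", "")
                        if cmd:
                                reason = "{0}: {1}".format(cmd, reason)
                        print(reason)
                return unhandled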
+def publisher_list(op, api_inst, pargs, omit_headers, preferred_only,
+    inc_disabled, output_format):
+        """pkg publishers."""
+
+        ret_json = client_api._publisher_list(op, api_inst, pargs, omit_headers,
+            preferred_only, inc_disabled, output_format)
+        retcode = ret_json["status"]
+
         if len(pargs) == 0:
-                if preferred_only:
-                        pref_pub = api_inst.get_highest_ranked_publisher()
-                        if api_inst.has_publisher(pref_pub):
-                                pubs = [pref_pub]
-                        else:
-                                # Only publisher known is from an installed
-                                # package and is not configured in the image.
-                                pubs = []
-                else:
-                        pubs = [
-                            p for p in api_inst.get_publishers()
-                            if inc_disabled or not p.disabled
-                        ]
                 # Create a formatting string for the default output
-                # format
+                # format.
                 if output_format == "default":
                         fmt = "{0:14} {1:12} {2:8} {3:2} {4} {5}"
-                        filter_func = filter_default
 
                 # Create a formatting string for the tsv output
-                # format
+                # format.
                 if output_format == "tsv":
                         fmt = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}"
-                        filter_func = filter_tsv
-                        desired_field_order = (_("PUBLISHER"), "", _("STICKY"),
-                               _("SYSPUB"), _("ENABLED"), _("TYPE"),
-                               _("STATUS"), _("URI"), _("PROXY"))
-
-                # Extract our list of headers from the field_data
-                # dictionary Make sure they are extracted in the
-                # desired order by using our custom sort function
-                hdrs = map(get_header, sorted(filter(filter_func,
-                           field_data.values()), sort_fields))
-
-                # Output an header if desired
+
+                # Output a header if desired.
                 if not omit_headers:
-                        msg(fmt.format(*hdrs))
-
-                for p in pubs:
-                        # Store all our publisher related data in
-                        # field_data ready for output
-
-                        set_value(field_data["publisher"], p.prefix)
-                        # Setup the synthetic attrs field if the
-                        # format is default.
-                        if output_format == "default":
-                                pstatus = ""
-
-                                if not p.sticky:
-                                        pstatus_list = [_("non-sticky")]
-                                else:
-                                        pstatus_list = []
-
-                                if p.disabled:
-                                        pstatus_list.append(_("disabled"))
-                                if p.sys_pub:
-                                        pstatus_list.append(_("syspub"))
-                                if pstatus_list:
-                                        pstatus = "({0})".format(
-                                            ", ".join(pstatus_list))
-                                set_value(field_data["attrs"], pstatus)
-
-                        if p.sticky:
-                                set_value(field_data["sticky"], _("true"))
-                        else:
-                                set_value(field_data["sticky"], _("false"))
-                        if not p.disabled:
-                                set_value(field_data["enabled"], _("true"))
-                        else:
-                                set_value(field_data["enabled"], _("false"))
-                        if p.sys_pub:
-                                set_value(field_data["syspub"], _("true"))
-                        else:
-                                set_value(field_data["syspub"], _("false"))
-
-                        # Only show the selected repository's information in
-                        # summary view.
-                        if p.repository:
-                                origins = p.repository.origins
-                                mirrors = p.repository.mirrors
-                        else:
-                                origins = mirrors = []
-
-                        set_value(field_data["repo_loc"], "")
-                        set_value(field_data["proxied"], "")
-                        # Update field_data for each origin and output
-                        # a publisher record in our desired format.
-                        for uri in sorted(origins):
-                                # XXX get the real origin status
-                                set_value(field_data["type"], _("origin"))
-                                set_value(field_data["status"], _("online"))
-                                set_value(field_data["proxy"], "-")
-                                set_value(field_data["proxied"], "F")
-
-                                set_value(field_data["uri"], uri)
-
-                                if uri.proxies:
-                                        set_value(field_data["proxied"], _("T"))
-                                        set_value(field_data["proxy"],
-                                            ", ".join(
-                                            [proxy.uri
-                                            for proxy in uri.proxies]))
-                                if uri.system:
-                                        set_value(field_data["repo_loc"],
-                                            SYSREPO_HIDDEN_URI)
-                                else:
-                                        set_value(field_data["repo_loc"], uri)
-
-                                values = map(get_value,
-                                    sorted(filter(filter_func,
-                                    field_data.values()), sort_fields)
-                                )
-                                msg(fmt.format(*values))
-                        # Update field_data for each mirror and output
-                        # a publisher record in our desired format.
-                        for uri in mirrors:
-                                # XXX get the real mirror status
-                                set_value(field_data["type"], _("mirror"))
-                                set_value(field_data["status"], _("online"))
-                                set_value(field_data["proxy"], "-")
-                                set_value(field_data["proxied"], _("F"))
-
-                                set_value(field_data["uri"], uri)
-
-                                if uri.proxies:
-                                        set_value(field_data["proxied"], _("T"))
-                                        set_value(field_data["proxy"],
-                                            ", ".join(
-                                            [p.uri for p in uri.proxies]))
-                                if uri.system:
-                                        set_value(field_data["repo_loc"],
-                                            SYSREPO_HIDDEN_URI)
-                                else:
-                                        set_value(field_data["repo_loc"], uri)
-
-                                values = map(get_value,
-                                    sorted(filter(filter_func,
-                                    field_data.values()), sort_fields)
-                                )
-                                msg(fmt.format(*values))
-
-                        if not origins and not mirrors:
-                                set_value(field_data["type"], "")
-                                set_value(field_data["status"], "")
-                                set_value(field_data["uri"], "")
-                                set_value(field_data["proxy"], "")
-                                values = map(get_value,
-                                    sorted(filter(filter_func,
-                                    field_data.values()), sort_fields)
-                                )
-                                msg(fmt.format(*values))
-
+                        msg(fmt.format(*ret_json["data"]["headers"]))
+
+                for p in ret_json["data"]["publishers"]:
+                        msg(fmt.format(*p))
         else:
-                def display_ssl_info(uri):
-                        retcode = EXIT_OK
-                        c = get_cert_info(uri.ssl_cert)
-                        msg(_("              SSL Key:"), uri.ssl_key)
-                        msg(_("             SSL Cert:"), uri.ssl_cert)
-
-                        if not c:
-                                return retcode
-
-                        if c["errors"]:
-                                retcode = EXIT_OOPS
-
-                        for e in c["errors"]:
-                                logger.error("\n" + str(e) + "\n")
-
-                        if c["valid"]:
+                def display_signing_certs(p):
+                        if "Approved CAs" in p:
+                                msg(_("         Approved CAs:"),
+                                    p["Approved CAs"][0])
+                                for h in p["Approved CAs"][1:]:
+                                        msg(_("                     :"), h)
+                        if "Revoked CAs" in p:
+                                msg(_("          Revoked CAs:"),
+                                    p["Revoked CAs"][0])
+                                for h in p["Revoked CAs"][1:]:
+                                        msg(_("                     :"), h)
+
+                def display_ssl_info(uri_data):
+                        msg(_("              SSL Key:"), uri_data["SSL Key"])
+                        msg(_("             SSL Cert:"), uri_data["SSL Cert"])
+
+                        if "errors" in ret_json:
+                                for e in ret_json["errors"]:
+                                        if "errtype" in e and \
+                                            e["errtype"] == "cert_info":
+                                                emsg(e["reason"])
+
+                        if "Cert. Effective Date" in uri_data:
                                 msg(_(" Cert. Effective Date:"),
-                                    c["info"]["effective"])
+                                    uri_data["Cert. Effective Date"])
                                 msg(_("Cert. Expiration Date:"),
-                                    c["info"]["expiration"])
+                                    uri_data["Cert. Expiration Date"])
+
+                if "data" not in ret_json or "publisher_details" not in \
+                    ret_json["data"]:
                         return retcode
 
-                def display_repository(r):
-                        retcode = 0
-                        for uri in r.origins:
-                                msg(_("           Origin URI:"), uri)
-                                if uri.proxies:
-                                        msg(_("                Proxy:"),
-                                            ", ".join(
-                                            [p.uri for p in uri.proxies]))
-                                rval = display_ssl_info(uri)
-                                if rval == 1:
-                                        retcode = EXIT_PARTIAL
-
-                        for uri in r.mirrors:
-                                msg(_("           Mirror URI:"), uri)
-                                if uri.proxies:
-                                        msg(_("                Proxy:"),
-                                            ", ".join(
-                                            [p.uri for p in uri.proxies]))
-                                rval = display_ssl_info(uri)
-                                if rval == 1:
-                                        retcode = EXIT_PARTIAL
-                        return retcode
-
-                def display_signing_certs(p):
-                        if p.approved_ca_certs:
-                                msg(_("         Approved CAs:"),
-                                    p.approved_ca_certs[0])
-                                for h in p.approved_ca_certs[1:]:
-                                        msg(_("                     :"), h)
-                        if p.revoked_ca_certs:
-                                msg(_("          Revoked CAs:"),
-                                    p.revoked_ca_certs[0])
-                                for h in p.revoked_ca_certs[1:]:
-                                        msg(_("                     :"), h)
-
-                for name in pargs:
-                        # detailed print
-                        pub = api_inst.get_publisher(prefix=name, alias=name)
-                        dt = api_inst.get_publisher_last_update_time(pub.prefix)
-                        if dt:
-                                dt = dt.strftime("%c")
-
+                for pub in ret_json["data"]["publisher_details"]:
                         msg("")
-                        msg(_("            Publisher:"), pub.prefix)
-                        msg(_("                Alias:"), pub.alias)
-
-                        rval = display_repository(pub.repository)
-                        if rval != 0:
-                                # There was an error in displaying some
-                                # of the information about a repository.
-                                # However, continue on.
-                                retcode = rval
-
-                        msg(_("          Client UUID:"), pub.client_uuid)
-                        msg(_("      Catalog Updated:"), dt)
+                        msg(_("            Publisher:"), pub["Publisher"])
+                        msg(_("                Alias:"), pub["Alias"])
+
+                        if "origins" in pub:
+                                for od in pub["origins"]:
+                                        msg(_("           Origin URI:"),
+                                            od["Origin URI"])
+                                        if "Proxy" in od:
+                                                msg(_("                Proxy:"),
+                                                    ", ".join(od["Proxy"]))
+                                        display_ssl_info(od)
+
+                        if "mirrors" in pub:
+                                for md in pub["mirrors"]:
+                                        msg(_("           Mirror URI:"),
+                                            md["Mirror URI"])
+                                        if "Proxy" in md:
+                                                msg(_("                Proxy:"),
+                                                    ", ".join(md["Proxy"]))
+                                        display_ssl_info(md)
+
+                        msg(_("          Client UUID:"),
+                            pub["Client UUID"])
+                        msg(_("      Catalog Updated:"),
+                            pub["Catalog Updated"])
                         display_signing_certs(pub)
-                        if pub.disabled:
-                                msg(_("              Enabled:"), _("No"))
-                        else:
-                                msg(_("              Enabled:"), _("Yes"))
-                        pub_items = sorted(pub.properties.iteritems())
+                        msg(_("              Enabled:"),
+                            _(pub["Enabled"]))
+
+                        if "Properties" not in pub:
+                                continue
+                        pub_items = sorted(
+                            pub["Properties"].iteritems())
                         property_padding = "                      "
                         properties_displayed = False
                         for k, v in pub_items:
@@ -5953,6 +4956,38 @@
 
     "sync_act" :              ("",  "sync-actuators"),
     "act_timeout" :           ("",  "sync-actuators-timeout"),
+
+    "ssl_key":                ("k", ""),
+    "ssl_cert":               ("c", ""),
+    "approved_ca_certs":      ("", "approve-ca-cert"),
+    "revoked_ca_certs":       ("", "revoke-ca-cert"),
+    "unset_ca_certs":         ("", "unset-ca-cert"),
+    "origin_uri":             ("O", ""),
+    "reset_uuid":             ("", "reset-uuid"),
+    "add_mirrors":            ("m", "add-mirror"),
+    "remove_mirrors":         ("M", "remove-mirror"),
+    "add_origins":            ("g", "add-origin"),
+    "remove_origins":         ("G", "remove-origin"),
+    "refresh_allowed":        ("", "no-refresh"),
+    "enable":                 ("e", "enable"),
+    "disable":                ("d", "disable"),
+    "sticky":                 ("", "sticky"),
+    "non_sticky":             ("", "non-sticky"),
+    "repo_uri":               ("p", ""),
+    "proxy_uri":              ("", "proxy"),
+    "search_before":          ("", "search-before"),
+    "search_after":           ("", "search-after"),
+    "search_first":           ("P", "search-first"),
+    "set_props":              ("", "set-property"),
+    "add_prop_values":        ("", "add-property-value"),
+    "remove_prop_values":     ("", "remove-property-value"),
+    "unset_props":            ("", "unset-property"),
+    "preferred_only":         ("P", ""),
+    "inc_disabled":           ("n", ""),
+    "info_local":             ("l", ""),
+    "info_remote":            ("r", ""),
+    "display_license":        ("", "license"),
+    "publisher_a":            ("a", "")
 }
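Each entry in the table above maps an option's internal name to its short and long CLI spellings, with "" meaning no spelling of that kind. A sketch of deriving getopt specs from such a table (simplified: it assumes every option takes an argument, which boolean flags such as "reset-uuid" do not):

        def getopt_specs(table):
                shortopts = ""
                longopts = []
                for short, long_ in table.values():
                        if short:
                                shortopts += short + ":"
                        if long_:
                                longopts.append(long_ + "=")
                return shortopts, longopts

        # getopt_specs({"ssl_key": ("k", ""), "repo_uri": ("p", "")})
        # returns ("k:p:", []); dict ordering may vary.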
 
 #
@@ -6325,7 +5360,7 @@
                 # This new exception will have the CLI options, so can be passed
                 # directly to usage().
                 new_e = api_errors.InvalidOptionError(err_type=e.err_type,
-                    options=cli_opts, msg=e.msg)
+                    options=cli_opts, msg=e.msg, valid_args=e.valid_args)
 
                 usage(str(new_e), cmd=subcommand)
 
--- a/src/modules/client/api.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/api.py	Fri Apr 03 19:02:53 2015 -0700
@@ -665,6 +665,9 @@
                         if val is not None:
                                 self.check_be_name(val)
                                 if not self._img.is_liveroot():
+                                        self._cancel_cleanup_exception()
+                                        self._activity_lock.release()
+                                        self._img.unlock()
                                         raise apx.BENameGivenOnDeadBE(val)
 
         def __plan_common_finish(self):
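The lines added above drop the locks that planning has already taken before BENameGivenOnDeadBE propagates, so an aborted plan does not leave the image locked. A generic sketch of that release-before-raise shape (the plan code holds its locks across a longer lifecycle, which is why it releases explicitly instead of using try/finally):

        import threading

        _lock = threading.Lock()

        def start_plan(be_name_ok):
                _lock.acquire()
                if not be_name_ok:
                        # Undo partially acquired state before raising so
                        # the caller sees a clean, unlocked image.
                        _lock.release()
                        raise ValueError("BE name given on dead BE")
                # ... planning continues; a later phase releases _lock.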
--- a/src/modules/client/api_errors.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/api_errors.py	Fri Apr 03 19:02:53 2015 -0700
@@ -3152,19 +3152,22 @@
         """Used to indicate an issue with verifying options passed to a certain
         operation."""
 
-        GENERIC     = "generic"      # generic option violation
-        OPT_REPEAT  = "opt_repeat"   # option repetition is not allowed
-        ARG_REPEAT  = "arg_repeat"   # argument repetition is not allowed
-        ARG_INVALID = "arg_invalid"  # argument is invalid
-        INCOMPAT    = "incompat"     # option 'a' can not be specified with option 'b'
-        REQUIRED    = "required"     # option 'a' requires option 'b'
-        XOR         = "xor"          # either option 'a' or option 'b' must be specified
-
-        def __init__(self, err_type=GENERIC, options=[], msg=None):
+        GENERIC      = "generic"      # generic option violation
+        OPT_REPEAT   = "opt_repeat"   # option repetition is not allowed
+        ARG_REPEAT   = "arg_repeat"   # argument repetition is not allowed
+        ARG_INVALID  = "arg_invalid"  # argument is invalid
+        INCOMPAT     = "incompat"     # option 'a' cannot be specified with option 'b'
+        REQUIRED     = "required"     # option 'a' requires option 'b'
+        REQUIRED_ANY = "required_any" # option 'a' requires at least one of options 'b', 'c', ...
+        XOR          = "xor"          # either option 'a' or option 'b' must be specified
+
+        def __init__(self, err_type=GENERIC, options=[], msg=None,
+            valid_args=[]):
 
                 self.err_type = err_type
                 self.options = options
                 self.msg = msg
+                self.valid_args = valid_args
 
         def __str__(self):
 
@@ -3187,9 +3190,13 @@
                             op2=self.options[1])
                 elif self.err_type == self.ARG_INVALID:
                         assert len(self.options) == 2
-                        return _("Argument '{op1}' for option '{op2}' is "
+                        s = _("Argument '{op1}' for option '{op2}' is "
                             "invalid.").format(op1=self.options[0],
                             op2=self.options[1])
+                        if self.valid_args:
+                                s += _("\nSupported: {0}").format(", ".join(
+                                    self.valid_args))
+                        return s
                 elif self.err_type == self.INCOMPAT:
                         assert len(self.options) == 2
                         return _("The '{op1}' and '{op2}' option may "
@@ -3200,6 +3207,12 @@
                         return _("'{op1}' may only be used with "
                             "'{op2}'.").format(op1=self.options[0],
                             op2=self.options[1])
+                elif self.err_type == self.REQUIRED_ANY:
+                        assert len(self.options) > 2
+                        return _("'{op1}' may only be used with "
+                            "'{op2}' or '{op3}'.").format(op1=self.options[0],
+                            op2=", ".join(self.options[1:-1]),
+                            op3=self.options[-1])
                 elif self.err_type == self.XOR:
                         assert len(self.options) == 2
                         return _("Either '{op1}' or '{op2}' must be "
--- a/src/modules/client/bootenv.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/bootenv.py	Fri Apr 03 19:02:53 2015 -0700
@@ -689,7 +689,7 @@
 
         """BootEnvNull is a class that gets used when libbe doesn't exist."""
 
-        def __init__(self, img):
+        def __init__(self, img, progress_tracker=None):
                 pass
 
         @staticmethod
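BootEnvNull stands in for BootEnv when libbe is absent, so its constructor must track the real class's signature; the change above adds the new progress_tracker parameter to keep the two interchangeable. A minimal sketch of the null-object pattern involved (class and function names here are illustrative):

        class RealBE(object):
                def __init__(self, img, progress_tracker=None):
                        self.img = img
                        self.progress_tracker = progress_tracker

        class NullBE(object):
                def __init__(self, img, progress_tracker=None):
                        # Accept and ignore the same arguments.
                        pass

        def boot_env_for(img, have_libbe, tracker=None):
                cls = RealBE if have_libbe else NullBE
                return cls(img, progress_tracker=tracker)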
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/client/client_api.py	Fri Apr 03 19:02:53 2015 -0700
@@ -0,0 +1,3255 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+
+import calendar
+import collections
+import datetime
+import errno
+import getopt
+import itertools
+import simplejson as json
+import os
+import re
+import socket
+import sys
+import tempfile
+import textwrap
+import time
+import traceback
+import jsonschema
+
+import pkg
+import pkg.actions as actions
+import pkg.client.api as api
+import pkg.client.api_errors as api_errors
+import pkg.client.bootenv as bootenv
+import pkg.client.progress as progress
+import pkg.client.linkedimage as li
+import pkg.client.publisher as publisher
+import pkg.client.options as options
+import pkg.fmri as fmri
+import pkg.misc as misc
+import pkg.pipeutils as pipeutils
+import pkg.portable as portable
+import pkg.version as version
+
+from pkg.client import global_settings
+from pkg.client.api import (IMG_TYPE_ENTIRE, IMG_TYPE_PARTIAL,
+    IMG_TYPE_USER, RESULT_CANCELED, RESULT_FAILED_BAD_REQUEST,
+    RESULT_FAILED_CONFIGURATION, RESULT_FAILED_CONSTRAINED,
+    RESULT_FAILED_LOCKED, RESULT_FAILED_STORAGE, RESULT_NOTHING_TO_DO,
+    RESULT_SUCCEEDED, RESULT_FAILED_TRANSPORT, RESULT_FAILED_UNKNOWN,
+    RESULT_FAILED_OUTOFMEMORY)
+from pkg.client.debugvalues import DebugValues
+from pkg.client.pkgdefs import *
+from pkg.misc import EmptyI, msg, emsg, PipeError
+
+CLIENT_API_VERSION = 81
+PKG_CLIENT_NAME = "pkg"
+pkg_timer = pkg.misc.Timer("pkg client")
+SYSREPO_HIDDEN_URI = "<system-repository>"
+PROG_DELAY = 5.0
+
+
+def _byteify(input):
+        """Convert unicode string into byte string. This will be used by json
+        loads function."""
+
+        if isinstance(input, dict):
+                return dict([(_byteify(key), _byteify(value)) for key, value in
+                    input.iteritems()])
+        elif isinstance(input, list):
+                return [_byteify(element) for element in input]
+        elif isinstance(input, unicode):
+                return input.encode('utf-8')
+        else:
+                return input
+
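json.loads returns unicode strings throughout the parsed tree on Python 2; _byteify is run over that result when byte strings are wanted. For example:

        parsed = _byteify(json.loads('{"op": "publisher-set"}'))
        # {'op': 'publisher-set'} -- plain str keys and values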
+def _get_pkg_input_schema(subcommand, opts_mapping=misc.EmptyDict):
+        """Get the input schema for pkg subcommand."""
+
+        # Return None if the subcommand is not defined.
+        if subcommand not in cmds:
+                return None
+
+        props = {}
+        data_schema = __get_pkg_input_schema(subcommand,
+            opts_mapping=opts_mapping)
+        props.update(data_schema)
+        schema = __construct_json_schema("{0} input schema".format(subcommand),
+            properties=props)
+        return schema
+
+def _get_pkg_output_schema(subcommand):
+        """Get the output schema for pkg subcommand."""
+
+        # Return None if the subcommand is not defined.
+        if subcommand not in cmds:
+                return None
+
+        props = {"status": {"type": "number"},
+            "errors": {"type": "array",
+                "items": __default_error_json_schema()
+                }
+            }
+        required = ["status"]
+        data_schema = cmds[subcommand][1]()
+        if data_schema:
+                props["data"] = data_schema
+        schema = __construct_json_schema("{0} output schema".format(
+            subcommand), properties=props, required=required)
+        return schema
+
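These schemas let payloads be validated with jsonschema (imported above) before they cross the RAD boundary. A sketch, assuming "list" is registered in the cmds table defined later in this module:

        schema = _get_pkg_output_schema("list")
        result = {"status": 0, "data": [
            {"pub": "solaris", "pkg": "web/curl", "version": "7.40.0",
             "summary": "curl", "states": ["installed"]}]}
        jsonschema.validate(result, schema)     # raises on mismatch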
+def __get_pkg_input_schema(pkg_op, opts_mapping=misc.EmptyDict):
+        properties = {}
+        for entry in options.pkg_op_opts[pkg_op]:
+                if type(entry) != tuple:
+                        continue
+                if len(entry) == 4:
+                        opt, dummy_default, dummy_valid_args, \
+                            schema = entry
+
+                        if opt in opts_mapping:
+                                optn = opts_mapping[opt]
+                                if optn:
+                                        properties[optn] = schema
+                                else:
+                                        properties[opt] = schema
+                        else:
+                                properties[opt] = schema
+
+        arg_name = "pargs_json"
+        input_schema = \
+            {arg_name: {
+                "type": "array",
+                "items": {
+                    "type": "string"
+                    }
+                },
+                "opts_json": {"type": "object",
+                    "properties": properties
+                },
+            }
+        return input_schema
+
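The schema returned above always carries the positional arguments under "pargs_json" and an "opts_json" object whose properties come from options.pkg_op_opts. Shape-wise it looks like this (the "verbose" entry is illustrative, not taken from the real option tables):

        example_input_schema = {
            "pargs_json": {
                "type": "array",
                "items": {"type": "string"}
            },
            "opts_json": {
                "type": "object",
                "properties": {
                    "verbose": {"type": "boolean"}
                }
            }
        }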
+def __pkg_list_output_schema():
+        data_schema = {"type": "array",
+            "items": {
+                "type": "object",
+                "properties": {
+                    "pub": {"type": "string"},
+                    "pkg": {"type": "string"},
+                    "version": {"type": "string"},
+                    "summary": {"type": "string"},
+                    "states": {"type": "array",
+                        "items": {"type": "string"}}
+                    }
+                }
+            }
+        return data_schema
+
+def __get_plan_props():
+        plan_props = {"type": "object",
+            "properties": {
+                "image-name": {"type": ["null", "string"]},
+                "affect-services": {
+                  "type": "array",
+                  "items": {}
+                },
+                "licenses": {
+                  "type": "array",
+                  "items": [
+                    {
+                      "type": "array",
+                      "items": [
+                        {"type": ["null", "string"]},
+                        {},
+                        {
+                          "type": "array",
+                          "items": [
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "boolean"]},
+                            {"type": ["null", "boolean"]}
+                          ]}]
+                    },
+                    {"type": "array",
+                      "items": [
+                        {"type": ["null", "string"]},
+                        {},
+                        {"type": "array",
+                          "items": [
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "string"]},
+                            {"type": ["null", "boolean"]},
+                            {"type": ["null", "boolean"]}
+                          ]}]}]
+                },
+                "child-images": {
+                  "type": "array",
+                  "items": {}
+                },
+                "change-mediators": {
+                  "type": "array",
+                  "items": {}
+                },
+                "change-facets": {
+                  "type": "array",
+                  "items": {}
+                },
+                "remove-packages": {
+                  "type": "array",
+                  "items": {}
+                },
+                "be-name": {
+                  "type": ["null", "string"],
+                },
+                "space-available": {
+                  "type": ["null", "number"],
+                },
+                "boot-archive-rebuild": {
+                  "type": ["null", "boolean"],
+                },
+                "version": {
+                  "type": ["null", "number"],
+                },
+                "create-new-be": {
+                  "type": ["null", "boolean"],
+                },
+                "change-packages": {
+                  "type": "array",
+                  "items": {}
+                },
+                "space-required": {
+                  "type": ["null", "number"],
+                },
+                "change-variants": {
+                  "type": "array",
+                  "items": {}
+                },
+                "affect-packages": {
+                  "type": "array",
+                  "items": {}
+                },
+                "change-editables": {
+                  "type": "array",
+                  "items": {}
+                },
+                "create-backup-be": {
+                  "type": ["null", "boolean"],
+                },
+                "release-notes": {
+                  "type": "array",
+                  "items": {}
+                },
+                "add-packages": {
+                  "type": "array",
+                  "items": {
+                    "type": ["null", "string"]
+                  },
+                },
+                "backup-be-name": {
+                  "type": ["null", "string"]
+                },
+                "activate-be": {
+                  "type": ["null", "boolean"],
+                }
+              }
+            }
+        return plan_props
+
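None of the plan properties above are required, so a valid plan instance may carry only the fields a caller cares about. An illustrative minimal example:

        example_plan = {
            "be-name": "solaris-backup-1",
            "create-new-be": True,
            "space-required": 1048576,
            "add-packages": ["pkg://solaris/web/curl"],
            "release-notes": []
        }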
+def __pkg_exact_install_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "plan": __get_plan_props(),
+                "release_notes_url": {"type": ["null", "string"]}
+                }
+            }
+        return data_schema
+
+def __pkg_install_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "plan": __get_plan_props(),
+                "release_notes_url": {"type": ["null", "string"]}
+                }
+            }
+        return data_schema
+
+def __pkg_update_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "plan": __get_plan_props(),
+                "release_notes_url": {"type": ["null", "string"]}
+                }
+            }
+        return data_schema
+
+def __pkg_uninstall_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "plan": __get_plan_props(),
+                }
+            }
+        return data_schema
+
+def __pkg_publisher_set_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "header": {"type": "string"},
+                "added": {"type": "array", "items": {"type": "string"}},
+                "updated": {"type": "array", "items": {"type": "string"}}
+                }
+            }
+        return data_schema
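+
+# Example (hypothetical values) of data matching the schema above:
+#
+#     {"header": "pkg set-publisher:", "added": ["example.com"],
+#      "updated": []}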
+
+def __pkg_publisher_unset_output_schema():
+        return {}
+
+def __pkg_publisher_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "header": {"type": "array", "items": {"type": "string"}},
+                "publishers": {"type": "array", "items": {"type": "array",
+                    "items": {"type": ["null", "string"]}}},
+                "publisher_details": {"type": "array",
+                    "items": {"type": "object", "properties": {
+                        "Publisher": {"type": ["null", "string"]},
+                        "Alias": {"type": ["null", "string"]},
+                        "Client UUID": {"type": ["null", "string"]},
+                        "Catalog Updated": {"type": ["null", "string"]},
+                        "Enabled": {"type": ["null", "string"]},
+                        "Properties": {"type": "object"},
+                        "origins": {"type": "array",
+                            "items": {"type": "object"}},
+                        "mirrors": {"type": "array",
+                            "items": {"type": "object"}},
+                        "Approved CAs": {"type": "array"},
+                        "Revoked CAs": {"type": "array"},
+                    }}}
+                }
+            }
+        return data_schema
+
+def __pkg_info_output_schema():
+        data_schema = {"type": "object",
+            "properties": {
+                "licenses": {"type": "array", "items": {"type": "array",
+                    "items": {"type": ["null", "string"]}}},
+                "package_attrs": {"type": "array",
+                    "items": {"type": "array", "items": {"type": "array",
+                    "items": [{"type": ["null", "string"]}, {"type": "array",
+                    "items": {"type": ["null", "string"]}}]}}}
+            }
+        }
+        return data_schema
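+
+# Example (hypothetical, abbreviated) of data matching the schema above:
+#
+#     {"licenses": [["web/curl", "...license text..."]],
+#      "package_attrs": []}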
+
+def _format_update_error(e, errors_json=None):
+        # This error is collected for the caller whenever an
+        # ImageFormatUpdateNeeded exception is encountered.
+        if errors_json:
+                error = {"reason": str(e), "errtype": "format_update"}
+                errors_json.append(error)
+
+def _error_json(text, cmd=None, errors_json=None, errorType=None):
+        """Prepare an error message for json output. """
+
+        if not isinstance(text, basestring):
+                # Assume it's an object that can be stringified.
+                text = str(text)
+
+        # If the message starts with whitespace, assume that it should come
+        # *before* the command-name prefix.
+        text_nows = text.lstrip()
+        ws = text[:len(text) - len(text_nows)]
+
+        if cmd:
+                text_nows = "{0}: {1}".format(cmd, text_nows)
+                pkg_cmd = "pkg "
+        else:
+                pkg_cmd = "pkg: "
+
+        if errors_json is not None:
+                error = {}
+                if errorType:
+                        error["errtype"] = errorType
+                error["reason"] = ws + pkg_cmd + text_nows
+                errors_json.append(error)
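+
+# Example usage (hypothetical):
+#
+#     errs = []
+#     _error_json("unknown package", cmd="install", errors_json=errs)
+#     # errs == [{"reason": "pkg install: unknown package"}]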
+
+def _collect_proxy_config_errors(errors_json=None):
+        """If the user has configured http_proxy or https_proxy in the
+        environment, collect the values. Some transport errors are
+        not debuggable without this information handy."""
+
+        http_proxy = os.environ.get("http_proxy", None)
+        https_proxy = os.environ.get("https_proxy", None)
+
+        if not http_proxy and not https_proxy:
+                return
+
+        err = "\nThe following proxy configuration is set in the " \
+            "environment:\n"
+        if http_proxy:
+                err += "http_proxy: {0}\n".format(http_proxy)
+        if https_proxy:
+                err += "https_proxy: {0}\n".format(https_proxy)
+        if errors_json:
+                errors_json.append({"reason": err})
+
+def _get_fmri_args(api_inst, pargs, cmd=None, errors_json=None):
+        """ Convenience routine to check that input args are valid fmris. """
+
+        res = []
+        errors = []
+        for pat, err, pfmri, matcher in api_inst.parse_fmri_patterns(pargs):
+                if not err:
+                        res.append((pat, err, pfmri, matcher))
+                        continue
+                if isinstance(err, version.VersionError):
+                        # For version errors, include the pattern so
+                        # that the user understands why it failed.
+                        errors.append("Illegal FMRI '{0}': {1}".format(pat,
+                            err))
+                else:
+                        # Including the pattern is redundant for other
+                        # exceptions.
+                        errors.append(err)
+        if errors:
+                _error_json("\n".join(str(e) for e in errors),
+                    cmd=cmd, errors_json=errors_json)
+        return len(errors) == 0, res
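+
+# Example usage (hypothetical): validate user-supplied patterns before
+# planning an operation:
+#
+#     errs = []
+#     ok, matched = _get_fmri_args(api_inst, ["vim", "gcc@4.9"],
+#         cmd="install", errors_json=errs)
+#     # ok is False and errs is populated if any pattern is illegal.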
+
+def __default_error_json_schema():
+        """Get the default error json schema."""
+
+        error_schema = {
+            "type": "object",
+            "properties": {
+                "errtype": {"type": "string",
+                    "enum": ["format_update", "catalog_refresh",
+                    "catalog_refresh_failed", "inventory",
+                    "inventory_extra", "plan_license", "publisher_set",
+                    "unsupported_repo_op", "cert_info", "info_not_found",
+                    "info_no_licenses"]},
+                "reason": {"type": "string"},
+                "info": {"type": "string"}
+                }
+            }
+        return error_schema
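+
+# Example (hypothetical) of an error object matching this schema:
+#
+#     {"errtype": "inventory", "reason": "pkg list: no known packages"}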
+
+def __construct_json_schema(title, description=None, stype="object",
+    properties=None, required=None, additional_prop=False):
+        """Construct  json schema."""
+
+        json_schema = {"$schema": "http://json-schema.org/draft-04/schema#",
+            "title": title,
+            "type": stype,
+            }
+        if description:
+                json_schema["description"] = description
+        if properties:
+                json_schema["properties"] = properties
+        if required:
+                json_schema["required"] = required
+        json_schema["additionalProperties"] = additional_prop
+        return json_schema
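+
+# Example usage (hypothetical):
+#
+#     schema = __construct_json_schema("pkg install",
+#         properties={"status": {"type": "number"}}, required=["status"])
+#     # schema["$schema"] is the draft-04 URI and
+#     # schema["additionalProperties"] is False.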
+
+def __prepare_json(status, op=None, schema=None, data=None, errors=None):
+        """Prepare json structure for returning."""
+
+        ret_json = {"status": status}
+
+        if errors:
+                if not isinstance(errors, list):
+                        ret_json["errors"] = [errors]
+                else:
+                        ret_json["errors"] = errors
+        if data:
+                ret_json["data"] = data
+        if op:
+                op_schema = _get_pkg_output_schema(op)
+                try:
+                        jsonschema.validate(ret_json, op_schema)
+                except jsonschema.ValidationError as e:
+                        newret_json = {"status": EXIT_OOPS,
+                            "errors": [{"reason": str(e)}]}
+                        return newret_json
+        if schema:
+                ret_json["schema"] = schema
+
+        return ret_json
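+
+# Example (hypothetical): __prepare_json(EXIT_OK, data={"plan": {}})
+# returns {"status": 0, "data": {"plan": {}}}; when 'op' is supplied the
+# result is validated against that operation's output schema first.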
+
+def _collect_catalog_failures(cre, ignore_perms_failure=False, errors=None):
+        """Collect the failures from the CatalogRefreshException 'cre' into
+        'errors' and return the number of successfully refreshed catalogs."""
+
+        total = cre.total
+        succeeded = cre.succeeded
+
+        txt = _("pkg: {succeeded}/{total} catalogs successfully "
+            "updated:").format(succeeded=succeeded, total=total)
+        if errors is not None:
+                if cre.failed:
+                        error = {"reason": txt, "errtype": "catalog_refresh"}
+                else:
+                        error = {"info": txt, "errtype": "catalog_refresh"}
+                errors.append(error)
+
+        for pub, err in cre.failed:
+                if ignore_perms_failure and \
+                    not isinstance(err, api_errors.PermissionsException):
+                        # If any errors other than a permissions exception are
+                        # found, then don't ignore them.
+                        ignore_perms_failure = False
+                        break
+
+        if cre.failed and ignore_perms_failure:
+                # Consider those that failed to have succeeded and add them
+                # to the actual successful total.
+                return succeeded + len(cre.failed)
+
+        for pub, err in cre.failed:
+                if errors is not None:
+                        error = {"reason": str(err),
+                            "errtype": "catalog_refresh_failed"}
+                        errors.append(error)
+
+        if cre.errmessage:
+                if errors is not None:
+                        error = {"reason": str(cre.errmessage),
+                            "errtype": "catalog_refresh"}
+                        errors.append(error)
+
+        return succeeded
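+
+# Example (hypothetical): for a CatalogRefreshException in which one of
+# two publishers failed, this appends one "catalog_refresh" summary and
+# one "catalog_refresh_failed" entry to 'errors' and returns 1.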
+
+def _list_inventory(op, api_inst, pargs,
+    li_parent_sync, list_all, list_installed_newest, list_newest,
+    list_upgradable, origins, quiet, refresh_catalogs, **other_opts):
+        """List packages."""
+
+        api_inst.progresstracker.set_purpose(
+            api_inst.progresstracker.PURPOSE_LISTING)
+
+        variants = False
+        pkg_list = api.ImageInterface.LIST_INSTALLED
+        if list_all:
+                variants = True
+                pkg_list = api.ImageInterface.LIST_ALL
+        elif list_installed_newest:
+                pkg_list = api.ImageInterface.LIST_INSTALLED_NEWEST
+        elif list_newest:
+                pkg_list = api.ImageInterface.LIST_NEWEST
+        elif list_upgradable:
+                pkg_list = api.ImageInterface.LIST_UPGRADABLE
+
+        # Each pattern in pargs can be a partial or full FMRI, so
+        # extract the individual components.  These patterns are
+        # transformed here so that partial failure can be detected
+        # when more than one pattern is provided.
+        errors_json = []
+        rval, res = _get_fmri_args(api_inst, pargs, cmd=op,
+            errors_json=errors_json)
+        if not rval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        api_inst.log_operation_start(op)
+        if pkg_list != api_inst.LIST_INSTALLED and refresh_catalogs:
+                # If the user requested packages other than those
+                # installed, ensure that a refresh is performed if
+                # needed since the catalog may be out of date or
+                # invalid as a result of publisher information
+                # changing (such as an origin uri, etc.).
+                try:
+                        api_inst.refresh()
+                except api_errors.PermissionsException:
+                        # Ignore permission exceptions with the
+                        # assumption that an unprivileged user is
+                        # executing this command and that the
+                        # refresh doesn't matter.
+                        pass
+                except api_errors.CatalogRefreshException as e:
+                        succeeded = _collect_catalog_failures(e,
+                            ignore_perms_failure=True, errors=errors_json)
+                        if succeeded != e.total:
+                                # If total number of publishers does
+                                # not match 'successful' number
+                                # refreshed, abort.
+                                return __prepare_json(EXIT_OOPS,
+                                    errors=errors_json)
+
+                except:
+                        # Ignore the above error and just use what
+                        # already exists.
+                        pass
+
+        state_map = [
+            [(api.PackageInfo.INSTALLED, "installed")],
+            [(api.PackageInfo.FROZEN, "frozen")],
+            [
+                (api.PackageInfo.OBSOLETE, "obsolete"),
+                (api.PackageInfo.RENAMED, "renamed")
+            ],
+        ]
+
+        # Now get the matching list of packages and display it.
+        found = False
+
+        data = []
+        try:
+                res = api_inst.get_pkg_list(pkg_list, patterns=pargs,
+                    raise_unmatched=True, repos=origins, variants=variants)
+                for pt, summ, cats, states, attrs in res:
+                        found = True
+                        entry = {}
+                        pub, stem, ver = pt
+                        entry["pub"] = pub
+                        entry["pkg"] = stem
+                        entry["version"] = ver
+                        entry["summary"] = summ
+
+                        stateslist = []
+                        for sentry in state_map:
+                                for s, v in sentry:
+                                        if s in states:
+                                                stateslist.append(v)
+                                                break
+                        entry["states"] = stateslist
+                        data.append(entry)
+                if not found and not pargs:
+                        if pkg_list == api_inst.LIST_INSTALLED:
+                                if not quiet:
+                                        err = {"reason":
+                                            _("no packages installed")}
+                                        errors_json.append(err)
+                                api_inst.log_operation_end(
+                                    result=RESULT_NOTHING_TO_DO)
+                        elif pkg_list == api_inst.LIST_INSTALLED_NEWEST:
+                                if not quiet:
+                                        err = {"reason": _("no packages "
+                                            "installed or available for "
+                                            "installation")}
+                                        errors_json.append(err)
+                                api_inst.log_operation_end(
+                                    result=RESULT_NOTHING_TO_DO)
+                        elif pkg_list == api_inst.LIST_UPGRADABLE:
+                                if not quiet:
+                                        img = api_inst._img
+                                        cat = img.get_catalog(
+                                            img.IMG_CATALOG_INSTALLED)
+                                        if cat.package_count > 0:
+                                                err = {"reason":
+                                                    _("no packages have "
+                                                    "newer versions "
+                                                    "available")}
+                                        else:
+                                                err = {"reason":
+                                                    _("no packages are "
+                                                    "installed")}
+                                        errors_json.append(err)
+                                api_inst.log_operation_end(
+                                    result=RESULT_NOTHING_TO_DO)
+                        else:
+                                api_inst.log_operation_end(
+                                    result=RESULT_NOTHING_TO_DO)
+                        return __prepare_json(EXIT_OOPS,
+                            errors=errors_json)
+
+                api_inst.log_operation_end()
+                return __prepare_json(EXIT_OK, data=data,
+                    errors=errors_json)
+        except (api_errors.InvalidPackageErrors,
+            api_errors.ActionExecutionError,
+            api_errors.PermissionsException) as e:
+                _error_json(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, data=data,
+                    errors=errors_json)
+        except api_errors.CatalogRefreshException as e:
+                _collect_catalog_failures(e, errors=errors_json)
+                return __prepare_json(EXIT_OOPS, data=data,
+                    errors=errors_json)
+        except api_errors.InventoryException as e:
+                if e.illegal:
+                        for i in e.illegal:
+                                _error_json(i, errors_json=errors_json)
+                        api_inst.log_operation_end(
+                            result=RESULT_FAILED_BAD_REQUEST)
+                        return __prepare_json(EXIT_OOPS, data=data,
+                            errors=errors_json)
+
+                if quiet:
+                        # Collect nothing.
+                        pass
+                elif pkg_list == api.ImageInterface.LIST_ALL or \
+                    pkg_list == api.ImageInterface.LIST_NEWEST:
+                        _error_json(_("no known packages matching:\n  {0}"
+                            ).format("\n  ".join(e.notfound)), cmd=op,
+                            errors_json=errors_json,
+                            errorType="inventory")
+                elif pkg_list == api.ImageInterface.LIST_INSTALLED_NEWEST:
+                        _error_json(_("no packages matching the following "
+                            "patterns are allowed by installed "
+                            "incorporations, or image variants that are known "
+                            "or installed\n  {0}").format(
+                            "\n  ".join(e.notfound)), cmd=op,
+                            errors_json=errors_json,
+                            errorType="inventory_extra")
+                elif pkg_list == api.ImageInterface.LIST_UPGRADABLE:
+                        # Create lists of the packages that are up to
+                        # date and of those that are not installed.
+                        no_updates = []
+                        not_installed = []
+                        try:
+                                for entry in api_inst.get_pkg_list(
+                                    api.ImageInterface.LIST_INSTALLED,
+                                    patterns=e.notfound, raise_unmatched=True):
+                                        pub, stem, ver = entry[0]
+                                        no_updates.append(stem)
+                        except api_errors.InventoryException as exc:
+                                not_installed = exc.notfound
+
+                        err_str = ""
+                        if not_installed:
+                                err_str = _("no packages matching the "
+                                    "following patterns are installed:\n  {0}"
+                                    ).format("\n  ".join(not_installed))
+
+                        if no_updates:
+                                err_str = err_str + _("no updates are "
+                                    "available for the following packages:\n  "
+                                    "{0}").format("\n  ".join(no_updates))
+                        if err_str:
+                                _error_json(err_str, cmd=op,
+                                    errors_json=errors_json,
+                                    errorType="inventory")
+                else:
+                        _error_json(_("no packages matching the following "
+                            "patterns are installed:\n  {0}").format(
+                            "\n  ".join(e.notfound)), cmd=op,
+                            errors_json=errors_json,
+                            errorType="inventory")
+
+                if found and e.notfound:
+                        # Only some patterns matched.
+                        api_inst.log_operation_end()
+                        return __prepare_json(EXIT_PARTIAL, data=data,
+                            errors=errors_json)
+                api_inst.log_operation_end(result=RESULT_NOTHING_TO_DO)
+                return __prepare_json(EXIT_OOPS, data=data, errors=errors_json)
+
+def _get_tracker(prog_delay=PROG_DELAY, prog_tracker=None):
+        if prog_tracker:
+                return prog_tracker
+        elif global_settings.client_output_parsable_version is not None:
+                progresstracker = progress.NullProgressTracker()
+        elif global_settings.client_output_quiet:
+                progresstracker = progress.QuietProgressTracker()
+        elif global_settings.client_output_progfd:
+                # This logic handles linked images: for linked children
+                # we elide the progress output.
+                output_file = os.fdopen(global_settings.client_output_progfd,
+                    "w")
+                child_tracker = progress.LinkedChildProgressTracker(
+                    output_file=output_file)
+                dot_tracker = progress.DotProgressTracker(
+                    term_delay=prog_delay, output_file=output_file)
+                progresstracker = progress.MultiProgressTracker(
+                    [child_tracker, dot_tracker])
+        else:
+                try:
+                        progresstracker = progress.FancyUNIXProgressTracker(
+                            term_delay=prog_delay)
+                except progress.ProgressTrackerException:
+                        progresstracker = progress.CommandLineProgressTracker(
+                            term_delay=prog_delay)
+        return progresstracker
+
+def _accept_plan_licenses(api_inst):
+        """Helper function that marks all licenses for the current plan as
+        accepted if they require acceptance."""
+
+        plan = api_inst.describe()
+        for pfmri, src, dest, accepted, displayed in plan.get_licenses():
+                if not dest.must_accept:
+                        continue
+                api_inst.set_plan_license_status(pfmri, dest.license,
+                    accepted=True)
+
+display_plan_options = ["basic", "fmris", "variants/facets", "services",
+    "actions", "boot-archive"]
+
+def __get_parsable_plan(api_inst, parsable_version, child_images=None):
+        """Display the parsable version of the plan."""
+
+        assert parsable_version == 0, "parsable_version was {0!r}".format(
+            parsable_version)
+        plan = api_inst.describe()
+        # Set the default values.
+        added_fmris = []
+        removed_fmris = []
+        changed_fmris = []
+        affected_fmris = []
+        backup_be_created = False
+        new_be_created = False
+        backup_be_name = None
+        be_name = None
+        boot_archive_rebuilt = False
+        be_activated = True
+        space_available = None
+        space_required = None
+        facets_changed = []
+        variants_changed = []
+        services_affected = []
+        mediators_changed = []
+        editables_changed = []
+        licenses = []
+        if child_images is None:
+                child_images = []
+        release_notes = []
+
+        if plan:
+                for rem, add in plan.get_changes():
+                        assert rem is not None or add is not None
+                        if rem is not None and add is not None:
+                                # Lists of lists are used here because json will
+                                # convert lists of tuples into lists of lists
+                                # anyway.
+                                if rem.fmri == add.fmri:
+                                        affected_fmris.append(str(rem))
+                                else:
+                                        changed_fmris.append(
+                                            [str(rem), str(add)])
+                        elif rem is not None:
+                                removed_fmris.append(str(rem))
+                        else:
+                                added_fmris.append(str(add))
+                variants_changed, facets_changed = plan.varcets
+                backup_be_created = plan.backup_be
+                new_be_created = plan.new_be
+                backup_be_name = plan.backup_be_name
+                be_name = plan.be_name
+                boot_archive_rebuilt = plan.update_boot_archive
+                be_activated = plan.activate_be
+                space_available = plan.bytes_avail
+                space_required = plan.bytes_added
+                services_affected = plan.services
+                mediators_changed = plan.mediators
+
+                emoved, eremoved, einstalled, eupdated = \
+                    plan.get_editable_changes()
+
+                # Lists of lists are used here to ensure a consistent ordering
+                # and because tuples will be converted to lists anyway; a
+                # dictionary would be more logical for the top level entries,
+                # but would make testing more difficult and this is a small,
+                # known set anyway.
+                emoved = [[e for e in entry] for entry in emoved]
+                eremoved = [src for (src, dest) in eremoved]
+                einstalled = [dest for (src, dest) in einstalled]
+                eupdated = [dest for (src, dest) in eupdated]
+                if emoved:
+                        editables_changed.append(["moved", emoved])
+                if eremoved:
+                        editables_changed.append(["removed", eremoved])
+                if einstalled:
+                        editables_changed.append(["installed", einstalled])
+                if eupdated:
+                        editables_changed.append(["updated", eupdated])
+
+                for n in plan.get_release_notes():
+                        release_notes.append(n)
+
+                for dfmri, src_li, dest_li, acc, disp in \
+                    plan.get_licenses():
+                        src_tup = None
+                        if src_li:
+                                li_txt = misc.decode(src_li.get_text())
+                                src_tup = (str(src_li.fmri), src_li.license,
+                                    li_txt, src_li.must_accept,
+                                    src_li.must_display)
+                        dest_tup = None
+                        if dest_li:
+                                li_txt = misc.decode(dest_li.get_text())
+                                dest_tup = (str(dest_li.fmri),
+                                    dest_li.license, li_txt,
+                                    dest_li.must_accept, dest_li.must_display)
+                        licenses.append(
+                            (str(dfmri), src_tup, dest_tup))
+                        api_inst.set_plan_license_status(dfmri, dest_li.license,
+                            displayed=True)
+
+        # The image name for the parent image is always None.  If this image is
+        # a child image, then the image name will be set when the parent image
+        # processes this dictionary.
+        ret = {
+            "activate-be": be_activated,
+            "add-packages": sorted(added_fmris),
+            "affect-packages": sorted(affected_fmris),
+            "affect-services": sorted(services_affected),
+            "backup-be-name": backup_be_name,
+            "be-name": be_name,
+            "boot-archive-rebuild": boot_archive_rebuilt,
+            "change-facets": sorted(facets_changed),
+            "change-editables": editables_changed,
+            "change-mediators": sorted(mediators_changed),
+            "change-packages": sorted(changed_fmris),
+            "change-variants": sorted(variants_changed),
+            "child-images": child_images,
+            "create-backup-be": backup_be_created,
+            "create-new-be": new_be_created,
+            "image-name": None,
+            "licenses": sorted(licenses),
+            "release-notes": release_notes,
+            "remove-packages": sorted(removed_fmris),
+            "space-available": space_available,
+            "space-required": space_required,
+            "version": parsable_version,
+        }
+        return ret
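+
+# Example (hypothetical, abbreviated) of the dictionary returned above:
+#
+#     {"activate-be": True, "add-packages": ["pkg://..."],
+#      "create-new-be": False, "image-name": None, "version": 0, ...}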
+
+def __api_alloc(pkg_image, orig_cwd, prog_delay=PROG_DELAY, prog_tracker=None,
+    errors_json=None):
+        """Allocate API instance."""
+
+        provided_image_dir = True
+        pkg_image_used = False
+
+        imgdir = None
+        if pkg_image:
+                imgdir = pkg_image
+
+        if imgdir is None:
+                imgdir, provided_image_dir = api.get_default_image_root(
+                    orig_cwd=orig_cwd)
+                if os.environ.get("PKG_IMAGE"):
+                        # It's assumed that this has been checked by the above
+                        # function call and hasn't been removed from the
+                        # environment.
+                        pkg_image_used = True
+
+        if not imgdir:
+                if errors_json:
+                        err = {"reason": "Could not find image. Set the "
+                            "pkg_image property to the\nlocation of an image."}
+                        errors_json.append(err)
+                return
+
+        progresstracker = _get_tracker(prog_delay=prog_delay,
+            prog_tracker=prog_tracker)
+        try:
+                return api.ImageInterface(imgdir, CLIENT_API_VERSION,
+                    progresstracker, None, PKG_CLIENT_NAME,
+                    exact_match=provided_image_dir)
+        except api_errors.ImageNotFoundException as e:
+                if e.user_specified:
+                        if pkg_image_used:
+                                _error_json(_("No image rooted at '{0}' "
+                                    "(set by $PKG_IMAGE)").format(e.user_dir),
+                                    errors=errors_json)
+                        else:
+                                _error_json(_("No image rooted at '{0}'")
+                                   .format(e.user_dir), errors=errors_json)
+                else:
+                        _error_json(_("No image found."), errors=errors_json)
+                return
+        except api_errors.PermissionsException as e:
+                _error_json(e, errors_json=errors_json)
+                return
+        except api_errors.ImageFormatUpdateNeeded as e:
+                _format_update_error(e, errors_json=errors_json)
+                return
+
+def __api_prepare_plan(operation, api_inst):
+        # Exceptions which happen here are printed in the above level, with
+        # or without some extra decoration done here.
+        # XXX would be nice to kick the progress tracker.
+        errors_json = []
+        try:
+                api_inst.prepare()
+        except (api_errors.PermissionsException, api_errors.UnknownErrors) as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.TransportError:
+                raise
+        except api_errors.PlanLicenseErrors as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json(_("\nThe following packages require their "
+                    "licenses to be accepted before they can be installed "
+                    "or updated:\n {0}").format(str(e)),
+                    errors_json=errors_json, errorType="plan_license")
+                return __prepare_json(EXIT_LICENSE, errors=errors_json)
+        except api_errors.InvalidPlanError as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ImageFormatUpdateNeeded as e:
+                _format_update_error(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ImageInsufficentSpace as e:
+                _error_json(str(e), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except KeyboardInterrupt:
+                raise
+        except:
+                _error_json(_("\nAn unexpected error happened while preparing "
+                    "for {0}:").format(operation))
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        return __prepare_json(EXIT_OK)
+
+def __api_execute_plan(operation, api_inst):
+        rval = None
+        errors_json = []
+        try:
+                api_inst.execute_plan()
+                pd = api_inst.describe()
+                if pd.actuator_timed_out:
+                        rval = __prepare_json(EXIT_ACTUATOR)
+                else:
+                        rval = __prepare_json(EXIT_OK)
+        except RuntimeError as e:
+                _error_json(_("{operation} failed: {err}").format(
+                    operation=operation, err=e), errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except (api_errors.InvalidPlanError,
+            api_errors.ActionExecutionError,
+            api_errors.InvalidPackageErrors) as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.LinkedImageException as e:
+                _error_json(_("{operation} failed (linked image exception(s))"
+                    ":\n{err}").format(operation=operation, err=e),
+                    errors_json=errors_json)
+                rval = __prepare_json(e.lix_exitrv, errors=errors_json)
+        except api_errors.ImageUpdateOnLiveImageException:
+                _error_json(_("{0} cannot be done on live image").format(
+                    operation), errors_json=errors_json)
+                rval = __prepare_json(EXIT_NOTLIVE, errors=errors_json)
+        except api_errors.RebootNeededOnLiveImageException:
+                _error_json(_("Requested \"{0}\" operation would affect files "
+                    "that cannot be modified in live image.\n"
+                    "Please retry this operation on an alternate boot "
+                    "environment.").format(operation), errors=errors_json)
+                rval = __prepare_json(EXIT_NOTLIVE, errors=errors_json)
+        except api_errors.CorruptedIndexException as e:
+                _error_json("The search index appears corrupted.  Please "
+                    "rebuild the index with 'pkg rebuild-index'.",
+                    errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ProblematicPermissionsIndexException as e:
+                _error_json(str(e), errors_json=errors_json)
+                _error_json(_("\n(Failure to consistently execute pkg commands "
+                    "as a privileged user is often a source of this problem.)"),
+                    errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except (api_errors.PermissionsException, api_errors.UnknownErrors) as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ImageFormatUpdateNeeded as e:
+                _format_update_error(e, errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.BEException as e:
+                _error_json(e, errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.WrapSuccessfulIndexingException:
+                raise
+        except api_errors.ImageInsufficentSpace as e:
+                _error_json(str(e), errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except Exception as e:
+                _error_json(_("An unexpected error happened during "
+                    "{operation}: {err}").format(
+                    operation=operation, err=e), errors_json=errors_json)
+                rval = __prepare_json(EXIT_OOPS, errors=errors_json)
+        finally:
+                exc_type = exc_value = exc_tb = None
+                if rval is None:
+                        # Store original exception so that the real cause of
+                        # failure can be raised if this fails.
+                        exc_type, exc_value, exc_tb = sys.exc_info()
+
+                try:
+                        salvaged = api_inst.describe().salvaged
+                        newbe = api_inst.describe().new_be
+                        stat = None
+                        if rval:
+                                stat = rval["status"]
+                        if salvaged and (stat == EXIT_OK or not newbe):
+                                # Only show salvaged file list if populated
+                                # and operation was successful, or if operation
+                                # failed and a new BE was not created for
+                                # the operation.
+                                err = _("\nThe following "
+                                    "unexpected or editable files and "
+                                    "directories were\n"
+                                    "salvaged while executing the requested "
+                                    "package operation; they\nhave been moved "
+                                    "to the displayed location in the image:\n")
+                                for opath, spath in salvaged:
+                                        err += "  {0} -> {1}\n".format(opath,
+                                            spath)
+                                errors_json.append({"info": err})
+                except Exception:
+                        if rval is not None:
+                                # Only raise exception encountered here if the
+                                # exception previously raised was suppressed.
+                                raise
+
+                if exc_value or exc_tb:
+                        raise exc_value, None, exc_tb
+
+        return rval
+
+def __api_plan_exception(op, noexecute, verbose, api_inst, errors_json=None,
+    display_plan_cb=None):
+        # Avoid a shared mutable default argument.
+        if errors_json is None:
+                errors_json = []
+        e_type, e, e_traceback = sys.exc_info()
+
+        if e_type == api_errors.ImageNotFoundException:
+                _error_json(_("No image rooted at '{0}'").format(e.user_dir),
+                    cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == api_errors.InventoryException:
+                _error_json("\n" + _("{operation} failed (inventory exception):\n"
+                    "{err}").format(operation=op, err=e),
+                    errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if isinstance(e, api_errors.LinkedImageException):
+                _error_json(_("{operation} failed (linked image exception(s)):\n"
+                    "{err}").format(operation=op, err=e),
+                    errors_json=errors_json)
+                return __prepare_json(e.lix_exitrv, errors=errors_json)
+        if e_type == api_errors.IpkgOutOfDateException:
+                error ={"info": _("""\
+WARNING: pkg(5) appears to be out of date, and should be updated before
+running {op}.  Please update pkg(5) by executing 'pkg install
+pkg:/package/pkg' as a privileged user and then retry the {op}."""
+                    ).format(**locals())}
+                errors_json.append(error)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == api_errors.NonLeafPackageException:
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == api_errors.CatalogRefreshException:
+                _collect_catalog_failures(e, errors=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == api_errors.ConflictingActionErrors:
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                if verbose and display_plan_cb:
+                        display_plan_cb(api_inst, verbose=verbose,
+                            noexecute=noexecute, plan_only=True)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type in (api_errors.InvalidPlanError,
+            api_errors.ReadOnlyFileSystemException,
+            api_errors.ActionExecutionError,
+            api_errors.InvalidPackageErrors):
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == api_errors.ImageFormatUpdateNeeded:
+                _format_update_error(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        if e_type == api_errors.ImageUpdateOnLiveImageException:
+                _error_json("\n" + _("The proposed operation cannot be "
+                    "performed on a live image."), cmd=op,
+                    errors_json=errors_json)
+                return __prepare_json(EXIT_NOTLIVE, errors=errors_json)
+
+        if issubclass(e_type, api_errors.BEException):
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        if e_type == api_errors.PlanCreationException:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                txt = str(e)
+                if e.multiple_matches:
+                        txt += "\n\n" + _("Please provide one of the package "
+                            "FMRIs listed above to the install command.")
+                _error_json("\n" + txt, cmd=op, errors_json=errors_json)
+                if verbose:
+                        err_txt = "\n".join(e.verbose_info)
+                        if err_txt:
+                                errors_json.append({"reason": err_txt})
+                if e.invalid_mediations:
+                        # Bad user input for mediation.
+                        return __prepare_json(EXIT_BADOPT, errors=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        if isinstance(e, (api_errors.CertificateError,
+            api_errors.UnknownErrors,
+            api_errors.PermissionsException,
+            api_errors.InvalidPropertyValue,
+            api_errors.InvalidResourceLocation)):
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if e_type == fmri.IllegalFmri:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        if isinstance(e, api_errors.SigningException):
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), cmd=op, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        # if we didn't deal with the exception above, pass it on.
+        raise
+        # NOTREACHED
+
+def __api_plan(_op, _api_inst, _accept=False, _li_ignore=None, _noexecute=False,
+    _omit_headers=False, _origins=None, _parsable_version=None, _quiet=False,
+    _quiet_plan=False, _review_release_notes=False, _show_licenses=False,
+    _stage=API_STAGE_DEFAULT, _verbose=0, display_plan_cb=None, logger=None,
+    **kwargs):
+
+        # All the api interface functions that we invoke have some
+        # common arguments.  Set those up now.
+        if _op not in (PKG_OP_REVERT, PKG_OP_FIX, PKG_OP_DEHYDRATE,
+            PKG_OP_REHYDRATE):
+                kwargs["li_ignore"] = _li_ignore
+        kwargs["noexecute"] = _noexecute
+        if _origins:
+                kwargs["repos"] = _origins
+        if _stage != API_STAGE_DEFAULT:
+                kwargs["pubcheck"] = False
+
+        # display plan debugging information
+        if _verbose > 2:
+                DebugValues.set_value("plan", "True")
+
+        # plan the requested operation
+
+        if _op == PKG_OP_ATTACH:
+                api_plan_func = _api_inst.gen_plan_attach
+        elif _op in [PKG_OP_CHANGE_FACET, PKG_OP_CHANGE_VARIANT]:
+                api_plan_func = _api_inst.gen_plan_change_varcets
+        elif _op == PKG_OP_DEHYDRATE:
+                api_plan_func = _api_inst.gen_plan_dehydrate
+        elif _op == PKG_OP_DETACH:
+                api_plan_func = _api_inst.gen_plan_detach
+        elif _op == PKG_OP_EXACT_INSTALL:
+                api_plan_func = _api_inst.gen_plan_exact_install
+        elif _op == PKG_OP_FIX:
+                api_plan_func = _api_inst.gen_plan_fix
+        elif _op == PKG_OP_INSTALL:
+                api_plan_func = _api_inst.gen_plan_install
+        elif _op == PKG_OP_REHYDRATE:
+                api_plan_func = _api_inst.gen_plan_rehydrate
+        elif _op == PKG_OP_REVERT:
+                api_plan_func = _api_inst.gen_plan_revert
+        elif _op == PKG_OP_SYNC:
+                api_plan_func = _api_inst.gen_plan_sync
+        elif _op == PKG_OP_UNINSTALL:
+                api_plan_func = _api_inst.gen_plan_uninstall
+        elif _op == PKG_OP_UPDATE:
+                api_plan_func = _api_inst.gen_plan_update
+        else:
+                raise RuntimeError("__api_plan() invalid op: {0}".format(_op))
+
+        errors_json = []
+        planned_self = False
+        child_plans = []
+        try:
+                for pd in api_plan_func(**kwargs):
+                        if planned_self:
+                                # we don't display anything for child images
+                                # since they currently do their own display
+                                # work (unless parsable output is requested).
+                                child_plans.append(pd)
+                                continue
+
+                        # the first plan description is always for ourself.
+                        planned_self = True
+                        pkg_timer.record("planning", logger=logger)
+
+                        # if we're in parsable mode don't display anything
+                        # until after we finish planning for all children
+                        if _parsable_version is None and display_plan_cb:
+                                display_plan_cb(_api_inst, [], _noexecute,
+                                    _omit_headers, _op, _parsable_version,
+                                    _quiet, _quiet_plan, _show_licenses,
+                                    _stage, _verbose,
+                                    get_parsable_plan_cb=__get_parsable_plan)
+
+                        # if requested accept licenses for child images.  we
+                        # have to do this before recursing into children.
+                        if _accept:
+                                _accept_plan_licenses(_api_inst)
+        except:
+                ret = __api_plan_exception(_op, _noexecute, _verbose,
+                    _api_inst, errors_json=errors_json,
+                    display_plan_cb=display_plan_cb)
+                if ret["status"] != EXIT_OK:
+                        pkg_timer.record("planning", logger=logger)
+                        return ret
+
+        if not planned_self:
+                # if we got an exception we didn't do planning for children
+                pkg_timer.record("planning", logger=logger)
+
+        elif _api_inst.isparent(_li_ignore):
+                # if we didn't get an exception and we're a parent image then
+                # we should have done planning for child images.
+                pkg_timer.record("planning children", logger=logger)
+
+        # if we didn't display our own plan (due to an exception), or if we're
+        # in parsable mode, then display our plan now.
+        parsable_plan = None
+        if not planned_self or _parsable_version is not None:
+                try:
+                        if display_plan_cb:
+                                display_plan_cb(_api_inst, child_plans,
+                                    _noexecute, _omit_headers, _op,
+                                    _parsable_version, _quiet, _quiet_plan,
+                                    _show_licenses, _stage, _verbose,
+                                    get_parsable_plan_cb=__get_parsable_plan)
+                        else:
+                                parsable_plan = __get_parsable_plan(_api_inst,
+                                    _parsable_version, child_plans)
+                                # Convert to json.
+                                parsable_plan = json.loads(json.dumps(
+                                    parsable_plan))
+                except api_errors.ApiException as e:
+                        _error_json(e, cmd=_op, errors_json=errors_json)
+                        return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        # if we didn't accept licenses (due to an exception) then do that now.
+        if not planned_self and _accept:
+                _accept_plan_licenses(_api_inst)
+
+        if parsable_plan:
+                data = {"plan": parsable_plan}
+                return __prepare_json(EXIT_OK, data=data)
+        return __prepare_json(EXIT_OK)
+
+def __api_plan_file(api_inst):
+        """Return the path to the PlanDescription save file."""
+
+        plandir = api_inst.img_plandir
+        return os.path.join(plandir, "plandesc")
+
+def __api_plan_save(api_inst, logger=None):
+        """Save an image plan to a file."""
+
+        # get a pointer to the plan
+        plan = api_inst.describe()
+
+        # save the PlanDescription to a file
+        path = __api_plan_file(api_inst)
+        oflags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY
+        try:
+                fd = os.open(path, oflags, 0644)
+                with os.fdopen(fd, "wb") as fobj:
+                        plan._save(fobj)
+
+                # clean up any old-style imageplan save files
+                for f in os.listdir(api_inst.img_plandir):
+                        path = os.path.join(api_inst.img_plandir, f)
+                        if re.search("^actions\.[0-9]+\.json$", f):
+                                os.unlink(path)
+                        if re.search("^pkgs\.[0-9]+\.json$", f):
+                                os.unlink(path)
+        except OSError as e:
+                raise api_errors._convert_error(e)
+
+        pkg_timer.record("saving plan", logger=logger)
+
+def __api_plan_load(api_inst, stage, origins, logger=None):
+        """Loan an image plan from a file."""
+
+        # load an existing plan
+        path = __api_plan_file(api_inst)
+        plan = api.PlanDescription()
+        try:
+                with open(path) as fobj:
+                        plan._load(fobj)
+        except OSError as e:
+                raise api_errors._convert_error(e)
+
+        pkg_timer.record("loading plan", logger=logger)
+
+        api_inst.reset()
+        api_inst.set_alt_repos(origins)
+        api_inst.load_plan(plan, prepared=(stage == API_STAGE_EXECUTE))
+        pkg_timer.record("re-initializing plan", logger=logger)
+
+        if stage == API_STAGE_EXECUTE:
+                __api_plan_delete(api_inst)
+
+def __api_plan_delete(api_inst):
+        """Delete an image plan file."""
+
+        path = __api_plan_file(api_inst)
+        try:
+                os.unlink(path)
+        except OSError as e:
+                raise api_errors._convert_error(e)
+
+def __api_op(_op, _api_inst, _accept=False, _li_ignore=None, _noexecute=False,
+    _origins=None, _parsable_version=None, _quiet=False,
+    _review_release_notes=False, _show_licenses=False,
+    _stage=API_STAGE_DEFAULT, _verbose=0, display_plan_cb=None, logger=None,
+    **kwargs):
+        """Do something that involves the api.
+
+        Arguments prefixed with '_' are primarily used within this
+        function.  All other arguments must be specified via keyword
+        assignment and will be passed directly on to the api
+        interfaces being invoked."""
+
+        data = {}
+        if _stage in [API_STAGE_DEFAULT, API_STAGE_PLAN]:
+                # create a new plan
+                ret = __api_plan(_op=_op, _api_inst=_api_inst,
+                    _accept=_accept, _li_ignore=_li_ignore,
+                    _noexecute=_noexecute, _origins=_origins,
+                    _parsable_version=_parsable_version, _quiet=_quiet,
+                    _review_release_notes=_review_release_notes,
+                    _show_licenses=_show_licenses, _stage=_stage,
+                    _verbose=_verbose, display_plan_cb=display_plan_cb,
+                    logger=logger, **kwargs)
+
+                if ret["status"] != EXIT_OK:
+                        return ret
+                if "data" in ret:
+                        data.update(ret["data"])
+
+                if not _noexecute and _stage == API_STAGE_PLAN:
+                        # We always save the plan, even if it is a noop.  We
+                        # do this because we want to be able to verify that we
+                        # can load and execute a noop plan.  (This mimics
+                        # normal api behavior which doesn't prevent an api
+                        # consumer from creating a noop plan and then
+                        # preparing and executing it.)
+                        __api_plan_save(_api_inst, logger=logger)
+                if _api_inst.planned_nothingtodo():
+                        return __prepare_json(EXIT_NOP)
+                if _noexecute or _stage == API_STAGE_PLAN:
+                        return __prepare_json(EXIT_OK)
+        else:
+                assert _stage in [API_STAGE_PREPARE, API_STAGE_EXECUTE]
+                __api_plan_load(_api_inst, _stage, _origins, logger=logger)
+
+        # Exceptions which happen here are printed in the above level,
+        # with or without some extra decoration done here.
+        if _stage in [API_STAGE_DEFAULT, API_STAGE_PREPARE]:
+                ret = __api_prepare_plan(_op, _api_inst)
+                pkg_timer.record("preparing", logger=logger)
+
+                if ret["status"] != EXIT_OK:
+                        return ret
+                if _stage == API_STAGE_PREPARE:
+                        return __prepare_json(EXIT_OK)
+
+        ret = __api_execute_plan(_op, _api_inst)
+        pkg_timer.record("executing", logger=logger)
+
+        if _review_release_notes and ret["status"] == EXIT_OK and \
+            _stage == API_STAGE_DEFAULT and _api_inst.solaris_image():
+                data["release_notes_url"] = misc.get_release_notes_url()
+                ret = __prepare_json(EXIT_OK, data=data, op=_op)
+        elif ret["status"] == EXIT_OK and data:
+                ret = __prepare_json(EXIT_OK, data=data, op=_op)
+
+        return ret
+
+def _exact_install(op, api_inst, pargs,
+    accept, backup_be, backup_be_name, be_activate, be_name, li_ignore,
+    li_parent_sync, new_be, noexecute, origins, parsable_version, quiet,
+    refresh_catalogs, reject_pats, show_licenses, update_index, verbose,
+    display_plan_cb=None, logger=None):
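+        """Attempt to take the specified packages to the INSTALLED state,
+        treating them as the complete set of desired packages ("pkg
+        exact-install")."""
+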
+        errors_json = []
+        if not pargs:
+                error = {"reason": _("at least one package name required")}
+                errors_json.append(error)
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+
+        rval, res = _get_fmri_args(api_inst, pargs, cmd=op,
+            errors_json=errors_json)
+        if not rval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        xrval, xres = _get_fmri_args(api_inst, reject_pats, cmd=op,
+            errors_json=errors_json)
+        if not xrval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
+            _noexecute=noexecute, _origins=origins, _quiet=quiet,
+            _show_licenses=show_licenses, _verbose=verbose,
+            backup_be=backup_be, backup_be_name=backup_be_name,
+            be_activate=be_activate, be_name=be_name,
+            li_parent_sync=li_parent_sync, new_be=new_be,
+            _parsable_version=parsable_version, pkgs_inst=pargs,
+            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
+            update_index=update_index, display_plan_cb=display_plan_cb,
+            logger=logger)
+
+def _install(op, api_inst, pargs, accept, act_timeout, backup_be,
+    backup_be_name, be_activate, be_name, li_ignore, li_erecurse,
+    li_parent_sync, new_be, noexecute, origins, parsable_version, quiet,
+    refresh_catalogs, reject_pats, show_licenses, stage, update_index,
+    verbose, display_plan_cb=None, logger=None):
+        """Attempt to take package specified to INSTALLED state.  The operands
+        are interpreted as glob patterns."""
+
+        errors_json = []
+        if not pargs:
+                error = {"reason": _("at least one package name required")}
+                errors_json.append(error)
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+
+        rval, res = _get_fmri_args(api_inst, pargs, cmd=op,
+            errors_json=errors_json)
+        if not rval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        xrval, xres = _get_fmri_args(api_inst, reject_pats, cmd=op,
+            errors_json=errors_json)
+        if not xrval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
+            _noexecute=noexecute, _origins=origins,
+            _parsable_version=parsable_version, _quiet=quiet,
+            _show_licenses=show_licenses, _stage=stage, _verbose=verbose,
+            act_timeout=act_timeout, backup_be=backup_be,
+            backup_be_name=backup_be_name, be_activate=be_activate,
+            be_name=be_name, li_erecurse=li_erecurse,
+            li_parent_sync=li_parent_sync, new_be=new_be, pkgs_inst=pargs,
+            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
+            update_index=update_index, display_plan_cb=display_plan_cb,
+            logger=logger)
+
+def _update(op, api_inst, pargs, accept, act_timeout, backup_be, backup_be_name,
+    be_activate, be_name, force, ignore_missing, li_ignore, li_erecurse,
+    li_parent_sync, new_be, noexecute, origins, parsable_version, quiet,
+    refresh_catalogs, reject_pats, show_licenses, stage, update_index, verbose,
+    display_plan_cb=None, logger=None):
+        """Attempt to take all installed packages specified to latest
+        version."""
+
+        errors_json = []
+        rval, res = _get_fmri_args(api_inst, pargs, cmd=op,
+            errors_json=errors_json)
+        if not rval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        xrval, xres = _get_fmri_args(api_inst, reject_pats, cmd=op,
+            errors_json=errors_json)
+        if not xrval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        if res:
+                # If there are specific installed packages to update,
+                # then take only those packages to the latest version
+                # allowed by the patterns specified.  (The versions
+                # specified can be older than what is installed.)
+                pkgs_update = pargs
+                review_release_notes = False
+        else:
+                # If no packages were specified, attempt to update all
+                # installed packages.
+                pkgs_update = None
+                review_release_notes = True
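+        # For example (hypothetical package name), "pkg update vim" takes
+        # only vim to the newest allowed version, while a bare "pkg update"
+        # updates every installed package and enables the release-notes
+        # review above.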
+
+        return __api_op(op, api_inst, _accept=accept, _li_ignore=li_ignore,
+            _noexecute=noexecute, _origins=origins,
+            _parsable_version=parsable_version, _quiet=quiet,
+            _review_release_notes=review_release_notes,
+            _show_licenses=show_licenses, _stage=stage, _verbose=verbose,
+            act_timeout=act_timeout, backup_be=backup_be,
+            backup_be_name=backup_be_name, be_activate=be_activate,
+            be_name=be_name, force=force, ignore_missing=ignore_missing,
+            li_erecurse=li_erecurse, li_parent_sync=li_parent_sync,
+            new_be=new_be, pkgs_update=pkgs_update,
+            refresh_catalogs=refresh_catalogs, reject_list=reject_pats,
+            update_index=update_index, display_plan_cb=display_plan_cb,
+            logger=logger)
+
+def _uninstall(op, api_inst, pargs,
+    act_timeout, backup_be, backup_be_name, be_activate, be_name,
+    ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be, noexecute,
+    parsable_version, quiet, stage, update_index, verbose,
+    display_plan_cb=None, logger=None):
+        """Attempt to take package specified to DELETED state."""
+
+        errors_json = []
+        if not pargs:
+                error = {"reason": _("at least one package name required")}
+                errors_json.append(error)
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+
+        rval, res = _get_fmri_args(api_inst, pargs, cmd=op,
+            errors_json=errors_json)
+        if not rval:
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        return __api_op(op, api_inst, _li_ignore=li_ignore,
+            _noexecute=noexecute, _parsable_version=parsable_version,
+            _quiet=quiet, _stage=stage, _verbose=verbose,
+            act_timeout=act_timeout, backup_be=backup_be,
+            backup_be_name=backup_be_name, be_activate=be_activate,
+            be_name=be_name, ignore_missing=ignore_missing,
+            li_erecurse=li_erecurse, li_parent_sync=li_parent_sync,
+            new_be=new_be, pkgs_to_uninstall=pargs, update_index=update_index,
+            display_plan_cb=display_plan_cb, logger=logger)
+
+def _publisher_set(op, api_inst, pargs, ssl_key, ssl_cert, origin_uri,
+    reset_uuid, add_mirrors, remove_mirrors, add_origins, remove_origins,
+    refresh_allowed, disable, sticky, search_before, search_after,
+    search_first, approved_ca_certs, revoked_ca_certs, unset_ca_certs,
+    set_props, add_prop_values, remove_prop_values, unset_props, repo_uri,
+    proxy_uri):
+        """Function to set publisher."""
+
+        name = None
+        errors_json = []
+        if len(pargs) == 0 and not repo_uri:
+                errors_json.append({"reason": _("requires a publisher name")})
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+        elif len(pargs) > 1:
+                errors_json.append({"reason": _("only one publisher name may "
+                    "be specified")})
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+        elif pargs:
+                name = pargs[0]
+
+        # Get sanitized SSL Cert/Key input values.
+        ssl_cert, ssl_key = _get_ssl_cert_key(api_inst.root, api_inst.is_zone,
+            ssl_cert, ssl_key)
+
+        if not repo_uri:
+                # Normal case.
+                ret_json = _set_pub_error_wrap(_add_update_pub, name, [],
+                    api_inst, name, disable=disable, sticky=sticky,
+                    origin_uri=origin_uri, add_mirrors=add_mirrors,
+                    remove_mirrors=remove_mirrors, add_origins=add_origins,
+                    remove_origins=remove_origins, ssl_cert=ssl_cert,
+                    ssl_key=ssl_key, search_before=search_before,
+                    search_after=search_after, search_first=search_first,
+                    reset_uuid=reset_uuid, refresh_allowed=refresh_allowed,
+                    set_props=set_props, add_prop_values=add_prop_values,
+                    remove_prop_values=remove_prop_values,
+                    unset_props=unset_props, approved_cas=approved_ca_certs,
+                    revoked_cas=revoked_ca_certs, unset_cas=unset_ca_certs,
+                    proxy_uri=proxy_uri)
+
+                if "errors" in ret_json:
+                        for err in ret_json["errors"]:
+                                _error_json(err["reason"], cmd=op,
+                                    errors_json=errors_json)
+                return __prepare_json(ret_json["status"], errors=errors_json)
+
+        # Automatic configuration via -p case.
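+        # This path backs "pkg set-publisher -p <repo_uri>", where the
+        # publisher configuration is retrieved from the repository itself.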
+        def get_pubs():
+                if proxy_uri:
+                        proxies = [publisher.ProxyURI(proxy_uri)]
+                else:
+                        proxies = []
+                repo = publisher.RepositoryURI(repo_uri,
+                    ssl_cert=ssl_cert, ssl_key=ssl_key, proxies=proxies)
+                return __prepare_json(EXIT_OK, data=api_inst.get_publisherdata(
+                    repo=repo))
+
+        ret_json = None
+        try:
+                ret_json = _set_pub_error_wrap(get_pubs, name,
+                    [api_errors.UnsupportedRepositoryOperation])
+        except api_errors.UnsupportedRepositoryOperation as e:
+                # Fail if the operation can't be done automatically.
+                _error_json(str(e), cmd=op, errors_json=errors_json,
+                    errorType="unsupported_repo_op")
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        else:
+                if ret_json["status"] != EXIT_OK and "errors" in ret_json:
+                        for err in ret_json["errors"]:
+                                _error_json(err["reason"], cmd=op,
+                                    errors_json=errors_json)
+                        return __prepare_json(ret_json["status"],
+                            errors=errors_json)
+        # For the automatic publisher configuration case, update or add
+        # publishers based on whether they exist and if they match any
+        # specified publisher prefix.
+        if "data" not in ret_json:
+                _error_json(_("""
+The specified repository did not contain any publisher configuration
+information.  This is likely the result of a repository configuration
+error.  Please contact the repository administrator for further
+assistance."""), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        pubs = ret_json["data"]
+        if name and name not in pubs:
+                known = [p.prefix for p in pubs]
+                unknown = [name]
+                e = api_errors.UnknownRepositoryPublishers(known=known,
+                    unknown=unknown, location=repo_uri)
+                _error_json(str(e), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        added = []
+        updated = []
+        failed = []
+
+        for src_pub in sorted(pubs):
+                prefix = src_pub.prefix
+                if name and prefix != name:
+                        # User didn't request this one.
+                        continue
+
+                src_repo = src_pub.repository
+                if not api_inst.has_publisher(prefix=prefix):
+                        add_origins = []
+                        if not src_repo or not src_repo.origins:
+                                # If the repository publisher configuration
+                                # didn't include configuration information
+                                # for the publisher's repositories, assume
+                                # that the origin for the new publisher
+                                # matches the URI provided.
+                                add_origins.append(repo_uri)
+
+                        # Any -p origins/mirrors returned from get_pubs()
+                        # should use the proxy we declared, if any.  (Use a
+                        # distinct loop variable here so the repo_uri
+                        # parameter isn't clobbered for later iterations.)
+                        if proxy_uri and src_repo:
+                                proxies = [publisher.ProxyURI(proxy_uri)]
+                                for uri in src_repo.origins:
+                                        uri.proxies = proxies
+                                for uri in src_repo.mirrors:
+                                        uri.proxies = proxies
+
+                        ret_json = _set_pub_error_wrap(_add_update_pub, name,
+                            [], api_inst, prefix, pub=src_pub,
+                            add_origins=add_origins, ssl_cert=ssl_cert,
+                            ssl_key=ssl_key, sticky=sticky,
+                            search_after=search_after,
+                            search_before=search_before,
+                            search_first=search_first,
+                            set_props=set_props,
+                            add_prop_values=add_prop_values,
+                            remove_prop_values=remove_prop_values,
+                            unset_props=unset_props, proxy_uri=proxy_uri)
+                        if ret_json["status"] == EXIT_OK:
+                                added.append(prefix)
+
+                        # When multiple publishers result from a single -p
+                        # operation, this ensures that the new publishers are
+                        # ordered correctly.
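+                        # e.g. if one -p repository returns publishers "a"
+                        # and "b" (hypothetical prefixes), "a" honors the
+                        # search options given on the command line and "b"
+                        # is then placed directly after "a".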
+                        search_first = False
+                        search_after = prefix
+                        search_before = None
+                else:
+                        add_origins = []
+                        add_mirrors = []
+                        dest_pub = api_inst.get_publisher(prefix=prefix,
+                            duplicate=True)
+                        dest_repo = dest_pub.repository
+                        if dest_repo.origins and \
+                            not dest_repo.has_origin(repo_uri):
+                                add_origins = [repo_uri]
+
+                        if not src_repo and not add_origins:
+                                # The repository doesn't have to provide origin
+                                # information for publishers.  If it doesn't,
+                                # the origin of every publisher returned is
+                                # assumed to match the URI that the user
+                                # provided.  Since this is an update case,
+                                # nothing special needs to be done.
+                                if not dest_repo.origins:
+                                        add_origins = [repo_uri]
+                        elif src_repo:
+                                # Avoid duplicates by adding only those mirrors
+                                # or origins not already known.
+                                add_mirrors = [
+                                    u.uri
+                                    for u in src_repo.mirrors
+                                    if u.uri not in dest_repo.mirrors
+                                ]
+                                add_origins = [
+                                    u.uri
+                                    for u in src_repo.origins
+                                    if u.uri not in dest_repo.origins
+                                ]
+
+                                # Special bits to update; for these, take the
+                                # new value as-is (don't attempt to merge).
+                                for prop in ("collection_type", "description",
+                                    "legal_uris", "name", "refresh_seconds",
+                                    "registration_uri", "related_uris"):
+                                        src_val = getattr(src_repo, prop)
+                                        if src_val is not None:
+                                                setattr(dest_repo, prop,
+                                                    src_val)
+
+                        # If an alias doesn't already exist, update it too.
+                        if src_pub.alias and not dest_pub.alias:
+                                dest_pub.alias = src_pub.alias
+
+                        ret_json = _set_pub_error_wrap(_add_update_pub, name,
+                            [], api_inst, prefix, pub=dest_pub,
+                            add_mirrors=add_mirrors, add_origins=add_origins,
+                            set_props=set_props,
+                            add_prop_values=add_prop_values,
+                            remove_prop_values=remove_prop_values,
+                            unset_props=unset_props, proxy_uri=proxy_uri)
+
+                        if ret_json["status"] == EXIT_OK:
+                                updated.append(prefix)
+
+                if ret_json["status"] != EXIT_OK:
+                        for err in ret_json["errors"]:
+                                failed.append((prefix, err["reason"]))
+                        continue
+
+        first = True
+        for pub, rmsg in failed:
+                if first:
+                        first = False
+                        _error_json("failed to add or update one or more "
+                            "publishers", cmd=op, errors_json=errors_json,
+                             errorType="publisher_set")
+                errors_json.append({"reason": "  {0}:\n{1}".format(pub, rmsg),
+                    "errtype": "publisher_set"})
+
+        data = {}
+        if added or updated:
+                if first:
+                        data["header"] = "pkg set-publisher:"
+                if added:
+                        data["added"] = added
+                if updated:
+                        data["updated"] = updated
+
+        if failed:
+                if len(failed) != len(pubs):
+                        # Not all publishers retrieved could be added or
+                        # updated.
+                        return __prepare_json(EXIT_PARTIAL, data=data,
+                            errors=errors_json)
+                return __prepare_json(EXIT_OOPS, data=data, errors=errors_json)
+
+        # Now that the configuration was successful, attempt to refresh the
+        # catalog data for all of the configured publishers.  If the refresh
+        # had been allowed earlier while configuring each publisher, then this
+        # wouldn't be necessary and some possibly invalid configuration could
+        # have been eliminated sooner.  However, that would be much slower as
+        # each refresh requires a client image state rebuild.
+        ret_json = __refresh(api_inst, added + updated)
+        ret_json["data"] = data
+        return ret_json
+
+def _publisher_unset(op, api_inst, pargs):
+        """Function to unset publishers."""
+
+        errors_json = []
+        if not pargs:
+                errors_json.append({"reason": _("at least one publisher must "
+                    "be specified")})
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+
+        errors = []
+        goal = len(pargs)
+        progtrack = api_inst.progresstracker
+        progtrack.job_start(progtrack.JOB_PKG_CACHE, goal=goal)
+        for name in pargs:
+                try:
+                        api_inst.remove_publisher(prefix=name, alias=name)
+                except api_errors.ImageFormatUpdateNeeded as e:
+                        _format_update_error(e, errors_json)
+                        return __prepare_json(EXIT_OOPS, errors=errors_json)
+                except (api_errors.PermissionsException,
+                    api_errors.PublisherError,
+                    api_errors.ModifyingSyspubException) as e:
+                        errors.append((name, e))
+                finally:
+                        progtrack.job_add_progress(progtrack.JOB_PKG_CACHE)
+
+        progtrack.job_done(progtrack.JOB_PKG_CACHE)
+        retcode = EXIT_OK
+        errors_json = []
+        if errors:
+                if len(errors) == len(pargs):
+                        # If the operation failed for every provided publisher
+                        # prefix or alias, complete failure occurred.
+                        retcode = EXIT_OOPS
+                else:
+                        # If the operation failed for only some of the provided
+                        # publisher prefixes or aliases, then partial failure
+                        # occurred.
+                        retcode = EXIT_PARTIAL
+
+                txt = ""
+                for name, err in errors:
+                        txt += "\n"
+                        txt += _("Removal failed for '{pub}': {msg}").format(
+                            pub=name, msg=err)
+                        txt += "\n"
+                _error_json(txt, cmd=op, errors_json=errors_json)
+
+        return __prepare_json(retcode, errors=errors_json)
+
+def _publisher_list(op, api_inst, pargs, omit_headers, preferred_only,
+    inc_disabled, output_format):
+        """pkg publishers. Note: publisher_a is a left-over parameter."""
+
+        errors_json = []
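+        # Each field_data entry is a three-item record: the output formats
+        # the field appears in, its column header, and its current value
+        # (filled in per publisher via set_value() below).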
+        field_data = {
+            "publisher" : [("default", "tsv"), _("PUBLISHER"), ""],
+            "attrs" : [("default",), "", ""],
+            "type" : [("default", "tsv"), _("TYPE"), ""],
+            "status" : [("default", "tsv"), _("STATUS"), ""],
+            "repo_loc" : [("default",), _("LOCATION"), ""],
+            "uri" : [("tsv",), _("URI"), ""],
+            "sticky" : [("tsv",), _("STICKY"), ""],
+            "enabled" : [("tsv",), _("ENABLED"), ""],
+            "syspub" : [("tsv",), _("SYSPUB"), ""],
+            "proxy" : [("tsv",), _("PROXY"), ""],
+            "proxied" : [("default",), _("P"), ""]
+        }
+
+        desired_field_order = (_("PUBLISHER"), "", _("STICKY"),
+                               _("SYSPUB"), _("ENABLED"), _("TYPE"),
+                               _("STATUS"), _("P"), _("LOCATION"))
+
+        # Custom sort function for preserving field ordering
+        def sort_fields(one, two):
+                return desired_field_order.index(get_header(one)) - \
+                    desired_field_order.index(get_header(two))
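+        # (A Python 2 cmp-style comparator: negative, zero or positive as
+        # one's header sorts before, with, or after two's.)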
+
+        # Functions for manipulating field_data records
+        def filter_default(record):
+                return "default" in record[0]
+
+        def filter_tsv(record):
+                return "tsv" in record[0]
+
+        def get_header(record):
+                return record[1]
+
+        def get_value(record):
+                return record[2]
+
+        def set_value(record, value):
+                record[2] = value
+
+        api_inst.progresstracker.set_purpose(
+            api_inst.progresstracker.PURPOSE_LISTING)
+
+        cert_cache = {}
+        def get_cert_info(ssl_cert):
+                if not ssl_cert:
+                        return None
+                if ssl_cert not in cert_cache:
+                        c = cert_cache[ssl_cert] = {}
+                        errors = c["errors"] = []
+                        times = c["info"] = {
+                            "effective": "",
+                            "expiration": "",
+                        }
+
+                        try:
+                                cert = misc.validate_ssl_cert(ssl_cert)
+                        except (EnvironmentError,
+                            api_errors.CertificateError,
+                            api_errors.PermissionsException) as e:
+                                # If the cert information can't be retrieved,
+                                # add the errors to a list and continue on.
+                                errors.append(e)
+                                c["valid"] = False
+                        else:
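+                                # Validity times are ASN.1-style UTC strings;
+                                # e.g. "20150403190253Z" parses to
+                                # 2015-04-03 19:02:53 UTC.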
+                                nb = cert.get_notBefore()
+                                t = time.strptime(nb, "%Y%m%d%H%M%SZ")
+                                nb = datetime.datetime.utcfromtimestamp(
+                                    calendar.timegm(t))
+                                times["effective"] = nb.strftime("%c")
+
+                                na = cert.get_notAfter()
+                                t = time.strptime(na, "%Y%m%d%H%M%SZ")
+                                na = datetime.datetime.utcfromtimestamp(
+                                    calendar.timegm(t))
+                                times["expiration"] = na.strftime("%c")
+                                c["valid"] = True
+
+                return cert_cache[ssl_cert]
+
+        retcode = EXIT_OK
+        data = {}
+        if len(pargs) == 0:
+                if preferred_only:
+                        pref_pub = api_inst.get_highest_ranked_publisher()
+                        if api_inst.has_publisher(pref_pub):
+                                pubs = [pref_pub]
+                        else:
+                                # Only publisher known is from an installed
+                                # package and is not configured in the image.
+                                pubs = []
+                else:
+                        pubs = [
+                            p for p in api_inst.get_publishers()
+                            if inc_disabled or not p.disabled
+                        ]
+                # Select the field filter for the default output
+                # format.
+                if output_format == "default":
+                        filter_func = filter_default
+
+                # Select the field filter and field order for the tsv
+                # output format.
+                if output_format == "tsv":
+                        filter_func = filter_tsv
+                        desired_field_order = (_("PUBLISHER"), "", _("STICKY"),
+                               _("SYSPUB"), _("ENABLED"), _("TYPE"),
+                               _("STATUS"), _("URI"), _("PROXY"))
+
+                # Extract our list of headers from the field_data
+                # dictionary.  Make sure they are extracted in the
+                # desired order by using our custom sort function.
+                hdrs = map(get_header, sorted(filter(filter_func,
+                           field_data.values()), sort_fields))
+
+                if not omit_headers:
+                        data["headers"] = hdrs
+                data["publishers"] = []
+                for p in pubs:
+                        # Store all our publisher related data in
+                        # field_data ready for output
+
+                        set_value(field_data["publisher"], p.prefix)
+                        # Setup the synthetic attrs field if the
+                        # format is default.
+                        if output_format == "default":
+                                pstatus = ""
+
+                                if not p.sticky:
+                                        pstatus_list = [_("non-sticky")]
+                                else:
+                                        pstatus_list = []
+
+                                if p.disabled:
+                                        pstatus_list.append(_("disabled"))
+                                if p.sys_pub:
+                                        pstatus_list.append(_("syspub"))
+                                if pstatus_list:
+                                        pstatus = "({0})".format(
+                                            ", ".join(pstatus_list))
+                                set_value(field_data["attrs"], pstatus)
+
+                        if p.sticky:
+                                set_value(field_data["sticky"], _("true"))
+                        else:
+                                set_value(field_data["sticky"], _("false"))
+                        if not p.disabled:
+                                set_value(field_data["enabled"], _("true"))
+                        else:
+                                set_value(field_data["enabled"], _("false"))
+                        if p.sys_pub:
+                                set_value(field_data["syspub"], _("true"))
+                        else:
+                                set_value(field_data["syspub"], _("false"))
+
+                        # Only show the selected repository's information in
+                        # summary view.
+                        if p.repository:
+                                origins = p.repository.origins
+                                mirrors = p.repository.mirrors
+                        else:
+                                origins = mirrors = []
+
+                        set_value(field_data["repo_loc"], "")
+                        set_value(field_data["proxied"], "")
+                        # Update field_data for each origin and output
+                        # a publisher record in our desired format.
+                        for uri in sorted(origins):
+                                # XXX get the real origin status
+                                set_value(field_data["type"], _("origin"))
+                                set_value(field_data["status"], _("online"))
+                                set_value(field_data["proxy"], "-")
+                                set_value(field_data["proxied"], "F")
+
+                                set_value(field_data["uri"], uri)
+
+                                if uri.proxies:
+                                        set_value(field_data["proxied"], _("T"))
+                                        set_value(field_data["proxy"],
+                                            ", ".join(
+                                            [proxy.uri
+                                            for proxy in uri.proxies]))
+                                if uri.system:
+                                        set_value(field_data["repo_loc"],
+                                            SYSREPO_HIDDEN_URI)
+                                else:
+                                        set_value(field_data["repo_loc"], uri)
+
+                                values = map(get_value,
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
+                                entry = [
+                                    e if isinstance(e, basestring)
+                                    else str(e) for e in values
+                                ]
+                                data["publishers"].append(entry)
+                        # Update field_data for each mirror and output
+                        # a publisher record in our desired format.
+                        for uri in mirrors:
+                                # XXX get the real mirror status
+                                set_value(field_data["type"], _("mirror"))
+                                set_value(field_data["status"], _("online"))
+                                set_value(field_data["proxy"], "-")
+                                set_value(field_data["proxied"], _("F"))
+
+                                set_value(field_data["uri"], uri)
+
+                                if uri.proxies:
+                                        set_value(field_data["proxied"],
+                                            _("T"))
+                                        set_value(field_data["proxy"],
+                                            ", ".join(
+                                            [p.uri for p in uri.proxies]))
+                                if uri.system:
+                                        set_value(field_data["repo_loc"],
+                                            SYSREPO_HIDDEN_URI)
+                                else:
+                                        set_value(field_data["repo_loc"], uri)
+
+                                values = map(get_value,
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
+                                entry = [
+                                    e if isinstance(e, basestring)
+                                    else str(e) for e in values
+                                ]
+                                data["publishers"].append(entry)
+
+                        if not origins and not mirrors:
+                                set_value(field_data["type"], "")
+                                set_value(field_data["status"], "")
+                                set_value(field_data["uri"], "")
+                                set_value(field_data["proxy"], "")
+                                values = map(get_value,
+                                    sorted(filter(filter_func,
+                                    field_data.values()), sort_fields)
+                                )
+                                entry = [
+                                    e if isinstance(e, basestring)
+                                    else str(e) for e in values
+                                ]
+                                data["publishers"].append(entry)
+        else:
+                def collect_ssl_info(uri, uri_data):
+                        retcode = EXIT_OK
+                        c = get_cert_info(uri.ssl_cert)
+                        uri_data["SSL Key"] = str(uri.ssl_key)
+                        uri_data["SSL Cert"] = str(uri.ssl_cert)
+
+                        if not c:
+                                return retcode
+
+                        if c["errors"]:
+                                retcode = EXIT_OOPS
+
+                        for e in c["errors"]:
+                                errors_json.append({"reason":
+                                    "\n" + str(e) + "\n", "errtype": "cert_info"})
+
+                        if c["valid"]:
+                                uri_data["Cert. Effective Date"] = \
+                                    str(c["info"]["effective"])
+                                uri_data["Cert. Expiration Date"] = \
+                                    str(c["info"]["expiration"])
+                        return retcode
+
+                def collect_repository(r, pub_data):
+                        retcode = EXIT_OK
+                        origins_data = []
+                        for uri in r.origins:
+                                origin_data = {"Origin URI": str(uri)}
+                                if uri.proxies:
+                                        origin_data["Proxy"] = \
+                                            [str(p.uri) for p in uri.proxies]
+                                rval = collect_ssl_info(uri, origin_data)
+                                if rval == EXIT_OOPS:
+                                        retcode = EXIT_PARTIAL
+                                origins_data.append(origin_data)
+
+                        mirrors_data = []
+                        for uri in r.mirrors:
+                                mirror_data = {"Mirror URI": str(uri)}
+                                if uri.proxies:
+                                        mirror_data["Proxy"] = \
+                                            [str(p.uri) for p in uri.proxies]
+                                rval = collect_ssl_info(uri, mirror_data)
+                                if rval == EXIT_OOPS:
+                                        retcode = EXIT_PARTIAL
+                                mirrors_data.append(mirror_data)
+                        if origins_data:
+                                pub_data["origins"] = origins_data
+                        if mirrors_data:
+                                pub_data["mirrors"] = mirrors_data
+                        return retcode
+
+                def collect_signing_certs(p, pub_data):
+                        if p.approved_ca_certs:
+                                pub_data["Approved CAs"] = [str(cert) for
+                                    cert in p.approved_ca_certs]
+                        if p.revoked_ca_certs:
+                                pub_data["Revoked CAs"] = [str(cert) for
+                                    cert in p.revoked_ca_certs]
+
+                for name in pargs:
+                        # Detailed listing for each requested publisher.
+                        pub = api_inst.get_publisher(prefix=name, alias=name)
+                        dt = api_inst.get_publisher_last_update_time(pub.prefix)
+                        if dt:
+                                dt = dt.strftime("%c")
+
+                        pub_data = {}
+                        pub_data["Publisher"] = pub.prefix
+                        pub_data["Alias"] = pub.alias
+
+                        rval = collect_repository(pub.repository, pub_data)
+                        if rval != EXIT_OK:
+                                # There was an error in displaying some
+                                # of the information about a repository.
+                                # However, continue on.
+                                retcode = rval
+
+                        pub_data["Client UUID"] = pub.client_uuid
+                        pub_data["Catalog Updated"] = dt
+                        collect_signing_certs(pub, pub_data)
+                        if pub.disabled:
+                                pub_data["Enabled"] = "No"
+                        else:
+                                pub_data["Enabled"] = "Yes"
+                        if pub.properties:
+                                pub_data["Properties"] = {}
+                                for k, v in pub.properties.iteritems():
+                                        pub_data["Properties"][k] = v
+                        if "publisher_details" not in data:
+                                data["publisher_details"] = [pub_data]
+                        else:
+                                data["publisher_details"].append(pub_data)
+
+        return __prepare_json(retcode, data=data, errors=errors_json, op=op)
+
+def _info(op, api_inst, pargs, display_license, info_local, info_remote,
+    origins, quiet):
+        """Display information about a package or packages.
+        """
+
+        errors_json = []
+        data = {}
+        if info_remote and not pargs:
+                error = {"reason": _("must request remote info for specific "
+                    "packages")}
+                errors_json.append(error)
+                return __prepare_json(EXIT_BADOPT, errors=errors_json, op=op)
+
+        err = EXIT_OK
+        # Reset the progress tracker here, because we may have to switch to a
+        # different tracker due to the options parse.
+        api_inst.progresstracker = _get_tracker()
+
+        api_inst.progresstracker.set_purpose(
+            api_inst.progresstracker.PURPOSE_LISTING)
+
+        info_needed = api.PackageInfo.ALL_OPTIONS
+        if not display_license:
+                info_needed = api.PackageInfo.ALL_OPTIONS - \
+                    frozenset([api.PackageInfo.LICENSES])
+        info_needed -= api.PackageInfo.ACTION_OPTIONS
+        info_needed |= frozenset([api.PackageInfo.DEPENDENCIES])
+
+        try:
+                ret = api_inst.info(pargs, info_local, info_needed,
+                    ranked=info_remote, repos=origins)
+        except api_errors.ImageFormatUpdateNeeded as e:
+                _format_update_error(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.NoPackagesInstalledException:
+                _error_json(_("no packages installed"), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ApiException as e:
+                _error_json(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        pis = ret[api.ImageInterface.INFO_FOUND]
+        notfound = ret[api.ImageInterface.INFO_MISSING]
+        illegals = ret[api.ImageInterface.INFO_ILLEGALS]
+
+        if illegals:
+                # No other results will be returned if illegal patterns were
+                # specified.
+                for i in illegals:
+                        errors_json.append({"reason": str(i)})
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        no_licenses = []
+        for i, pi in enumerate(pis):
+                if display_license:
+                        if not pi.licenses:
+                                no_licenses.append(pi.fmri)
+                        elif not quiet:
+                                lics = []
+                                for lic in pi.licenses:
+                                        lics.append(str(lic))
+                                if "licenses" not in data:
+                                        data["licenses"] = [lics]
+                                else:
+                                        data["licenses"].append(lics)
+                        continue
+
+                if quiet:
+                        continue
+
+                state = ""
+                if api.PackageInfo.INSTALLED in pi.states:
+                        state = _("Installed")
+                elif api.PackageInfo.UNSUPPORTED in pi.states:
+                        state = _("Unsupported")
+                else:
+                        state = _("Not installed")
+
+                lparen = False
+                if api.PackageInfo.OBSOLETE in pi.states:
+                        state += " ({0}".format(_("Obsolete"))
+                        lparen = True
+                elif api.PackageInfo.RENAMED in pi.states:
+                        state += " ({0}".format(_("Renamed"))
+                        lparen = True
+                if api.PackageInfo.FROZEN in pi.states:
+                        if lparen:
+                                state += ", {0})".format(_("Frozen"))
+                        else:
+                                state += " ({0})".format(_("Frozen"))
+                elif lparen:
+                        state += ")"
+
+                # XXX Consider using Python 2.7's collections.OrderedDict.
+                attr_list = []
+                seen = {}
+
+                def __append_attr_lists(label, values):
+                        """Given arguments label and values, either extend
+                        the existing list value or add new one to
+                        attr_list"""
+
+                        if not isinstance(values, list):
+                                values = [values]
+                        if label in seen:
+                                seen[label].extend(values)
+                        else:
+                                attr_list.append([label, values])
+                                seen[label] = values
+
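+                # e.g. two calls with the label _("Category") merge their
+                # values into one attr_list entry, preserving insertion
+                # order.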
+                __append_attr_lists(_("Name"), pi.pkg_stem)
+                __append_attr_lists(_("Summary"), pi.summary)
+                if pi.description:
+                        __append_attr_lists(_("Description"), pi.description)
+                if pi.category_info_list:
+                        verbose = len(pi.category_info_list) > 1
+                        category_info = [
+                            ci.__str__(verbose)
+                            for ci in pi.category_info_list
+                        ]
+                        __append_attr_lists(_("Category"), category_info)
+
+                __append_attr_lists(_("State"), state)
+
+                # Renamed packages have dependencies, but since those
+                # dependencies may not apply to this image's variants,
+                # they won't be returned.
+                if api.PackageInfo.RENAMED in pi.states:
+                        __append_attr_lists(_("Renamed to"), pi.dependencies)
+
+                # XXX even more info on the publisher would be nice?
+                __append_attr_lists(_("Publisher"), pi.publisher)
+                hum_ver = pi.get_attr_values("pkg.human-version")
+                if hum_ver and hum_ver[0] != str(pi.version):
+                        __append_attr_lists(_("Version"), "{0} ({1})".format(
+                            pi.version, hum_ver[0]))
+                else:
+                        __append_attr_lists(_("Version"), str(pi.version))
+
+                __append_attr_lists(_("Branch"), str(pi.branch))
+                __append_attr_lists(_("Packaging Date"), pi.packaging_date)
+                __append_attr_lists(_("Size"), misc.bytes_to_str(pi.size))
+                __append_attr_lists(_("FMRI"),
+                    pi.fmri.get_fmri(include_build=False))
+                # XXX add license/copyright info here?
+
+                addl_attr_list = {
+                    "info.keyword": _("Additional Keywords"),
+                    "info.upstream": _("Project Contact"),
+                    "info.maintainer": _("Project Maintainer"),
+                    "info.maintainer-url": _("Project Maintainer URL"),
+                    "pkg.detailed-url": _("Project URL"),
+                    "info.upstream-url": _("Project URL"),
+                    "info.repository-changeset": _("Repository Changeset"),
+                    "info.repository-url": _("Source URL"),
+                    "info.source-url": _("Source URL")
+                }
+
+                for item in sorted(pi.attrs, key=addl_attr_list.get):
+                        if item in addl_attr_list:
+                                __append_attr_lists(addl_attr_list[item],
+                                    pi.get_attr_values(item))
+
+                if "package_attrs" not in data:
+                        data["package_attrs"] = [attr_list]
+                else:
+                        data["package_attrs"].append(attr_list)
+
+        if notfound:
+                err_txt = ""
+                if pis:
+                        err = EXIT_PARTIAL
+                        if not quiet:
+                                err_txt += "\n"
+                else:
+                        err = EXIT_OOPS
+                if not quiet:
+                        if info_local:
+                                err_txt += _("""\
+pkg: info: no packages matching the following patterns you specified are
+installed on the system.  Try querying remotely instead:\n""")
+                        elif info_remote:
+                                err_txt += _("""\
+pkg: info: no packages matching the following patterns you specified were
+found in the catalog.  Try relaxing the patterns, refreshing, and/or
+examining the catalogs:\n""")
+                        err_txt += "\n"
+                        for p in notfound:
+                                err_txt += "        {0}".format(p)
+                        errors_json.append({"reason": err_txt,
+                            "errtype": "info_not_found"})
+
+        if no_licenses:
+                err_txt = ""
+                if len(no_licenses) == len(pis):
+                        err = EXIT_OOPS
+                else:
+                        err = EXIT_PARTIAL
+
+                if not quiet:
+                        err_txt += _("no license information could be found "
+                            "for the following packages:\n")
+                        for pfmri in no_licenses:
+                                err_txt += "\t{0}\n".format(pfmri)
+                        _error_json(err_txt, errors_json=errors_json,
+                            errorType="info_no_licenses")
+
+        return __prepare_json(err, errors=errors_json, data=data, op=op)
+
+def __refresh(api_inst, pubs, full_refresh=False):
+        """Private helper method for refreshing publisher data."""
+
+        errors_json = []
+        try:
+                # The user explicitly requested this refresh, so set the
+                # refresh to occur immediately.
+                api_inst.refresh(full_refresh=full_refresh,
+                    immediate=True, pubs=pubs)
+        except api_errors.ImageFormatUpdateNeeded as e:
+                _format_update_error(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.PublisherError as e:
+                _error_json(e, errors_json=errors_json)
+                _error_json(_("'pkg publisher' will show a list of publishers."
+                    ), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except (api_errors.UnknownErrors, api_errors.PermissionsException) as e:
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                _error_json("\n" + str(e), errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.CatalogRefreshException as e:
+                if _collect_catalog_failures(e, errors=errors_json) == 0:
+                        return __prepare_json(EXIT_OOPS, errors=errors_json)
+                return __prepare_json(EXIT_PARTIAL, errors=errors_json)
+        return __prepare_json(EXIT_OK)
+
+def _get_ssl_cert_key(root, is_zone, ssl_cert, ssl_key):
+        if ssl_cert is not None or ssl_key is not None:
+                # In the case of zones, the ssl cert given is assumed to
+                # be relative to the root of the image, not truly absolute.
+                orig_cwd = _get_orig_cwd()
+                if is_zone:
+                        if ssl_cert is not None:
+                                ssl_cert = os.path.abspath(
+                                    root + os.sep + ssl_cert)
+                        if ssl_key is not None:
+                                ssl_key = os.path.abspath(
+                                    root + os.sep + ssl_key)
+                elif orig_cwd:
+                        if ssl_cert and not os.path.isabs(ssl_cert):
+                                ssl_cert = os.path.normpath(os.path.join(
+                                    orig_cwd, ssl_cert))
+                        if ssl_key and not os.path.isabs(ssl_key):
+                                ssl_key = os.path.normpath(os.path.join(
+                                    orig_cwd, ssl_key))
+        return ssl_cert, ssl_key
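+# e.g. (hypothetical paths) in a zone rooted at /system/zones/z1/root, a
+# relative "-c certs/ca.pem" becomes /system/zones/z1/root/certs/ca.pem;
+# outside a zone, relative paths are resolved against the caller's
+# original cwd.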
+
+def _set_pub_error_wrap(func, pfx, raise_errors, *args, **kwargs):
+        """Helper function to wrap set-publisher private methods.  Returns
+        a tuple of (return value, message).  Callers should check the return
+        value for errors."""
+
+        errors_json = []
+        try:
+                return func(*args, **kwargs)
+        except api_errors.CatalogRefreshException as e:
+                for entry in raise_errors:
+                        if isinstance(e, entry):
+                                raise
+                txt = _("Could not refresh the catalog for {0}\n").format(
+                    pfx)
+                for pub, err in e.failed:
+                        txt += "   \n{0}".format(err)
+                errors_json.append({"reason": txt})
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.InvalidDepotResponseException as e:
+                for entry in raise_errors:
+                        if isinstance(e, entry):
+                                raise
+                if pfx:
+                        errors_json.append({"reason": _("The origin URIs for "
+                            "'{pubname}' do not appear to point to a valid "
+                            "pkg repository.\nPlease verify the repository's "
+                            "location and the client's network configuration."
+                            "\nAdditional details:\n\n{details}").format(
+                            pubname=pfx, details=str(e))})
+                        return __prepare_json(EXIT_OOPS, errors=errors_json)
+                errors_json.append({"reason": _("The specified URI does "
+                    "not appear to point to a valid pkg repository.\nPlease "
+                    "check the URI and the client's network configuration."
+                    "\nAdditional details:\n\n{0}").format(str(e))})
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ImageFormatUpdateNeeded as e:
+                for entry in raise_errors:
+                        if isinstance(e, entry):
+                                raise
+                _format_update_error(e, errors_json=errors_json)
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ApiException as e:
+                for entry in raise_errors:
+                        if isinstance(e, entry):
+                                raise
+                # Prepend a newline because otherwise the exception will
+                # be printed on the same line as the spinner.
+                errors_json.append({"reason": ("\n" + str(e))})
+                return __prepare_json(EXIT_OOPS, errors=errors_json)
+
+def _add_update_pub(api_inst, prefix, pub=None, disable=None, sticky=None,
+    origin_uri=None, add_mirrors=EmptyI, remove_mirrors=EmptyI,
+    add_origins=EmptyI, remove_origins=EmptyI, ssl_cert=None, ssl_key=None,
+    search_before=None, search_after=None, search_first=False,
+    reset_uuid=None, refresh_allowed=False,
+    set_props=EmptyI, add_prop_values=EmptyI,
+    remove_prop_values=EmptyI, unset_props=EmptyI, approved_cas=EmptyI,
+    revoked_cas=EmptyI, unset_cas=EmptyI, proxy_uri=None):
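+        """Add the publisher specified by prefix, or update it if it
+        already exists, using the provided settings.  Returns a JSON
+        result dict as produced by __prepare_json()."""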
+
+        repo = None
+        new_pub = False
+        errors_json = []
+        if not pub:
+                try:
+                        pub = api_inst.get_publisher(prefix=prefix,
+                            alias=prefix, duplicate=True)
+                        if reset_uuid:
+                                pub.reset_client_uuid()
+                        repo = pub.repository
+                except api_errors.UnknownPublisher as e:
+                        if not origin_uri and not add_origins and \
+                            (remove_origins or remove_mirrors or
+                            remove_prop_values or add_mirrors):
+                                errors_json.append({"reason": str(e)})
+                                return __prepare_json(EXIT_OOPS,
+                                    errors=errors_json)
+
+                        # No pre-existing, so create a new one.
+                        repo = publisher.Repository()
+                        pub = publisher.Publisher(prefix, repository=repo)
+                        new_pub = True
+        elif not api_inst.has_publisher(prefix=pub.prefix):
+                new_pub = True
+
+        if not repo:
+                repo = pub.repository
+                if not repo:
+                        # Could be a new publisher from auto-configuration
+                        # case where no origin was provided in repository
+                        # configuration.
+                        repo = publisher.Repository()
+                        pub.repository = repo
+
+        if disable is not None:
+                # Set disabled property only if provided.
+                pub.disabled = disable
+
+        if sticky is not None:
+                # Set stickiness only if provided
+                pub.sticky = sticky
+
+        if proxy_uri:
+                # we only support a single proxy for now.
+                proxies = [publisher.ProxyURI(proxy_uri)]
+        else:
+                proxies = []
+
+        if origin_uri:
+                # For compatibility with old -O behaviour, treat -O as a wipe
+                # of existing origins and add the new one.
+
+                # Only use existing cert information if the new URI uses
+                # https for transport.
+                if repo.origins and not (ssl_cert or ssl_key) and \
+                    any(origin_uri.startswith(scheme + ":")
+                        for scheme in publisher.SSL_SCHEMES):
+
+                        for uri in repo.origins:
+                                if ssl_cert is None:
+                                        ssl_cert = uri.ssl_cert
+                                if ssl_key is None:
+                                        ssl_key = uri.ssl_key
+                                break
+
+                repo.reset_origins()
+                o = publisher.RepositoryURI(origin_uri, proxies=proxies)
+                repo.add_origin(o)
+
+                # XXX once image configuration supports storing this
+                # information at the uri level, ssl info should be set
+                # here.
+
+        for entry in (("mirror", add_mirrors, remove_mirrors), ("origin",
+            add_origins, remove_origins)):
+                etype, add, remove = entry
+                # XXX once image configuration supports storing this
+                # information at the uri level, ssl info should be set
+                # here.
+                if "*" in remove:
+                        getattr(repo, "reset_{0}s".format(etype))()
+                else:
+                        for u in remove:
+                                getattr(repo, "remove_{0}".format(etype))(u)
+
+                for u in add:
+                        uri = publisher.RepositoryURI(u, proxies=proxies)
+                        getattr(repo, "add_{0}".format(etype))(uri)
+
+        # None is checked for here so that a client can unset an ssl_cert or
+        # ssl_key by using -c "" or -k "".
+        if ssl_cert is not None or ssl_key is not None:
+                # Assume the user wanted to update the ssl_cert or ssl_key
+                # information for *all* of the currently selected
+                # repository's origins and mirrors that use SSL schemes.
+                found_ssl = False
+                for uri in repo.origins:
+                        if uri.scheme not in publisher.SSL_SCHEMES:
+                                continue
+                        found_ssl = True
+                        if ssl_cert is not None:
+                                uri.ssl_cert = ssl_cert
+                        if ssl_key is not None:
+                                uri.ssl_key = ssl_key
+                for uri in repo.mirrors:
+                        if uri.scheme not in publisher.SSL_SCHEMES:
+                                continue
+                        found_ssl = True
+                        if ssl_cert is not None:
+                                uri.ssl_cert = ssl_cert
+                        if ssl_key is not None:
+                                uri.ssl_key = ssl_key
+
+                if (ssl_cert or ssl_key) and not found_ssl:
+                        # None of the origins or mirrors for the publisher
+                        # use SSL schemes so the cert and key information
+                        # won't be retained.
+                        errors_json.append({"reason": _("Publisher '{0}' does "
+                            "not have any SSL-based origins or mirrors."
+                            ).format(prefix)})
+                        return __prepare_json(EXIT_BADOPT, errors=errors_json)
+
+        if set_props or add_prop_values or remove_prop_values or unset_props:
+                pub.update_props(set_props=set_props,
+                    add_prop_values=add_prop_values,
+                    remove_prop_values=remove_prop_values,
+                    unset_props=unset_props)
+
+        if new_pub:
+                api_inst.add_publisher(pub,
+                    refresh_allowed=refresh_allowed, approved_cas=approved_cas,
+                    revoked_cas=revoked_cas, unset_cas=unset_cas,
+                    search_after=search_after, search_before=search_before,
+                    search_first=search_first)
+        else:
+                for ca in approved_cas:
+                        try:
+                                ca = os.path.normpath(
+                                    os.path.join(_get_orig_cwd(), ca))
+                                with open(ca, "rb") as fh:
+                                        s = fh.read()
+                        except EnvironmentError as e:
+                                if e.errno == errno.ENOENT:
+                                        raise api_errors.MissingFileArgumentException(
+                                            ca)
+                                elif e.errno == errno.EACCES:
+                                        raise api_errors.PermissionsException(
+                                            ca)
+                                raise
+                        pub.approve_ca_cert(s)
+
+                for hsh in revoked_cas:
+                        pub.revoke_ca_cert(hsh)
+
+                for hsh in unset_cas:
+                        pub.unset_ca_cert(hsh)
+
+                api_inst.update_publisher(pub,
+                    refresh_allowed=refresh_allowed, search_after=search_after,
+                    search_before=search_before, search_first=search_first)
+
+        return __prepare_json(EXIT_OK)
+
+def _get_orig_cwd():
+        """Get the original current working directory."""
+        try:
+                orig_cwd = os.getcwd()
+        except OSError as e:
+                try:
+                        orig_cwd = os.environ["PWD"]
+                        if not orig_cwd or orig_cwd[0] != "/":
+                                orig_cwd = None
+                except KeyError:
+                        orig_cwd = None
+        return orig_cwd
+
+def __pkg(subcommand, pargs_json, opts_json, pkg_image=None,
+    prog_delay=PROG_DELAY, prog_tracker=None, opts_mapping=misc.EmptyDict,
+    api_inst=None):
+        """Private function to invoke pkg subcommands."""
+
+        errors_json = []
+        if subcommand is None:
+                err = {"reason": "Sub-command cannot be None."}
+                errors_json.append(err)
+                return None, __prepare_json(EXIT_OOPS, errors=errors_json)
+        if subcommand not in cmds:
+                err = {"reason": "Unknown sub-command: {0}.".format(
+                    subcommand)}
+                errors_json.append(err)
+                return None, __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        arg_name = "pargs_json"
+        try:
+                if pargs_json is None:
+                        pargs = []
+                else:
+                        pargs = json.loads(pargs_json)
+                if not isinstance(pargs, list):
+                        if not isinstance(pargs, basestring):
+                                err = {"reason": "{0} is invalid.".format(
+                                    arg_name)}
+                                errors_json.append(err)
+                                return None, __prepare_json(EXIT_OOPS,
+                                    errors=errors_json)
+                        if isinstance(pargs, unicode):
+                                pargs = pargs.encode("utf-8")
+                        pargs = [pargs]
+                else:
+                        for idx in range(len(pargs)):
+                                if isinstance(pargs[idx], unicode):
+                                        pargs[idx] = pargs[idx].encode("utf-8")
+        except Exception as e:
+                err = {"reason": "{0} is invalid.".format(
+                    arg_name)}
+                errors_json.append(err)
+                return None, __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        try:
+                if opts_json is None:
+                        opts = {}
+                else:
+                        opts = json.loads(opts_json, object_hook=_byteify)
+                if not isinstance(opts, dict):
+                        err = {"reason": "opts_json is invalid."}
+                        errors_json.append(err)
+                        return None, __prepare_json(EXIT_OOPS,
+                            errors=errors_json)
+        except Exception:
+                err = {"reason": "opts_json is invalid."}
+                errors_json.append(err)
+                return None, __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        try:
+                # Validate JSON input with JSON schema.
+                input_schema = _get_pkg_input_schema(subcommand,
+                    opts_mapping=opts_mapping)
+                jsonschema.validate({arg_name: pargs, "opts_json": opts},
+                    input_schema)
+        except jsonschema.ValidationError as e:
+                return None, __prepare_json(EXIT_OOPS,
+                    errors=[{"reason": str(e)}])
+
+        orig_cwd = _get_orig_cwd()
+
+        # Get ImageInterface and image object.
+        if not api_inst:
+                api_inst = __api_alloc(pkg_image, orig_cwd,
+                    prog_delay=prog_delay, prog_tracker=prog_tracker,
+                    errors_json=errors_json)
+        if api_inst is None:
+                return None, __prepare_json(EXIT_OOPS, errors=errors_json)
+
+        func = cmds[subcommand][0]
+        # Get the available options for the requested operation to create the
+        # getopt parsing strings.
+        valid_opts = options.get_pkg_opts(subcommand, add_table=cmd_opts)
+        pargs_limit = None
+        if len(cmds[subcommand]) > 2:
+                pargs_limit = cmds[subcommand][2]
+
+        if not valid_opts:
+                # if there are no options for an op, it has its own processing.
+                try:
+                        if subcommand in ["unset-publisher"]:
+                                return api_inst, func(subcommand, api_inst, pargs,
+                                    **opts)
+                        else:
+                                return api_inst, func(api_inst, pargs, **opts)
+                except getopt.GetoptError as e:
+                        err = {"reason": str(e)}
+                        return api_inst, __prepare_json(EXIT_OOPS,
+                            errors=[err])
+        try:
+                opt_dict = misc.opts_parse(subcommand, [],
+                    valid_opts, opts_mapping, use_cli_opts=False, **opts)
+                if pargs_limit is not None and len(pargs) > pargs_limit:
+                        err = {"reason": _("illegal argument -- {0}").format(
+                            pargs[pargs_limit])}
+                        return api_inst, __prepare_json(EXIT_OOPS,
+                            errors=[err])
+                opts = options.opts_assemble(subcommand, api_inst, opt_dict,
+                    add_table=cmd_opts, cwd=orig_cwd)
+        except api_errors.InvalidOptionError as e:
+                # We can't use the string representation of the exception since
+                # it references internal option names. We substitute the RAD
+                # options and create a new exception to make sure the messages
+                # are correct.
+
+                # Convert the internal options to RAD options. We make sure that
+                # when there is a short and a long version for the same option
+                # we print both to avoid confusion.
+                def get_cli_opt(option):
+                        try:
+                                option_name = None
+                                if option in opts_mapping:
+                                        option_name = opts_mapping[option]
+
+                                if option_name:
+                                        return option_name
+                                else:
+                                        return option
+                        except KeyError:
+                                # ignore if we can't find a match
+                                # (happens for repeated arguments or invalid
+                                # arguments)
+                                return option
+                        except TypeError:
+                                # ignore if we can't find a match
+                                # (happens for an invalid arguments list)
+                                return option
+                cli_opts = []
+                opt_def = []
+
+                for o in e.options:
+                        cli_opts.append(get_cli_opt(o))
+
+                        # collect the default value (see comment below)
+                        opt_def.append(options.get_pkg_opts_defaults(subcommand,
+                            o, add_table=cmd_opts))
+
+                # Prepare for headache:
+                # If we have an option 'b' which is True by default, it will
+                # be toggled to False if the user specifies the corresponding
+                # option on the CLI.
+                # If we now have an option 'a' which requires option 'b' to
+                # be set, we can't say "'a' requires 'b'" because the user
+                # can only specify 'not b'. So the correct message would be:
+                # "'a' is incompatible with 'not b'".
+                # We get there by just changing the type of the exception for
+                # all cases where the default value of one of the options is
+                # True.
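+                # For instance, 'repo_uri' requires 'refresh_allowed' (see
+                # opts_table_cb_pub_opts), and 'refresh_allowed' defaults to
+                # True, so the error is reported as "'repo_uri' is
+                # incompatible with 'not refresh_allowed'".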
+                if e.err_type == api_errors.InvalidOptionError.REQUIRED:
+                        if len(opt_def) == 2 and (opt_def[0] or opt_def[1]):
+                                e.err_type = \
+                                    api_errors.InvalidOptionError.INCOMPAT
+
+                # This new exception will have the CLI options, so can be passed
+                # directly to usage().
+                new_e = api_errors.InvalidOptionError(err_type=e.err_type,
+                    options=cli_opts, msg=e.msg)
+                err = {"reason": str(new_e)}
+                return api_inst, __prepare_json(EXIT_BADOPT, errors=[err])
+
+        return api_inst, func(op=subcommand, api_inst=api_inst,
+            pargs=pargs, **opts)
+
+def __handle_errors_json(func, non_wrap_print=True, subcommand=None,
+    pargs_json=None, opts_json=None, pkg_image=None,
+    prog_delay=PROG_DELAY, prog_tracker=None, opts_mapping=misc.EmptyDict,
+    api_inst=None, reset_api=False):
+        """Error handling for pkg subcommands."""
+
+        traceback_str = misc.get_traceback_message()
+        errors_json = []
+
+        _api_inst = None
+        try:
+                # Out of memory errors can be raised as EnvironmentErrors with
+                # an errno of ENOMEM, so in order to handle those exceptions
+                # with other errnos, we nest this try block and have the outer
+                # one handle the other instances.
+                try:
+                        if non_wrap_print:
+                                _api_inst, ret_json = func(subcommand, pargs_json,
+                                    opts_json, pkg_image=pkg_image,
+                                    prog_delay=prog_delay,
+                                    prog_tracker=prog_tracker,
+                                    opts_mapping=opts_mapping,
+                                    api_inst=api_inst)
+                        else:
+                                func()
+                except (MemoryError, EnvironmentError) as __e:
+                        if isinstance(__e, EnvironmentError) and \
+                            __e.errno != errno.ENOMEM:
+                                raise
+                        if _api_inst:
+                                _api_inst.abort(
+                                    result=RESULT_FAILED_OUTOFMEMORY)
+                        _error_json(misc.out_of_memory(),
+                            errors_json=errors_json)
+                        ret_json = __prepare_json(EXIT_OOPS,
+                            errors=errors_json)
+        except SystemExit as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_UNKNOWN)
+                raise __e
+        except (PipeError, KeyboardInterrupt):
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_CANCELED)
+                # We don't want to display any messages here to prevent
+                # possible further broken pipe (EPIPE) errors.
+                ret_json = __prepare_json(EXIT_OOPS)
+        except api_errors.LinkedImageException as __e:
+                _error_json(_("Linked image exception(s):\n{0}").format(
+                    str(__e)), errors_json=errors_json)
+                ret_json = __prepare_json(__e.lix_exitrv, errors=errors_json)
+        except api_errors.CertificateError as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_CONFIGURATION)
+                _error_json(__e, errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.PublisherError as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_BAD_REQUEST)
+                _error_json(__e, errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.ImageLockedError as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_LOCKED)
+                _error_json(__e, errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_LOCKED, errors=errors_json)
+        except api_errors.TransportError as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_TRANSPORT)
+
+                errors_json.append({"reason": _("Errors were encountered "
+                    "while attempting to retrieve package or file data "
+                    "for the requested operation.")})
+                errors_json.append({"reason": _("Details follow:\n\n{0}"
+                    ).format(__e)})
+                _collect_proxy_config_errors(errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.InvalidCatalogFile as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_STORAGE)
+                errors_json.append({"reason": _("An error was encountered "
+                    "while attempting to read image state information to "
+                    "perform the requested operation. Details follow:\n\n{0}"
+                    ).format(__e)})
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.InvalidDepotResponseException as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_TRANSPORT)
+                errors_json.append({"reason": _("\nUnable to contact a valid "
+                    "package repository. This may be due to a problem with "
+                    "the repository, network misconfiguration, or an "
+                    "incorrect pkg client configuration.  Please verify the "
+                    "client's network configuration and repository's location."
+                    "\nAdditional details:\n\n{0}").format(__e)})
+                _collect_proxy_config_errors(errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.HistoryLoadException as __e:
+                # Since a history related error occurred, discard all
+                # information about the current operation(s) in progress.
+                if _api_inst:
+                        _api_inst.clear_history()
+                _error_json(_("An error was encountered while attempting to "
+                    "load history information\nabout past client operations."),
+                    errors_json=errors_json)
+                _error_json(__e, errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.HistoryStoreException as __e:
+                # Since a history related error occurred, discard all
+                # information about the current operation(s) in progress.
+                if _api_inst:
+                        _api_inst.clear_history()
+                _error_json({"reason": _("An error was encountered while "
+                    "attempting to store information about the\ncurrent "
+                    "operation in client history. Details follow:\n\n{0}"
+                    ).format(__e)}, errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.HistoryPurgeException as __e:
+                # Since a history related error occurred, discard all
+                # information about the current operation(s) in progress.
+                if _api_inst:
+                        _api_inst.clear_history()
+                errors_json.append({"reason": _("An error was encountered "
+                    "while attempting to purge client history. "
+                    "Details follow:\n\n{0}").format(__e)})
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.VersionException as __e:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_UNKNOWN)
+                _error_json(_("The pkg command appears out of sync with the "
+                    "libraries provided\nby pkg:/package/pkg. The client "
+                    "version is {client} while the library\nAPI version is "
+                    "{api}.").format(client=__e.received_version,
+                    api=__e.expected_version), errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.WrapSuccessfulIndexingException as __e:
+                ret_json = __prepare_json(EXIT_OK)
+        except api_errors.WrapIndexingException as __e:
+                def _wrapper():
+                        raise __e.wrapped
+                ret_json = __handle_errors_json(_wrapper, non_wrap_print=False)
+
+                s = ""
+                if ret_json["status"] == 99:
+                        s += _("\n{err}{stacktrace}").format(
+                        err=__e, stacktrace=traceback_str)
+
+                s += _("\n\nDespite the error while indexing, the operation "
+                    "has completed successfuly.")
+                _error_json(s, errors_json=errors_json)
+                if "errors" in ret_json:
+                        ret_json["errors"].extend(errors_json)
+                else:
+                        ret_json["errors"] = errors_json
+        except api_errors.ReadOnlyFileSystemException as __e:
+                _error_json("The file system is read only.",
+                    errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except api_errors.UnexpectedLinkError as __e:
+                _error_json(str(__e), errors_json=errors_json)
+                ret_json = __prepare_json(EXIT_OOPS, errors=errors_json)
+        except:
+                if _api_inst:
+                        _api_inst.abort(result=RESULT_FAILED_UNKNOWN)
+                if non_wrap_print:
+                        traceback.print_exc()
+                        _error_json(traceback.format_exc()+"\n"+traceback_str,
+                            errors_json=errors_json)
+                ret_json = __prepare_json(99, errors=errors_json)
+
+        if reset_api:
+                try:
+                        if _api_inst:
+                                _api_inst.reset()
+                except:
+                        # If any errors occur during reset, we will discard
+                        # this api_inst.
+                        _api_inst = None
+
+        return _api_inst, ret_json
+
+def _pkg_invoke(subcommand=None, pargs_json=None, opts_json=None, pkg_image=None,
+    prog_delay=PROG_DELAY, prog_tracker=None, opts_mapping=misc.EmptyDict,
+    api_inst=None, reset_api=False, return_api=False):
+        """pkg subcommands invocation. Output will be in JSON format.
+        subcommand: a string type pkg subcommand.
+
+        pargs_json: a JSON blob containing a list of pargs.
+
+        opts_json: a JSON blob containing a dictionary of pkg
+        subcommand options.
+
+        pkg_image: a string type alternate image path.
+
+        prog_delay: long progress event delay in sec.
+
+        prog_tracker: progress tracker object.
+
+        opts_mapping: a mapping from internal option names to the option
+            names exposed to consumers (e.g. RAD).
+
+        api_inst: an existing ImageInterface instance to reuse; if None, a
+            new one is allocated.
+
+        reset_api: if True, reset the api instance once the operation
+            finishes; the instance is discarded if the reset fails.
+
+        return_api: if True, return an (api_inst, ret_json) tuple instead of
+            just ret_json so that the api instance can be reused.
+        """
+
+        _api_inst, ret_json = __handle_errors_json(__pkg,
+            subcommand=subcommand, pargs_json=pargs_json,
+            opts_json=opts_json, pkg_image=pkg_image,
+            prog_delay=prog_delay, prog_tracker=prog_tracker,
+            opts_mapping=opts_mapping, api_inst=api_inst, reset_api=reset_api)
+
+        if return_api:
+                return _api_inst, ret_json
+        else:
+                return ret_json
+
+class ClientInterface(object):
+        """Class to provide a general interface to various clients."""
+
+        def __init__(self, pkg_image=None, prog_delay=PROG_DELAY,
+            prog_tracker=None, opts_mapping=misc.EmptyDict):
+                self.api_inst = None
+                self.pkg_image = pkg_image
+                self.prog_delay = prog_delay
+                self.prog_tracker = prog_tracker
+                self.opts_mapping = opts_mapping
+
+        def __cmd_invoke(self, cmd, pargs_json=None, opts_json=None):
+                """Helper function for command invocation."""
+
+                # We will always reset api instance on exception.
+                _api_inst, ret_json = _pkg_invoke(cmd, pargs_json=pargs_json,
+                    opts_json=opts_json, pkg_image=self.pkg_image,
+                    prog_delay=self.prog_delay, prog_tracker=self.prog_tracker,
+                    opts_mapping=self.opts_mapping, api_inst=self.api_inst,
+                    reset_api=True, return_api=True)
+                self.api_inst = _api_inst
+                return ret_json
+
+        def list_inventory(self, pargs_json=None, opts_json=None):
+                """Invoke pkg list subcommand."""
+
+                return self.__cmd_invoke("list", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def info(self, pargs_json=None, opts_json=None):
+                """Invoke pkg info subcommand."""
+
+                return self.__cmd_invoke("info", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def exact_install(self, pargs_json=None, opts_json=None):
+                """Invoke pkg exact-install subcommand."""
+
+                return self.__cmd_invoke("exact-install",
+                    pargs_json=pargs_json, opts_json=opts_json)
+
+        def install(self, pargs_json=None, opts_json=None):
+                """Invoke pkg install subcommand."""
+
+                return self.__cmd_invoke("install", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def update(self, pargs_json=None, opts_json=None):
+                """Invoke pkg update subcommand."""
+
+                return self.__cmd_invoke("update", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def uninstall(self, pargs_json=None, opts_json=None):
+                """Invoke pkg uninstall subcommand."""
+
+                return self.__cmd_invoke("uninstall", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def publisher_set(self, pargs_json=None, opts_json=None):
+                """Invoke pkg set-publisher subcommand."""
+
+                return self.__cmd_invoke("set-publisher",
+                    pargs_json=pargs_json, opts_json=opts_json)
+
+        def publisher_unset(self, pargs_json=None):
+                """Invoke pkg unset-publisher subcommand."""
+
+                return self.__cmd_invoke("unset-publisher",
+                    pargs_json=pargs_json)
+
+        def publisher_list(self, pargs_json=None, opts_json=None):
+                """Invoke pkg publisher subcommand."""
+
+                return self._cmd_invoke("publisher", pargs_json=pargs_json,
+                    opts_json=opts_json)
+
+        def get_pkg_input_schema(self, subcommand):
+                """Get input schema for a specific subcommand."""
+
+                return _get_pkg_input_schema(subcommand,
+                    opts_mapping=self.opts_mapping)
+
+        def get_pkg_output_schema(self, subcommand):
+                """Get output schema for a specific subcommand."""
+
+                return _get_pkg_output_schema(subcommand)
+
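+# A minimal usage sketch for ClientInterface (values are illustrative):
+#
+#     client = ClientInterface(pkg_image="/")
+#     ret_json = client.list_inventory(
+#         opts_json='{"list_upgradable": true}')
+#     if ret_json["status"] == EXIT_OK:
+#             ...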
+
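+# Table of supported subcommands: each entry maps a subcommand name to
+# [handler function, output schema], with an optional third element giving
+# the maximum number of pargs accepted (see pargs_limit in __pkg).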
+cmds = {
+    "exact-install"   : [_exact_install, __pkg_exact_install_output_schema],
+    "list"            : [_list_inventory, __pkg_list_output_schema],
+    "install"         : [_install, __pkg_install_output_schema],
+    "update"          : [_update, __pkg_update_output_schema],
+    "uninstall"       : [_uninstall, __pkg_uninstall_output_schema],
+    "set-publisher"   : [_publisher_set,
+                          __pkg_publisher_set_output_schema],
+    "unset-publisher" : [_publisher_unset,
+                          __pkg_publisher_unset_output_schema],
+    "publisher"       : [_publisher_list, __pkg_publisher_output_schema],
+    "info"            : [_info, __pkg_info_output_schema]
+}
+
+# Addendum table for option extensions.
+cmd_opts = {}
--- a/src/modules/client/options.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/options.py	Fri Apr 03 19:02:53 2015 -0700
@@ -87,7 +87,220 @@
 SYNC_ACT              = "sync_act"
 ACT_TIMEOUT           = "act_timeout"
 PUBLISHERS            = "publishers"
+SSL_KEY               = "ssl_key"
+SSL_CERT              = "ssl_cert"
+APPROVED_CA_CERTS     = "approved_ca_certs"
+REVOKED_CA_CERTS      = "revoked_ca_certs"
+UNSET_CA_CERTS        = "unset_ca_certs"
+ORIGIN_URI            = "origin_uri"
+RESET_UUID            = "reset_uuid"
+ADD_MIRRORS           = "add_mirrors"
+REMOVE_MIRRORS        = "remove_mirrors"
+ADD_ORIGINS           = "add_origins"
+REMOVE_ORIGINS        = "remove_origins"
+REFRESH_ALLOWED       = "refresh_allowed"
+PUB_ENABLE            = "enable"
+PUB_DISABLE           = "disable"
+PUB_STICKY            = "sticky"
+PUB_NON_STICKY        = "non_sticky"
+REPO_URI              = "repo_uri"
+PROXY_URI             = "proxy_uri"
+SEARCH_BEFORE         = "search_before"
+SEARCH_AFTER          = "search_after"
+SEARCH_FIRST          = "search_first"
+SET_PROPS             = "set_props"
+ADD_PROP_VALUES       = "add_prop_values"
+REMOVE_PROP_VALUES    = "remove_prop_values"
+UNSET_PROPS           = "unset_props"
+PREFERRED_ONLY        = "preferred_only"
+INC_DISABLED          = "inc_disabled"
+OUTPUT_FORMAT         = "output_format"
+DISPLAY_LICENSE       = "display_license"
+INFO_LOCAL            = "info_local"
+INFO_REMOTE           = "info_remote"
 
+def opts_table_cb_info(api_inst, opts, opts_new):
+        opts_new[ORIGINS] = set()
+        for e in opts[ORIGINS]:
+                opts_new[ORIGINS].add(misc.parse_uri(e,
+                    cwd=_orig_cwd))
+        if opts[ORIGINS]:
+                opts_new[INFO_REMOTE] = True
+        if opts[QUIET]:
+                global_settings.client_output_quiet = True
+        if not opts_new[INFO_LOCAL] and not opts_new[INFO_REMOTE]:
+                opts_new[INFO_LOCAL] = True
+        elif opts_new[INFO_LOCAL] and opts_new[INFO_REMOTE]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [INFO_LOCAL, INFO_REMOTE])
+
+def __parse_set_props(args):
+        """"Parse set property options that were specified on the command
+        line into a dictionary.  Make sure duplicate properties were not
+        specified."""
+
+        set_props = dict()
+        for pv in args:
+                try:
+                        p, v = pv.split("=", 1)
+                except ValueError:
+                        raise InvalidOptionError(msg=_("properties to be set "
+                            "must be of the form '<name>=<value>'. This is "
+                            "what was given: {0}").format(pv))
+
+                if p in set_props:
+                        raise InvalidOptionError(msg=_("a property may only "
+                            "be set once in a command. {0} was set twice"
+                            ).format(p))
+                set_props[p] = v
+
+        return set_props
+
+def __parse_prop_values(args, add=True):
+        """"Parse add or remove property values options that were specified
+        on the command line into a dictionary.  Make sure duplicate properties
+        were not specified."""
+
+        props_values = dict()
+        if add:
+                add_txt = "added"
+        else:
+                add_txt = "removed"
+
+        for pv in args:
+                try:
+                        p, v = pv.split("=", 1)
+                except ValueError:
+                        raise InvalidOptionError(msg=_("property values to be "
+                            "{add} must be of the form '<name>=<value>'. "
+                            "This is what was given: {key}").format(
+                            add=add_txt, key=pv))
+
+                props_values.setdefault(p, [])
+                props_values[p].append(v)
+
+        return props_values
+
+def opts_table_cb_pub_list(api_inst, opts, opts_new):
+        if opts[OUTPUT_FORMAT] is None:
+                opts_new[OUTPUT_FORMAT] = "default"
+
+def opts_table_cb_pub_props(api_inst, opts, opts_new):
+        opts_new[SET_PROPS] = __parse_set_props(opts[SET_PROPS])
+        opts_new[ADD_PROP_VALUES] = __parse_prop_values(opts[ADD_PROP_VALUES])
+        opts_new[REMOVE_PROP_VALUES] = __parse_prop_values(
+            opts[REMOVE_PROP_VALUES], add=False)
+        opts_new[UNSET_PROPS] = set(opts[UNSET_PROPS])
+
+def opts_table_cb_pub_search(api_inst, opts, opts_new):
+        if opts[SEARCH_BEFORE] and opts[SEARCH_AFTER]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [SEARCH_BEFORE, SEARCH_AFTER])
+
+        if opts[SEARCH_BEFORE] and opts[SEARCH_FIRST]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [SEARCH_BEFORE, SEARCH_FIRST])
+
+        if opts[SEARCH_AFTER] and opts[SEARCH_FIRST]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [SEARCH_AFTER, SEARCH_FIRST])
+
+def opts_table_cb_pub_opts(api_inst, opts, opts_new):
+        del opts_new[PUB_DISABLE]
+        del opts_new[PUB_ENABLE]
+        del opts_new[PUB_STICKY]
+        del opts_new[PUB_NON_STICKY]
+
+        if opts[PUB_DISABLE] and opts[PUB_ENABLE]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [PUB_DISABLE, PUB_ENABLE])
+
+        if opts[PUB_STICKY] and opts[PUB_NON_STICKY]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [PUB_STICKY, PUB_NON_STICKY])
+
+        opts_new[PUB_DISABLE] = None
+        if opts[PUB_DISABLE]:
+                opts_new[PUB_DISABLE] = True
+
+        if opts[PUB_ENABLE]:
+                opts_new[PUB_DISABLE] = False
+
+        opts_new[PUB_STICKY] = None
+        if opts[PUB_STICKY]:
+                opts_new[PUB_STICKY] = True
+
+        if opts[PUB_NON_STICKY]:
+                opts_new[PUB_STICKY] = False
+
+        if opts[ORIGIN_URI] and opts[ADD_ORIGINS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [ORIGIN_URI, ADD_ORIGINS])
+
+        if opts[ORIGIN_URI] and opts[REMOVE_ORIGINS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [ORIGIN_URI, REMOVE_ORIGINS])
+
+        if opts[REPO_URI] and opts[ADD_ORIGINS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, ADD_ORIGINS])
+        if opts[REPO_URI] and opts[ADD_MIRRORS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, ADD_MIRRORS])
+        if opts[REPO_URI] and opts[REMOVE_ORIGINS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, REMOVE_ORIGINS])
+        if opts[REPO_URI] and opts[REMOVE_MIRRORS]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, REMOVE_MIRRORS])
+        if opts[REPO_URI] and opts[PUB_DISABLE]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, PUB_DISABLE])
+        if opts[REPO_URI] and opts[PUB_ENABLE]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, PUB_ENABLE])
+        if opts[REPO_URI] and not opts[REFRESH_ALLOWED]:
+                raise InvalidOptionError(InvalidOptionError.REQUIRED,
+                    [REPO_URI, REFRESH_ALLOWED])
+        if opts[REPO_URI] and opts[RESET_UUID]:
+                raise InvalidOptionError(InvalidOptionError.INCOMPAT,
+                    [REPO_URI, RESET_UUID])
+
+        if opts[PROXY_URI] and not (opts[ADD_ORIGINS] or opts[ADD_MIRRORS]
+            or opts[REPO_URI] or opts[REMOVE_ORIGINS] or opts[REMOVE_MIRRORS]):
+                raise InvalidOptionError(InvalidOptionError.REQUIRED_ANY,
+                    [PROXY_URI, ADD_ORIGINS, ADD_MIRRORS, REMOVE_ORIGINS,
+                    REMOVE_MIRRORS, REPO_URI])
+
+        opts_new[ADD_ORIGINS] = set()
+        opts_new[REMOVE_ORIGINS] = set()
+        opts_new[ADD_MIRRORS] = set()
+        opts_new[REMOVE_MIRRORS] = set()
+        for e in opts[ADD_ORIGINS]:
+                opts_new[ADD_ORIGINS].add(misc.parse_uri(e, cwd=_orig_cwd))
+        for e in opts[REMOVE_ORIGINS]:
+                if e == "*":
+                        # Allow wildcard to support an easy, scriptable
+                        # way of removing all existing entries.
+                        opts_new[REMOVE_ORIGINS].add("*")
+                else:
+                        opts_new[REMOVE_ORIGINS].add(misc.parse_uri(e,
+                            cwd=_orig_cwd))
+
+        for e in opts[ADD_MIRRORS]:
+                opts_new[ADD_MIRRORS].add(misc.parse_uri(e, cwd=_orig_cwd))
+        for e in opts[REMOVE_MIRRORS]:
+                if e == "*":
+                        # Allow wildcard to support an easy, scriptable
+                        # way of removing all existing entries.
+                        opts_new[REMOVE_MIRRORS].add("*")
+                else:
+                        opts_new[REMOVE_MIRRORS].add(misc.parse_uri(e,
+                            cwd=_orig_cwd))
+
+        if opts[REPO_URI]:
+                opts_new[REPO_URI] = misc.parse_uri(opts[REPO_URI],
+                    cwd=_orig_cwd)
 
 def opts_table_cb_beopts(api_inst, opts, opts_new):
 
@@ -586,113 +799,206 @@
 # options is a list which can contain:
 #
 # - Tuples formatted as:
-#       (k, v, [val])
+#       (k, v, [val], {})
 #   where the values are:
 #       k: the key value for the options dictionary
 #       v: the default value. valid values are: True/False, None, [], 0
 #       val: the valid argument list. It should be a list,
 #       and it is optional.
+#       {}: the JSON schema used to validate the option's value. It is
+#       optional.
 #
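+# For example, an entry such as the following declares a boolean option
+# that defaults to False:
+#     (QUIET, False, [], {"type": "boolean"})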
 
+opts_table_info = [
+    opts_table_cb_info,
+    (DISPLAY_LICENSE,    False, [], {"type": "boolean"}),
+    (INFO_LOCAL,         False, [], {"type": "boolean"}),
+    (INFO_REMOTE,        False, [], {"type": "boolean"}),
+    (ORIGINS,            [],    [], {"type": "array",
+                                     "items": {"type": "string"}
+                                    }),
+    (QUIET,              False, [], {"type": "boolean"})
+]
+
+opts_table_pub_list = [
+    opts_table_cb_pub_list,
+    (PREFERRED_ONLY,  False, [],                 {"type": "boolean"}),
+    (INC_DISABLED,    True,  [],                 {"type": "boolean"}),
+    (OUTPUT_FORMAT,   None,  ["default", "tsv"], {"type": ["null", "string"]}),
+    (OMIT_HEADERS,    False, [],                 {"type": "boolean"})
+]
+
+opts_table_pub_props = [
+    opts_table_cb_pub_props,
+    (SET_PROPS,           [], [], {"type": "array", "items": {"type": "string"}
+                                  }),
+    (ADD_PROP_VALUES,     [], [], {"type": "array",
+                                   "items": {"type": "string"}
+                                  }),
+    (REMOVE_PROP_VALUES,  [], [], {"type": "array",
+                                   "items": {"type": "string"}
+                                  }),
+    (UNSET_PROPS,         [], [], {"type": "array", "items": {"type": "string"}
+                                  })
+]
+
+opts_table_ssl = [
+    (SSL_KEY,            None, [],  {"type": ["null", "string"]}),
+    (SSL_CERT,           None, [],  {"type": ["null", "string"]}),
+    (APPROVED_CA_CERTS,  [],   [],  {"type": "array",
+                                     "items": {"type": "string"}
+                                    }),
+    (REVOKED_CA_CERTS,   [],   [],  {"type": "array",
+                                     "items": {"type": "string"}
+                                    }),
+    (UNSET_CA_CERTS,     [],   [],  {"type": "array",
+                                     "items": {"type": "string"}
+                                    }),
+]
+
+opts_table_pub_search = [
+    opts_table_cb_pub_search,
+    (SEARCH_BEFORE,   None,  [], {"type": ["null", "string"]}),
+    (SEARCH_AFTER,    None,  [], {"type": ["null", "string"]}),
+    (SEARCH_FIRST,    False, [], {"type": "boolean"}),
+]
+
+opts_table_pub_opts = [
+    opts_table_cb_pub_opts,
+    (ORIGIN_URI,      None,  [], {"type": ["null", "string"]}),
+    (RESET_UUID,      False, [], {"type": "boolean"}),
+    (ADD_MIRRORS,     [],    [], {"type": "array",
+                                  "items": {"type": "string"}
+                                 }),
+    (REMOVE_MIRRORS,  [],    [], {"type": "array",
+                                  "items": {"type": "string"}
+                                 }),
+    (ADD_ORIGINS,     [],    [], {"type": "array",
+                                  "items": {"type": "string"}
+                                 }),
+    (REMOVE_ORIGINS,  [],    [], {"type": "array",
+                                  "items": {"type": "string"}
+                                 }),
+    (REFRESH_ALLOWED, True,  [], {"type": "boolean"}),
+    (PUB_ENABLE,      False, [], {"type": "boolean"}),
+    (PUB_DISABLE,     False, [], {"type": "boolean"}),
+    (PUB_STICKY,      False, [], {"type": "boolean"}),
+    (PUB_NON_STICKY,  False, [], {"type": "boolean"}),
+    (REPO_URI,        None,  [], {"type": ["null", "string"]}),
+    (PROXY_URI,       None,  [], {"type": ["null", "string"]}),
+]
 
 opts_table_beopts = [
     opts_table_cb_beopts,
-    (BACKUP_BE_NAME,     None),
-    (BE_NAME,            None),
-    (DENY_NEW_BE,        False),
-    (NO_BACKUP_BE,       False),
-    (BE_ACTIVATE,        True),
-    (REQUIRE_BACKUP_BE,  False),
-    (REQUIRE_NEW_BE,     False),
+    (BACKUP_BE_NAME,     None,  [], {"type": ["null", "string"]}),
+    (BE_NAME,            None,  [], {"type": ["null", "string"]}),
+    (DENY_NEW_BE,        False, [], {"type": "boolean"}),
+    (NO_BACKUP_BE,       False, [], {"type": "boolean"}),
+    (BE_ACTIVATE,        True,  [], {"type": "boolean"}),
+    (REQUIRE_BACKUP_BE,  False, [], {"type": "boolean"}),
+    (REQUIRE_NEW_BE,     False, [], {"type": "boolean"}),
 ]
 
 opts_table_concurrency = [
     opts_table_cb_concurrency,
-    (CONCURRENCY,        None),
+    (CONCURRENCY,        None, [], {"type": ["null", "integer"],
+        "minimum": 0}),
 ]
 
 opts_table_force = [
-    (FORCE,                False),
+    (FORCE,                False, [], {"type": "boolean"}),
 ]
 
 opts_table_li_ignore = [
     opts_table_cb_li_ignore,
-    (LI_IGNORE_ALL,        False),
-    (LI_IGNORE_LIST,       []),
+    (LI_IGNORE_ALL,        False, [], {"type": "boolean"}),
+    (LI_IGNORE_LIST,       [],    [], {"type": "array",
+                                       "items": {"type": "string"}
+                                      }),
 ]
 
 opts_table_li_md_only = [
     opts_table_cb_md_only,
-    (LI_MD_ONLY,         False),
+    (LI_MD_ONLY,         False, [], {"type": "boolean"}),
 ]
 
 opts_table_li_no_pkg_updates = [
-    (LI_PKG_UPDATES,       True),
+    (LI_PKG_UPDATES,       True, [], {"type": "boolean"}),
 ]
 
 opts_table_li_no_psync = [
     opts_table_cb_li_no_psync,
-    (LI_PARENT_SYNC,       True),
+    (LI_PARENT_SYNC,       True, [], {"type": "boolean"}),
 ]
 
 opts_table_li_props = [
     opts_table_cb_li_props,
-    (LI_PROPS,             []),
+    (LI_PROPS,             [], [], {"type": "array",
+                                    "items": {"type": "string"}
+                                   }),
 ]
 
 opts_table_li_target = [
     opts_table_cb_li_target,
-    (LI_TARGET_ALL,        False),
-    (LI_TARGET_LIST,       []),
+    (LI_TARGET_ALL,        False, [], {"type": "boolean"}),
+    (LI_TARGET_LIST,       [],    [], {"type": "array",
+                                       "items": {"type": "string"}
+                                      }),
 ]
 
 opts_table_li_target1 = [
     opts_table_cb_li_target1,
-    (LI_NAME,              None),
+    (LI_NAME,              None, [], {"type": ["null", "string"]}),
 ]
 
 opts_table_li_recurse = [
     opts_table_cb_li_recurse,
-    (LI_ERECURSE_ALL,       False),
-    (LI_ERECURSE_INCL,      []),
-    (LI_ERECURSE_EXCL,      []),
+    (LI_ERECURSE_ALL,       False, [], {"type": "boolean"}),
+    (LI_ERECURSE_INCL,      [], [], {"type": "array",
+                                     "items": {"type": "string"}
+                                    }),
+    (LI_ERECURSE_EXCL,      [], [], {"type": "array",
+        "items": {"type": "string"}}),
 ]
 
 opts_table_licenses = [
-    (ACCEPT,               False),
-    (SHOW_LICENSES,        False),
+    (ACCEPT,               False, [], {"type": "boolean"}),
+    (SHOW_LICENSES,        False, [], {"type": "boolean"}),
 ]
 
 opts_table_no_headers = [
     opts_table_cb_no_headers_vs_quiet,
-    (OMIT_HEADERS,         False),
+    (OMIT_HEADERS,         False, [], {"type": "boolean"}),
 ]
 
 opts_table_no_index = [
-    (UPDATE_INDEX,         True),
+    (UPDATE_INDEX,         True, [], {"type": "boolean"}),
 ]
 
 opts_table_no_refresh = [
-    (REFRESH_CATALOGS,     True),
+    (REFRESH_CATALOGS,     True, [], {"type": "boolean"}),
 ]
 
 opts_table_reject = [
-    (REJECT_PATS,          []),
+    (REJECT_PATS,          [], [], {"type": "array",
+                                    "items": {"type": "string"}
+                                   }),
 ]
 
 opts_table_verbose = [
     opts_table_cb_v,
-    (VERBOSE,              0),
+    (VERBOSE,              0, [], {"type": "integer", "minimum": 0}),
 ]
 
 opts_table_quiet = [
     opts_table_cb_q,
-    (QUIET,                False),
+    (QUIET,                False, [], {"type": "boolean"}),
 ]
 
 opts_table_parsable = [
     opts_table_cb_parsable,
-    (PARSABLE_VERSION,     None),
+    (PARSABLE_VERSION,     None,  [], {"type": ["null", "integer"],
+                                       "minimum": 0, "maximum": 0
+                                      }),
 ]
 
 opts_table_nqv = \
@@ -700,32 +1006,37 @@
     opts_table_verbose + \
     [
     opts_table_cb_nqv,
-    (NOEXECUTE,            False),
+    (NOEXECUTE,            False, [], {"type": "boolean"}),
 ]
 
 opts_table_origins = [
     opts_table_cb_origins,
-    (ORIGINS,              []),
+    (ORIGINS,              [], [], {"type": "array",
+                                    "items": {"type": "string"}
+                                   }),
 ]
 
 opts_table_stage = [
     opts_table_cb_stage,
-    (STAGE,                None),
+    (STAGE,                None, [], {"type": ["null", "string"]}),
 ]
 
 opts_table_missing = [
-    (IGNORE_MISSING,       False),
+    (IGNORE_MISSING,       False, [], {"type": "boolean"}),
 ]
 
 opts_table_actuators = [
     opts_table_cb_actuators,
-    (SYNC_ACT,             False),
-    (ACT_TIMEOUT,          None)
+    (SYNC_ACT,             False, [], {"type": "boolean"}),
+    (ACT_TIMEOUT,          None,  [], {"type": ["null", "integer"],
+        "minimum": 0})
 ]
 
 opts_table_publishers = [
     opts_table_cb_publishers,
-    (PUBLISHERS, []),
+    (PUBLISHERS, [], [], {"type": "array",
+                          "items": {"type": "string"}
+                         }),
 ]
 
 #
@@ -754,6 +1065,17 @@
     opts_table_actuators + \
     []
 
+opts_set_publisher = \
+    opts_table_ssl + \
+    opts_table_pub_opts + \
+    opts_table_pub_props + \
+    opts_table_pub_search + \
+    []
+
+opts_info = \
+    opts_table_info + \
+    []
+
 # "update" cmd inherits all main cmd options
 opts_update = \
     opts_main + \
@@ -861,11 +1183,11 @@
     opts_table_verbose + \
     [
     opts_cb_list,
-    (LIST_INSTALLED_NEWEST, False),
-    (LIST_ALL,              False),
-    (LIST_NEWEST,           False),
-    (SUMMARY,               False),
-    (LIST_UPGRADABLE,       False),
+    (LIST_INSTALLED_NEWEST, False, [], {"type": "boolean"}),
+    (LIST_ALL,              False, [], {"type": "boolean"}),
+    (LIST_NEWEST,           False, [], {"type": "boolean"}),
+    (SUMMARY,               False, [], {"type": "boolean"}),
+    (LIST_UPGRADABLE,       False, [], {"type": "boolean"}),
 ]
 
 opts_dehydrate = \
@@ -890,6 +1212,10 @@
     opts_table_cb_nqv
 ]
 
+opts_publisher = \
+    opts_table_pub_list + \
+    []
+
 pkg_op_opts = {
 
     pkgdefs.PKG_OP_ATTACH         : opts_attach_linked,
@@ -900,17 +1226,21 @@
     pkgdefs.PKG_OP_DETACH         : opts_detach_linked,
     pkgdefs.PKG_OP_EXACT_INSTALL  : opts_main,
     pkgdefs.PKG_OP_FIX            : opts_fix,
+    pkgdefs.PKG_OP_INFO           : opts_info,
     pkgdefs.PKG_OP_INSTALL        : opts_install,
     pkgdefs.PKG_OP_LIST           : opts_list_inventory,
     pkgdefs.PKG_OP_LIST_LINKED    : opts_list_linked,
     pkgdefs.PKG_OP_PROP_LINKED    : opts_list_property_linked,
     pkgdefs.PKG_OP_PUBCHECK       : [],
+    pkgdefs.PKG_OP_PUBLISHER_LIST : opts_publisher,
     pkgdefs.PKG_OP_REHYDRATE      : opts_dehydrate,
     pkgdefs.PKG_OP_REVERT         : opts_revert,
     pkgdefs.PKG_OP_SET_MEDIATOR   : opts_set_mediator,
+    pkgdefs.PKG_OP_SET_PUBLISHER  : opts_set_publisher,
     pkgdefs.PKG_OP_SET_PROP_LINKED: opts_set_property_linked,
     pkgdefs.PKG_OP_SYNC           : opts_sync_linked,
     pkgdefs.PKG_OP_UNINSTALL      : opts_uninstall,
+    pkgdefs.PKG_OP_UNSET_PUBLISHER: [],
     pkgdefs.PKG_OP_UPDATE         : opts_update,
     pkgdefs.PKG_OP_VERIFY         : opts_verify
 }
@@ -942,7 +1272,9 @@
                 if len(o) == 2:
                         opt_name, default = o
                 elif len(o) == 3:
-                        opt_name, default, valid_args = o
+                        opt_name, default, dummy_valid_args = o
+                elif len(o) == 4:
+                        opt_name, default, dummy_valid_args, dummy_schema = o
                 if opt_name == opt:
                         return default
 
@@ -986,6 +1318,8 @@
                         avail_opt, default = o
                 elif len(o) == 3:
                         avail_opt, default, valid_args = o
+                elif len(o) == 4:
+                        avail_opt, default, valid_args, schema = o
                 # for options not given we substitue the default value
                 if avail_opt not in opts:
                         rv[avail_opt] = default
@@ -1012,7 +1346,8 @@
                         if raise_error:
                                 raise InvalidOptionError(
                                     InvalidOptionError.ARG_INVALID,
-                                    [opts[avail_opt], avail_opt])
+                                    [opts[avail_opt], avail_opt],
+                                    valid_args=valid_args)
 
                 rv[avail_opt] = opts[avail_opt]
 
--- a/src/modules/client/pkg_solver.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/pkg_solver.py	Fri Apr 03 19:02:53 2015 -0700
@@ -841,7 +841,7 @@
                 # Because we have introduced exact-install where
                 # self.__installed_fmris will be empty, in order to prevent
                 # downgrading, we need to look up the full installed dictionary
-                # stored in self.__installed_dict_tmp.
+                # stored in installed_dict_tmp.
                 if exact_install:
                         installed_fmris_tmp = frozenset(
                             installed_dict_tmp.values())
--- a/src/modules/client/pkgdefs.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/pkgdefs.py	Fri Apr 03 19:02:53 2015 -0700
@@ -21,7 +21,7 @@
 #
 
 #
-# Copyright (c) 2011, 2014, Oracle and/or its affiliates. All rights reserved.
+# Copyright (c) 2011, 2015, Oracle and/or its affiliates. All rights reserved.
 #
 
 """
@@ -54,17 +54,21 @@
 PKG_OP_DETACH          = "detach-linked"
 PKG_OP_EXACT_INSTALL   = "exact-install"
 PKG_OP_FIX             = "fix"
+PKG_OP_INFO            = "info"
 PKG_OP_INSTALL         = "install"
 PKG_OP_LIST            = "list"
 PKG_OP_LIST_LINKED     = "list-linked"
 PKG_OP_PROP_LINKED     = "property-linked"
 PKG_OP_PUBCHECK        = "pubcheck-linked"
+PKG_OP_PUBLISHER_LIST  = "publisher"
 PKG_OP_REHYDRATE       = "rehydrate"
 PKG_OP_REVERT          = "revert"
 PKG_OP_SET_MEDIATOR    = "set-mediator"
+PKG_OP_SET_PUBLISHER   = "set-publisher"
 PKG_OP_SET_PROP_LINKED = "set-property-linked"
 PKG_OP_SYNC            = "sync-linked"
 PKG_OP_UNINSTALL       = "uninstall"
+PKG_OP_UNSET_PUBLISHER = "unset-publisher"
 PKG_OP_UPDATE          = "update"
 PKG_OP_VERIFY          = "verify"
 pkg_op_values          = frozenset([
@@ -76,17 +80,21 @@
     PKG_OP_DETACH,
     PKG_OP_EXACT_INSTALL,
     PKG_OP_FIX,
+    PKG_OP_INFO,
     PKG_OP_INSTALL,
     PKG_OP_LIST,
     PKG_OP_LIST_LINKED,
     PKG_OP_PROP_LINKED,
     PKG_OP_PUBCHECK,
+    PKG_OP_PUBLISHER_LIST,
     PKG_OP_REVERT,
     PKG_OP_REHYDRATE,
     PKG_OP_SET_MEDIATOR,
+    PKG_OP_SET_PUBLISHER,
     PKG_OP_SET_PROP_LINKED,
     PKG_OP_SYNC,
     PKG_OP_UNINSTALL,
+    PKG_OP_UNSET_PUBLISHER,
     PKG_OP_UPDATE,
     PKG_OP_VERIFY
 ])
--- a/src/modules/client/progress.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/progress.py	Fri Apr 03 19:02:53 2015 -0700
@@ -33,6 +33,7 @@
 import itertools
 import math
+import simplejson as json
 import sys
 import time
 from functools import wraps
 
@@ -1874,7 +1875,8 @@
 
                 # The following string was originally expressed as
                 # "%*s: ". % \
-                #     (self.phase_max_width, self.phase_names[self.major_phase])
+                #     (self.phase_max_width, self.phase_names[self.major_phase]
+                #     )
                 # however xgettext incorrectly flags this as an improper use of
                 # non-parameterized messages, which gets detected as an error
                 # during our build.  So instead, we express the string using
@@ -2221,6 +2223,482 @@
                     adjs=self.reversion_adjs.items))
 
 
+class RADProgressTracker(CommandLineProgressTracker):
+        """This progress tracker is a subclass of CommandLineProgressTracker
+        which is specific for RAD progress event.
+        """
+
+        # Default to printing periodic output every 5 seconds.
+        TERM_DELAY = 5.0
+
+        # Output constants.
+        O_PHASE = "phase"
+        O_MESSAGE = "message"
+        O_TIME = "time_taken"
+        O_TIME_U = "time_unit"
+        O_TYPE = "type"
+        O_PRO_ITEMS = "processed_items"
+        O_GOAL_ITEMS = "goal_items"
+        O_PCT_DONE = "percent_done"
+        O_ITEM_U = "item_unit"
+        O_SPEED = "speed"
+        O_RUNNING = "running"
+        O_GOAL_PRO_ITEMS = "goal_processed_items"
+        O_REV_ITEMS = "reversioned_items"
+        O_GOAL_REV_ITEMS = "goal_reversion_items"
+        O_ADJ_ITEMS = "adjusted_items"
+        O_LI_OUTPUT = "li_output"
+        O_LI_ERROR = "li_errors"
+
+        def __init__(self, term_delay=TERM_DELAY, prog_event_handler=None):
+                CommandLineProgressTracker.__init__(self,
+                    term_delay=term_delay)
+                self.__prog_event_handler = prog_event_handler
+
+        def _phase_prefix(self):
+                if self.major_phase == self.PHASE_UTILITY:
+                        return ""
+
+                return self.phase_names[self.major_phase]
+
+        #
+        # Helper routines
+        #
+        def __prep_prog_json_str(self, msg=None, phase=None, prog_json=None):
+                # Prepare a JSON-formatted progress string.
+                phase_name = self._phase_prefix()
+                if phase:
+                        phase_name = phase
+                if prog_json:
+                        ret_json = prog_json
+                else:
+                        ret_json = {self.O_PHASE: phase_name,
+                                    self.O_MESSAGE: msg
+                                   }
+                return json.dumps(ret_json)
+
+        def __generic_start(self, msg):
+                # In the case of listing/up-to-date check operations, we
+                # don't want to output planning information, so skip.
+                if self.purpose != self.PURPOSE_NORMAL:
+                        return
+
+                prog_str = self.__prep_prog_json_str(msg)
+                # If event handler is set, report an event. Otherwise, print.
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+                # indicate that we just printed.
+                self._ptimer.reset_now()
+
+        def __generic_done(self, msg=None, phase=None, prog_json=None):
+                # See __generic_start above.
+                if self.purpose != self.PURPOSE_NORMAL:
+                        return
+                if msg is None:
+                        msg = _("Done")
+                prog_str = self.__prep_prog_json_str(msg, phase=phase,
+                        prog_json=prog_json)
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str, end='\n')
+                self._ptimer.reset()
+
+        def __generic_done_item(self, item, msg=None):
+                # See __generic_start above.
+                if self.purpose != self.PURPOSE_NORMAL:
+                        return
+                if msg is None:
+                        if global_settings.client_output_verbose > 0:
+                                msg = _("Done ({elapsed:>.3f}s)")
+                        else:
+                                msg = _("Done")
+                outmsg = msg.format(elapsed=item.elapsed())
+                prog_str = self.__prep_prog_json_str(outmsg)
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str, end='\n')
+                self._ptimer.reset()
+
+        def _change_purpose(self, op, np):
+                self._ptimer.reset()
+                if np == self.PURPOSE_PKG_UPDATE_CHK:
+                        prog_str = self.__prep_prog_json_str(
+                            _("Checking that pkg(5) is up to date ..."))
+                        if self.__prog_event_handler:
+                                self.__prog_event_handler(desc=prog_str+"\n")
+                        else:
+                                self._pe.cprint(prog_str)
+
+        def _cache_cats_output(self, outspec):
+                if outspec.first:
+                        self.__generic_start(_("Caching catalogs ..."))
+                if outspec.last:
+                        self.__generic_done()
+
+        def _load_cat_cache_output(self, outspec):
+                if outspec.first:
+                        self.__generic_start(_("Loading catalog cache ..."))
+                if outspec.last:
+                        self.__generic_done()
+
+        def _refresh_output_progress(self, outspec):
+                # See __generic_start above.
+                if self.purpose != self.PURPOSE_NORMAL:
+                        return
+                if "startpublisher" in outspec.changed:
+                        p = self.pub_refresh.curinfo.prefix
+                        if self.refresh_target_catalog:
+                                m = _("Retrieving target catalog '{0}' "
+                                    "...").format(p)
+                        elif self.refresh_full_refresh:
+                                m = _("Retrieving catalog '{0}' ...").format(p)
+                        else:
+                                m = _("Refreshing catalog '{0}' ...").format(p)
+                        self.__generic_start(m)
+                elif "endpublisher" in outspec.changed:
+                        self.__generic_done()
+
+        def _plan_output(self, outspec, planitem):
+                if outspec.first:
+                        self.__generic_start(_("{0} ...").format(planitem.name))
+                if outspec.last:
+                        self.__generic_done_item(planitem)
+
+        def _plan_output_all_done(self):
+                prog_json = {self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Planning completed"),
+                    self.O_TIME: self.plan_generic.elapsed(),
+                    self.O_TIME_U: _("second")}
+                self.__generic_done(prog_json=prog_json)
+
+        def _mfst_fetch(self, outspec):
+                if not self._ptimer.time_to_print() and \
+                    not outspec.first and not outspec.last:
+                        return
+                if self.purpose != self.PURPOSE_NORMAL:
+                        return
+
+                # Reset timer; this prevents double printing for
+                # outspec.first and then again for the timer expiration
+                if outspec.first:
+                        self._ptimer.reset_now()
+
+                #
+                # There are a couple of reasons we might fetch manifests--
+                # pkgrecv, pkglint, etc. can all do this.  _phase_prefix()
+                # adjusts the output based on the major phase.
+                #
+                goalitems = self.mfst_fetch.goalitems
+                if goalitems is None:
+                        goalitems = 0
+                prog_str = json.dumps({self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Fetching manifests"),
+                    self.O_PRO_ITEMS: self.mfst_fetch.items,
+                    self.O_GOAL_ITEMS: goalitems,
+                    self.O_PCT_DONE: int(self.mfst_fetch.pctdone()),
+                    self.O_ITEM_U: _("manifest")
+                    })
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def _dl_output(self, outspec):
+                if not self._ptimer.time_to_print() and not outspec.first and \
+                    not outspec.last:
+                        return
+
+                # Reset timer; this prevents double printing for
+                # outspec.first and then again for the timer expiration
+                if outspec.first:
+                        self._ptimer.reset_now()
+
+                if not outspec.last:
+                        speed = self.dl_estimator.get_speed_estimate()
+                else:
+                        speed = self.dl_estimator.get_final_speed()
+                speedstr = "" if speed is None else \
+                    "({0})".format(self.dl_estimator.format_speed(speed))
+
+                if not outspec.last:
+                        # 'first' or time to print
+                        prog_str = json.dumps({
+                            self.O_PHASE: self._phase_prefix(),
+                            self.O_MESSAGE: _("Downloading"),
+                            self.O_PRO_ITEMS: self.dl_bytes.items,
+                            self.O_GOAL_ITEMS: self.dl_bytes.goalitems,
+                            self.O_PCT_DONE: int(self.dl_bytes.pctdone()),
+                            self.O_SPEED: speedstr,
+                            self.O_ITEM_U: _("byte")
+                            })
+                        if self.__prog_event_handler:
+                                self.__prog_event_handler(desc=prog_str+"\n")
+                        else:
+                                self._pe.cprint(prog_str)
+                else:
+                        # 'last'
+                        prog_json = {self.O_PHASE: self._phase_prefix(),
+                            self.O_MESSAGE: _("Download completed"),
+                            self.O_PRO_ITEMS: self.dl_bytes.goalitems,
+                            self.O_SPEED: speedstr,
+                            self.O_ITEM_U: _("byte"),
+                            self.O_TIME: self.dl_estimator.elapsed(),
+                            self.O_TIME_U: _("second")
+                            }
+                        self.__generic_done(prog_json=prog_json)
+
+        def _republish_output(self, outspec):
+                if "startpkg" in outspec.changed:
+                        pkgfmri = self.repub_pkgs.curinfo
+                        self.__generic_start(_("Republish: {0} ... ").format(
+                            pkgfmri.get_fmri(anarchy=True)))
+                if "endpkg" in outspec.changed:
+                        self.__generic_done()
+
+        def _archive_output(self, outspec):
+                if not self._ptimer.time_to_print() and not outspec:
+                        return
+                if outspec.first:
+                        # tell ptimer that we just printed.
+                        self._ptimer.reset_now()
+
+                if outspec.last:
+                        prog_json = {self.O_PHASE: self._phase_prefix(),
+                            self.O_MESSAGE: _("Archiving completed"),
+                            self.O_PRO_ITEMS: self.archive_bytes.goalitems,
+                            self.O_ITEM_U: _("byte"),
+                            self.O_TIME: self.archive_items.elapsed(),
+                            self.O_TIME_U: _("second")
+                            }
+                        self.__generic_done(prog_json=prog_json)
+                        return
+
+                prog_str = json.dumps({self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Archiving"),
+                    self.O_PRO_ITEMS: self.archive_bytes.items,
+                    self.O_GOAL_ITEMS: self.archive_bytes.goalitems,
+                    self.O_PCT_DONE: int(self.archive_bytes.pctdone()),
+                    self.O_ITEM_U: _("byte")
+                    })
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        #
+        # The progress tracking infrastructure wants to tell us about each
+        # kind of action activity (install, remove, update).  For this
+        # progress tracker, we don't really care to expose that to the user,
+        # so we work in terms of total actions instead.
+        #
+        def _act_output(self, outspec, actionitem):
+                if not self._ptimer.time_to_print() and not outspec.first:
+                        return
+                # reset timer, since we're definitely printing now...
+                self._ptimer.reset_now()
+                total_actions = \
+                    sum(x.items for x in self._actionitems.values())
+                total_goal = \
+                    sum(x.goalitems for x in self._actionitems.values())
+                prog_str = json.dumps({self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Action activity"),
+                    self.O_PRO_ITEMS: total_actions,
+                    self.O_GOAL_ITEMS: total_goal,
+                    self.O_TYPE: actionitem.name,
+                    self.O_ITEM_U: _("action")
+                    })
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def _act_output_all_done(self):
+                total_goal = \
+                    sum(x.goalitems for x in self._actionitems.values())
+                total_time = \
+                    sum(x.elapsed() for x in self._actionitems.values())
+                if total_goal == 0:
+                        return
+
+                prog_json = {self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Completed actions activities"),
+                    self.O_PRO_ITEMS: total_goal,
+                    self.O_ITEM_U: _("action"),
+                    self.O_TIME: total_time,
+                    self.O_TIME_U: _("second")
+                    }
+                prog_str = self.__prep_prog_json_str(prog_json=prog_json)
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def _job_output(self, outspec, jobitem):
+                if outspec.first:
+                        self.__generic_start("{0} ... ".format(jobitem.name))
+                if outspec.last:
+                        self.__generic_done_item(jobitem)
+
+        def _lint_output(self, outspec):
+                if outspec.first:
+                        if self.lint_phasetype == self.LINT_PHASETYPE_SETUP:
+                                msg = "{0} ... ".format(
+                                    self.lintitems.name)
+                                prog_str = json.dumps({
+                                    self.O_PHASE: _("Setup"),
+                                    self.O_MESSAGE: msg
+                                    })
+                                if self.__prog_event_handler:
+                                        self.__prog_event_handler(
+                                            desc=prog_str+"\n")
+                                else:
+                                        self._pe.cprint(prog_str)
+                        elif self.lint_phasetype == self.LINT_PHASETYPE_EXECUTE:
+                                msg = "# --- {0} ---".format(
+                                    self.lintitems.name)
+                                prog_str = json.dumps({
+                                    self.O_PHASE: _("Execute"),
+                                    self.O_MESSAGE: msg
+                                    })
+                                if self.__prog_event_handler:
+                                        self.__prog_event_handler(
+                                            desc=prog_str+"\n")
+                                else:
+                                        self._pe.cprint(prog_str)
+                if outspec.last:
+                        if self.lint_phasetype == self.LINT_PHASETYPE_SETUP:
+                                self.__generic_done(phase=_("Setup"))
+                        elif self.lint_phasetype == self.LINT_PHASETYPE_EXECUTE:
+                                pass
+
+        def _li_recurse_start_output(self):
+                if self.linked_pkg_op == pkgdefs.PKG_OP_PUBCHECK:
+                        self.__generic_start(
+                            _("Linked image publisher check ..."))
+                        return
+
+        def _li_recurse_end_output(self):
+                if self.linked_pkg_op == pkgdefs.PKG_OP_PUBCHECK:
+                        self.__generic_done()
+                        return
+                prog_str = self.__prep_prog_json_str(
+                    _("Finished processing linked images."))
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(
+                            desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def __li_dump_output(self, output):
+                if not output:
+                        return []
+                lines = output.splitlines()
+                return lines
+
+        def _li_recurse_output_output(self, lin, stdout, stderr):
+                if not stdout and not stderr:
+                        return
+                prog_json = {self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Linked image '{0}' output:").format(lin)}
+                prog_json[self.O_LI_OUTPUT] = self.__li_dump_output(stdout)
+                prog_json[self.O_LI_ERROR] = self.__li_dump_output(stderr)
+                prog_str = self.__prep_prog_json_str(prog_json=prog_json)
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(
+                            desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def _li_recurse_status_output(self, done):
+                if self.linked_pkg_op == pkgdefs.PKG_OP_PUBCHECK:
+                        return
+
+                prog_str = json.dumps({self.O_PHASE: self._phase_prefix(),
+                    self.O_MESSAGE: _("Linked images status"),
+                    self.O_PRO_ITEMS: done,
+                    self.O_GOAL_ITEMS: self.linked_total,
+                    self.O_ITEM_U: _("linked image"),
+                    self.O_RUNNING: [str(i) for i in self.linked_running]
+                    })
+
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(
+                            desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        def _li_recurse_progress_output(self, lin):
+                if self.linked_pkg_op == pkgdefs.PKG_OP_PUBCHECK:
+                        return
+
+        def _reversion(self, pfmri, outspec):
+                if not self._ptimer.time_to_print() and not outspec:
+                        return
+
+                if outspec.first:
+                        # tell ptimer that we just printed.
+                        self._ptimer.reset_now()
+
+                if outspec.last:
+                        prog_json = {self.O_PHASE: _("Reversion"),
+                            self.O_MESSAGE: _("Done"),
+                            self.O_PRO_ITEMS: self.reversion_pkgs.items,
+                            self.O_REV_ITEMS: self.reversion_revs.items,
+                            self.O_ADJ_ITEMS: self.reversion_adjs.items,
+                            self.O_ITEM_U: _("package")
+                            }
+                        self.__generic_done(prog_json=prog_json)
+                        return
+
+                prog_str = json.dumps({self.O_PHASE: _("Reversion"),
+                    self.O_MESSAGE: "Reversioning",
+                    self.O_PRO_ITEMS: self.reversion_pkgs.items,
+                    self.O_GOAL_PRO_ITEMS: self.reversion_pkgs.goalitems,
+                    self.O_REV_ITEMS: self.reversion_revs.items,
+                    self.O_GOAL_REV_ITEMS: self.reversion_revs.goalitems,
+                    self.O_ADJ_ITEMS: self.reversion_adjs.items,
+                    self.O_ITEM_U: _("package")
+                    })
+                if self.__prog_event_handler:
+                        self.__prog_event_handler(
+                            desc=prog_str+"\n")
+                else:
+                        self._pe.cprint(prog_str)
+
+        @classmethod
+        def get_json_schema(cls):
+                """Construct json schema."""
+
+                json_schema = {"$schema":
+                    "http://json-schema.org/draft-04/schema#",
+                    "title": "progress schema",
+                    "type": "object",
+                    "properties": {cls.O_PHASE:  {"type": "string"},
+                        cls.O_MESSAGE: {"type": "string"},
+                        cls.O_TIME: {"type": "number"},
+                        cls.O_TIME_U: {"type": "string"},
+                        cls.O_TYPE: {"type": "string"},
+                        cls.O_PRO_ITEMS: {"type": "number"},
+                        cls.O_GOAL_ITEMS: {"type": "number"},
+                        cls.O_PCT_DONE: {"type": "number"},
+                        cls.O_ITEM_U: {"type": "string"},
+                        cls.O_SPEED: {"type": "string"},
+                        cls.O_RUNNING: {"type": "array"},
+                        cls.O_GOAL_PRO_ITEMS: {"type": "number"},
+                        cls.O_REV_ITEMS: {"type": "number"},
+                        cls.O_GOAL_REV_ITEMS: {"type": "number"},
+                        cls.O_ADJ_ITEMS: {"type": "number"},
+                        cls.O_LI_OUTPUT: {"type": "array"},
+                        cls.O_LI_ERROR: {"type": "array"},
+                        },
+                    "required": [cls.O_PHASE, cls.O_MESSAGE]
+                }
+                return json_schema
+
 class LinkedChildProgressTracker(CommandLineProgressTracker):
         """This tracker is used for recursion with linked children.
         This is intended for use only by linked images."""
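
A minimal sketch of consuming the JSON events RADProgressTracker emits; the handler name, the two-second delay, and the print format are illustrative assumptions, while the "phase" and "message" keys come from the O_* constants above:

import simplejson as json
import pkg.client.progress as progress

def on_event(desc=None):
        # The tracker delivers one newline-terminated JSON object per
        # event via the 'desc' keyword (see the handler calls above).
        event = json.loads(desc)
        print "{0}: {1}".format(event.get("phase"), event.get("message"))

tracker = progress.RADProgressTracker(term_delay=2.0,
    prog_event_handler=on_event)
# Pass 'tracker' wherever a ProgressTracker is accepted, for example to
# pkg.client.api.ImageInterface(); events then arrive through on_event.
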
--- a/src/modules/client/publisher.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/client/publisher.py	Fri Apr 03 19:02:53 2015 -0700
@@ -2526,6 +2526,12 @@
                 # Add new values to properties.
                 for n in add_prop_values.keys():
                         self.properties.setdefault(n, [])
+                        if not isinstance(self.properties[n], list):
+                                raise api_errors.InvalidPropertyValue(_(
+                                    "Cannot add a value to a single valued "
+                                    "property, The property name is '{name}' "
+                                    "and the current value is '{value}'"
+                                    ).format(name=n, value=self.properties[n]))
                         self.properties[n].extend(add_prop_values[n])
                 # Remove values from properties.
                 for n in remove_prop_values.keys():
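
For context on the traceback named in the synopsis: setdefault() only installs the empty list when the key is absent, so a value stored earlier by --set-property stays a plain string and the subsequent extend() raised an AttributeError. A stripped-down illustration with a plain dict (the real self.properties is a configuration mapping):

properties = {}
properties["alias"] = "value1"        # what --set-property leaves behind
properties.setdefault("alias", [])    # no-op: the key already exists
try:
        properties["alias"].extend(["value2"])
except AttributeError:
        # Without the isinstance() guard added above, this
        # AttributeError escaped to the user as a traceback; the fix
        # raises InvalidPropertyValue instead.
        pass
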
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/modules/client/rad_pkg.py	Fri Apr 03 19:02:53 2015 -0700
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+import pkg
+import pkg.client.client_api as entry
+import pkg.client.progress as progress
+
+# progress delay.
+PROG_DELAY   = 5.0
+
+rad2pkg_cmds_mapping = {
+    "list_packages": "list",
+    "set_publisher": "set-publisher",
+    "unset_publisher": "unset-publisher",
+    "exact_install": "exact-install"
+    }
+
+def __init_prog_tracker(prog_event_handler, prog_delay):
+        """Initialize progress tracker."""
+
+        progresstracker = progress.RADProgressTracker(
+            prog_event_handler=prog_event_handler,
+            term_delay=prog_delay)
+        return progresstracker
+
+def __correspond_pkg_cmd(rad_operation):
+        """Need to replace rad operation names with pkg subcommand."""
+
+        if rad_operation in rad2pkg_cmds_mapping:
+                pkg_cmd = rad2pkg_cmds_mapping[rad_operation]
+        else:
+                pkg_cmd = rad_operation
+        return pkg_cmd
+
+def rad_get_input_schema(operation):
+        """Get the input schema for RAD operation."""
+
+        pkg_cmd = __correspond_pkg_cmd(operation)
+        return entry._get_pkg_input_schema(pkg_cmd, opts_mapping)
+
+def rad_get_output_schema(operation):
+        """Get the output schema for RAD operation."""
+
+        pkg_cmd = __correspond_pkg_cmd(operation)
+        return entry._get_pkg_output_schema(pkg_cmd)
+
+def rad_get_progress_schema():
+        """Get the progress output schema for RAD operations."""
+
+        return progress.RADProgressTracker.get_json_schema()
+
+def rad_pkg(subcommand, pargs_json=None, opts_json=None, pkg_image=None,
+    prog_event_handler=None, prog_delay=PROG_DELAY):
+        """Perform pkg operation.
+
+        subcommand: a string type pkg subcommand.
+
+        pargs_json: a JSON blob containing a list of pargs.
+
+        opts_json: a JSON blob containing a dictionary of pkg
+        subcommand options.
+
+        pkg_image: a string type alternate image path.
+
+        prog_event_handler: an optional callback invoked with each
+        progress event.
+
+        prog_delay: the number of seconds between periodic progress
+        events.
+        """
+
+        ret_json = None
+
+        rad_prog_tracker = __init_prog_tracker(prog_event_handler, prog_delay)
+        try:
+                ret_json = entry._pkg_invoke(subcommand=subcommand,
+                    pargs_json=pargs_json, opts_json=opts_json,
+                    pkg_image=pkg_image, prog_delay=prog_delay,
+                    opts_mapping=opts_mapping, prog_tracker=rad_prog_tracker)
+                return ret_json
+        except Exception as ex:
+                if not ret_json:
+                        ret_json = {"status": 99, "errors": [{"reason":
+                            str(ex)}]}
+                return ret_json
+
+#
+# Mapping of the internal option name to an alternate name that the user
+# provides via keyword argument.
+#
+# {option_name: alternate_name}
+#
+opts_mapping = {}
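
A minimal usage sketch for the rad_pkg() entry point above; the handler, the root-image path, and the chosen option are assumptions for illustration:

import sys
import simplejson as json
import pkg.client.rad_pkg as rad_pkg

def handler(desc=None):
        # Each call delivers one newline-terminated JSON progress event.
        sys.stdout.write(desc)

ret = rad_pkg.rad_pkg("list",
    pargs_json=json.dumps([]),                   # no package patterns
    opts_json=json.dumps({"list_newest": True}),
    pkg_image="/",                               # assumed image root
    prog_event_handler=handler)
print ret["status"]
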
--- a/src/modules/misc.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/modules/misc.py	Fri Apr 03 19:02:53 2015 -0700
@@ -706,6 +706,21 @@
 class ProcFS(object):
         """This class is used as an interface to procfs."""
 
+        # Detect whether python is running in 32-bit or 64-bit
+        # environment based on pointer size.
+        _running_bit = struct.calcsize("P") * 8
+
+        actual_format = {32: {
+                              "long": "l",
+                              "uintptr_t": "I",
+                              "ulong": "L"
+                             },
+                         64: {
+                              "long": "q",
+                              "uintptr_t": "Q",
+                              "ulong": "Q"
+                             }}
+
         _ctype_formats = {
             # This dictionary maps basic c types into python format characters
             # that can be used with struct.unpack().  The format of this
@@ -717,26 +732,33 @@
             "char":        (1,  "c"),
             "char[]":      (1,  "s"),
             "int":         (1,  "i"),
-            "long":        (1,  "l"),
-            "uintptr_t":   (1,  "I"),
+            "long":        (1,  actual_format[_running_bit]["long"]),
+            "uintptr_t":   (1,  actual_format[_running_bit]["uintptr_t"]),
             "ushort_t":    (1,  "H"),
 
             # other simple types (repeat count should always be 1)
             "ctid_t":      (1,  "i"), # ctid_t -> id_t -> int
-            "dev_t":       (1,  "L"), # dev_t -> ulong_t
+
+            # dev_t -> ulong_t
+            "dev_t":       (1,  actual_format[_running_bit]["ulong"]),
             "gid_t":       (1,  "I"), # gid_t -> uid_t -> uint_t
             "pid_t":       (1,  "i"), # pid_t -> int
             "poolid_t":    (1,  "i"), # poolid_t -> id_t -> int
             "projid_t":    (1,  "i"), # projid_t -> id_t -> int
-            "size_t":      (1,  "L"), # size_t -> ulong_t
+
+            # size_t -> ulong_t
+            "size_t":      (1,  actual_format[_running_bit]["ulong"]),
             "taskid_t":    (1,  "i"), # taskid_t -> id_t -> int
-            "time_t":      (1,  "l"), # time_t -> long
+
+            # time_t -> long
+            "time_t":      (1,  actual_format[_running_bit]["long"]),
             "uid_t":       (1,  "I"), # uid_t -> uint_t
             "zoneid_t":    (1,  "i"), # zoneid_t -> id_t -> int
             "id_t":        (1,  "i"), # id_t -> int
 
             # structures must be represented as character arrays
-            "timestruc_t": (8,  "s"), # sizeof (timestruc_t) = 8
+            # sizeof (timestruc_t) is 8 in a 32-bit process, 16 in 64-bit.
+            "timestruc_t": (_running_bit / 4,  "s"),
         }
 
         _timestruct_desc = [
@@ -786,6 +808,12 @@
             ("int",         1,  "pr_filler"),
         ]
 
+        # For a 64-bit process, the alignment is off by 4 bytes starting at
+        # the pr_pctmem field, so insert an additional pad there.
+        if _running_bit == 64:
+                _psinfo_desc = _psinfo_desc[0:17] + [("int", 1, "dum_pad")] + \
+                    _psinfo_desc[17:]
+
         _struct_descriptions = {
             # this list contains all the known structure description lists
             # the entry format is: <structure name>: \
@@ -825,7 +853,6 @@
 
                 # unpack the data into a list
                 rv = list(struct.unpack(fmt, data))
-
                 # check for any nested data that needs unpacking
                 for index, v in enumerate(desc):
                         ctype = v[0]
@@ -840,13 +867,17 @@
         def psinfo():
                 """Read the psinfo file and return its contents."""
 
-                # This works only on Solaris, in 32-bit mode.  It may not work
-                # on older or newer versions than 5.11.  Ideally, we would use
-                # libproc, or check sbrk(0), but this is expedient.  In most
-                # cases (there's a small chance the file will decode, but
-                # incorrectly), failure will raise an exception, and we'll
+                # This works only on Solaris, in 32-bit or 64-bit mode.  It may
+                # not work on older or newer versions than 5.11.  Ideally, we
+                # would use libproc, or check sbrk(0), but this is expedient.
+                # In most cases (there's a small chance the file will decode,
+                # but incorrectly), failure will raise an exception, and we'll
                 # fail safe.
                 psinfo_size = 232
+
+                if ProcFS._running_bit == 64:
+                        psinfo_size = 288
+
                 try:
                         psinfo_data = file("/proc/self/psinfo").read(
                             psinfo_size)
@@ -861,7 +892,6 @@
 
                 return ProcFS._struct_unpack(psinfo_data, "psinfo_t")
 
-
 def __getvmusage():
         """Return the amount of virtual memory in bytes currently in use."""
 
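
The 32/64-bit switch above keys everything off the interpreter's pointer size; the same detection in isolation, with the format characters and psinfo sizes taken from the hunks above:

import struct

running_bit = struct.calcsize("P") * 8          # pointer size: 32 or 64
fmt_long = {32: "l", 64: "q"}[running_bit]      # matches actual_format above
psinfo_size = {32: 232, 64: 288}[running_bit]   # psinfo_t size per model
print running_bit, fmt_long, psinfo_size
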
@@ -1306,7 +1336,8 @@
                         portable.chown(path, uid, gid)
 
 
-def opts_parse(op, args, opts_table, opts_mapping, usage_cb=None):
+def opts_parse(op, args, opts_table, opts_mapping, usage_cb=None,
+    use_cli_opts=True, **opts_kv):
         """Generic table-based options parsing function.  Returns a tuple
         consisting of a list of parsed options in the form (option, argument)
         and the remaining unparsed options. The parsed-option list may contain
@@ -1319,7 +1350,7 @@
         'opts_table' is a list of options the operation supports.
         The format of the list entries should be a tuple containing the
         option and its default value:
-            (option, default_value)
+            (option, default_value, [valid values], [json schema])
         It is valid to have other entries in the list when they are required
         for additional option processing elsewhere. These are ignored here. If
         the list entry is a tuple, it must conform to the format outlined above.
@@ -1346,15 +1377,25 @@
         is the value of this option in the parsed option dictionary.
 
         'usage_cb' is a function pointer that should display usage information
-        and will be invoked if invalid arguments are detected."""
-
-        # list for getopt long options
-        opts_l_list = []
-        # getopt str for short options
-        opts_s_str = ""
-
-        # dict to map options returned by getopt to keys
-        opts_keys = dict()
+        and will be invoked if invalid arguments are detected.
+
+        'use_cli_opts' indicates whether the options to parse are CLI
+        options or key-value pair options.
+
+        'opts_kv' contains the user-provided options to parse; it is a
+        dictionary mapping option names to option arguments.
+        """
+
+        if use_cli_opts:
+                # list for getopt long options
+                opts_l_list = []
+                # getopt str for short options
+                opts_s_str = ""
+
+                # dict to map options returned by getopt to keys
+                opts_keys = dict()
+        else:
+                opts_name_mapping = {}
 
         for entry in opts_table:
                 # option table contains functions for verification, ignore here
@@ -1364,28 +1405,43 @@
                         opt, default = entry
                 elif len(entry) == 3:
                         opt, default, dummy_valid_args = entry
-                assert opt in opts_mapping
-                sopt, lopt = opts_mapping[opt]
-                # make sure an option was specified
-                assert sopt or lopt
-                if lopt != "":
-                        if default is None or type(default) == list:
-                                opts_l_list.append("{0}=".format(lopt))
-                        else:
-                                opts_l_list.append("{0}".format(lopt))
-                        opts_keys["--{0}".format(lopt)] = opt
-                if sopt != "":
-                        if default is None or type(default) == list:
-                                opts_s_str += "{0}:".format(sopt)
-                        else:
-                                opts_s_str += "{0}".format(sopt)
-                        opts_keys["-{0}".format(sopt)] = opt
+                elif len(entry) == 4:
+                        opt, default, dummy_valid_args, dummy_schema = entry
+                if use_cli_opts:
+                        assert opt in opts_mapping
+                        sopt, lopt = opts_mapping[opt]
+                        # make sure an option was specified
+                        assert sopt or lopt
+                        if lopt != "":
+                                if default is None or type(default) == list:
+                                        opts_l_list.append("{0}=".format(lopt))
+                                else:
+                                        opts_l_list.append("{0}".format(lopt))
+                                opts_keys["--{0}".format(lopt)] = opt
+                        if sopt != "":
+                                if default is None or type(default) == list:
+                                        opts_s_str += "{0}:".format(sopt)
+                                else:
+                                        opts_s_str += "{0}".format(sopt)
+                                opts_keys["-{0}".format(sopt)] = opt
+                else:
+                        # Add itself as a mapping for validation.
+                        opts_name_mapping[opt] = opt
+                        if opt in opts_mapping:
+                                optn = opts_mapping[opt]
+                                if optn:
+                                        opts_name_mapping[optn] = opt
 
         # Parse options.
-        try:
-                opts, pargs = getopt.getopt(args, opts_s_str, opts_l_list)
-        except getopt.GetoptError as e:
-                usage_cb(_("illegal option -- {0}").format(e.opt), cmd=op)
+        if use_cli_opts:
+                try:
+                        opts, pargs = getopt.getopt(args, opts_s_str,
+                            opts_l_list)
+                except getopt.GetoptError as e:
+                        usage_cb(_("illegal option -- {0}").format(e.opt),
+                            cmd=op)
+        else:
+                opts = opts_kv
 
         def get_default(option):
                 """Find the default value for a given option from opts_table."""
@@ -1396,45 +1452,69 @@
                                 opt, default = x
                         elif len(x) == 3:
                                 opt, default, dummy_valid_args = x
+                        elif len(x) == 4:
+                                opt, default, dummy_valid_args, \
+                                    dummy_schema = x
                         if option == opt:
                                 return default
 
-        # Assemble the options dictionary by passing in the right data types and
-        # take care of duplicates.
-        opt_dict = {}
-        for x in opts:
-                cli_opt, arg = x
-                opt = opts_keys[cli_opt]
-
+        def process_opts(opt, arg, opt_dict):
+                """Process option values."""
                 # Determine required option type based on the default value.
                 default = get_default(opt)
 
-                # Handle duplicates for integer and list types.
-                if type(default) == int:
-                        if opt in opt_dict:
-                                opt_dict[opt] += 1
-                        else:
-                                opt_dict[opt] = 1
-                        continue
-                if type(default) == list:
-                        if opt in opt_dict:
-                                opt_dict[opt].append(arg)
-                        else:
-                                opt_dict[opt] = [arg]
-                        continue
+                if use_cli_opts:
+                        # Handle duplicates for integer and list types.
+                        if type(default) == int:
+                                if opt in opt_dict:
+                                        opt_dict[opt] += 1
+                                else:
+                                        opt_dict[opt] = 1
+                                return
+                        if type(default) == list:
+                                if opt in opt_dict:
+                                        opt_dict[opt].append(arg)
+                                else:
+                                        opt_dict[opt] = [arg]
+                                return
 
                 # Boolean and string types can't be repeated.
                 if opt in opt_dict:
                         raise api_errors.InvalidOptionError(
                             api_errors.InvalidOptionError.OPT_REPEAT, [opt])
 
-                # For boolean options we have to toggle the default value.
+                # For boolean options we have to toggle the default value
+                # when in CLI mode.
                 if type(default) == bool:
-                        opt_dict[opt] = not default
+                        if use_cli_opts:
+                                opt_dict[opt] = not default
+                        else:
+                                opt_dict[opt] = arg
                 else:
                         opt_dict[opt] = arg
 
-        return opt_dict, pargs
+        # Assemble the options dictionary by passing in the right data types
+        # and take care of duplicates.
+        opt_dict = {}
+        if use_cli_opts:
+                for x in opts:
+                        cli_opt, arg = x
+                        opt = opts_keys[cli_opt]
+                        process_opts(opt, arg, opt_dict)
+
+                return opt_dict, pargs
+
+        for k, v in opts.items():
+                cli_opt, arg = k, v
+                if cli_opt in opts_name_mapping:
+                        cli_opt = opts_name_mapping[cli_opt]
+                else:
+                        raise api_errors.InvalidOptionError(
+                            api_errors.InvalidOptionError.GENERIC,
+                            [cli_opt])
+                process_opts(cli_opt, arg, opt_dict)
+
+        return opt_dict
 
 def api_cmdpath():
         """Returns the path to the executable that is invoking the api client
--- a/src/pkg/external_deps.txt	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/pkg/external_deps.txt	Fri Apr 03 19:02:53 2015 -0700
@@ -12,6 +12,7 @@
     pkg:/library/python/cherrypy-27
     pkg:/library/python/coverage-27
     pkg:/library/python/jsonrpclib-27
+    pkg:/library/python/jsonschema-27
     pkg:/library/python/locale-services
     pkg:/library/python/m2crypto-27
     pkg:/library/python/mako-27
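
The new jsonschema dependency backs validation of progress events against RADProgressTracker.get_json_schema(), as the new test suite does; a minimal check (the sample event is illustrative):

import jsonschema
import pkg.client.progress as progress

schema = progress.RADProgressTracker.get_json_schema()
event = {"phase": "Planning", "message": "Done"}
jsonschema.validate(event, schema)      # raises ValidationError on mismatch
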
--- a/src/pkg/manifests/developer:opensolaris:pkg5.p5m	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/pkg/manifests/developer:opensolaris:pkg5.p5m	Fri Apr 03 19:02:53 2015 -0700
@@ -38,6 +38,7 @@
 depend type=require fmri=pkg:/developer/versioning/mercurial
 depend type=require fmri=pkg:/library/python/coverage-27
 depend type=require fmri=pkg:/library/python/jsonrpclib-27
+depend type=require fmri=pkg:/library/python/jsonschema-27
 depend type=require fmri=pkg:/library/python/locale-services
 depend type=require fmri=pkg:/package/svr4
 depend type=require fmri=pkg:/runtime/python-27
--- a/src/pkg/manifests/package:pkg.p5m	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/pkg/manifests/package:pkg.p5m	Fri Apr 03 19:02:53 2015 -0700
@@ -34,8 +34,13 @@
 file path=$(PYDIRVP)/pkg-0.1-py2.7.egg-info
 dir  path=$(PYDIRVP)/pkg/64
 file path=$(PYDIRVP)/pkg/64/_varcet.so
+file path=$(PYDIRVP)/pkg/64/arch.so
+file path=$(PYDIRVP)/pkg/64/elf.so
+file path=$(PYDIRVP)/pkg/64/pspawn.so
 file path=$(PYDIRVP)/pkg/64/sha512_t.so
+file path=$(PYDIRVP)/pkg/64/solver.so
 file path=$(PYDIRVP)/pkg/64/sysattr.so
+file path=$(PYDIRVP)/pkg/64/syscallat.so
 file path=$(PYDIRVP)/pkg/__init__.py
 file path=$(PYDIRVP)/pkg/_varcet.so
 dir  path=$(PYDIRVP)/pkg/actions
@@ -86,6 +91,7 @@
 # it can't import libbe_py, and is graceful in the face of its absence.
 #
 file path=$(PYDIRVP)/pkg/client/bootenv.py pkg.depend.bypass-generate=.*libbe.*
+file path=$(PYDIRVP)/pkg/client/client_api.py
 file path=$(PYDIRVP)/pkg/client/debugvalues.py
 file path=$(PYDIRVP)/pkg/client/firmware.py
 file path=$(PYDIRVP)/pkg/client/history.py
@@ -109,6 +115,7 @@
 file path=$(PYDIRVP)/pkg/client/progress.py
 file path=$(PYDIRVP)/pkg/client/publisher.py
 file path=$(PYDIRVP)/pkg/client/query_parser.py
+file path=$(PYDIRVP)/pkg/client/rad_pkg.py
 file path=$(PYDIRVP)/pkg/client/sigpolicy.py
 dir  path=$(PYDIRVP)/pkg/client/transport
 file path=$(PYDIRVP)/pkg/client/transport/__init__.py
@@ -247,6 +254,7 @@
 dir  path=usr/share/lib/pkg
 file path=usr/share/lib/pkg/opensolaris.org.sections
 file path=usr/share/lib/pkg/pkglintrc
+file path=usr/share/lib/pkg/rad-invoke mode=0755
 dir  path=usr/share/lib/pkg/web
 dir  path=usr/share/lib/pkg/web/_themes
 dir  path=usr/share/lib/pkg/web/_themes/default
--- a/src/po/POTFILES.in	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/po/POTFILES.in	Fri Apr 03 19:02:53 2015 -0700
@@ -24,6 +24,7 @@
 modules/client/api.py
 modules/client/api_errors.py
 modules/client/bootenv.py
+modules/client/client_api.py
 modules/client/firmware.py
 modules/client/history.py
 modules/client/image.py
@@ -35,6 +36,7 @@
 modules/client/plandesc.py
 modules/client/progress.py
 modules/client/publisher.py
+modules/client/rad_pkg.py
 modules/client/transport/repo.py
 modules/config.py
 modules/file_layout/file_manager.py
@@ -72,6 +74,7 @@
 pkgrepo.py
 publish.py
 pull.py
+rad-invoke.py
 sign.py
 sysrepo.py
 util/publish/pkgdiff.py
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/rad-invoke.py	Fri Apr 03 19:02:53 2015 -0700
@@ -0,0 +1,132 @@
+#!/usr/bin/python2.7
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+import sys
+import gettext
+import getopt
+import locale
+import logging
+import os
+import pkg
+import pkg.client.rad_pkg as entry
+import pkg.misc as misc
+import simplejson as json
+
+# progress delay.
+PROG_DELAY   = 5.0
+
+class _InfoFilter(logging.Filter):
+        """Filter that passes only records at level INFO or below."""
+
+        def filter(self, rec):
+                return rec.levelno <= logging.INFO
+
+class _StreamHandler(logging.StreamHandler):
+        """Simple subclass to ignore exceptions raised during logging output."""
+
+        def handleError(self, record):
+                # Ignore exceptions raised during output to stdout/stderr.
+                return
+
+ips_logger = None
+
+def error(text):
+        """Create error message."""
+
+        if os.getenv("__IPS_INVOKE_IN_RAD") == "true":
+                return {"status": 1, "errors": [{"reason": text}]}
+        ips_logger.error(text)
+        sys.exit(1)
+
+def __init_log():
+        """Initialize logger."""
+
+        global ips_logger
+
+        ips_logger = logging.getLogger("__name__")
+        ips_logger.propagate = 0
+        ips_logger.setLevel(logging.INFO)
+
+        # This logger is for delivering the JSON result.  Use stderr to
+        # distinguish it from progress output.
+        handler = _StreamHandler(sys.stderr)
+        handler.setLevel(logging.INFO)
+
+        # If this script is used in RAD, only retrieve log levels <= INFO.
+        if os.getenv("__IPS_INVOKE_IN_RAD") == "true":
+                handler.addFilter(_InfoFilter())
+        ips_logger.addHandler(handler)
+
+def main_func():
+        pkg_image = None
+        pargs_json = None
+        opts_json = None
+        prog_delay = PROG_DELAY
+        if os.getenv("__IPS_INVOKE_IN_RAD") != "true":
+                return error(_("This script can only be invoked by RAD"))
+        script_path = os.path.realpath(__file__)
+        try:
+                opts, pargs = getopt.getopt(sys.argv[1:],
+                    "hR:?", ["help", "pargs=", "opts=", "prog-delay="])
+                for opt, arg in opts:
+                        if opt == "--help" or opt == "-h":
+                                error("This is a RAD only script.")
+                        elif opt == "--pargs":
+                                pargs_json = arg
+                        elif opt == "--opts":
+                                opts_json = arg
+                        elif opt == "-R":
+                                pkg_image = arg
+                        elif opt == "--prog-delay":
+                                prog_delay = float(arg)
+                        else:
+                                error(_("unknown option {0} in file: {1}"
+                                    ).format(opt, script_path))
+        except getopt.GetoptError as e:
+                return error(_("illegal global option -- {0} in file: {1}"
+                    ).format(e.opt, script_path))
+        except ValueError as e:
+                return error(_("invalid option argument: {0} in file: {1}"
+                    ).format(str(e), script_path))
+        if len(pargs) < 1:
+                return error(_("missing argument in file: {0}").format(
+                    script_path))
+
+        return entry.rad_pkg(pargs[0], pargs_json=pargs_json,
+            opts_json=opts_json, pkg_image=pkg_image,
+            prog_delay=prog_delay)
+
+if __name__ == "__main__":
+        misc.setlocale(locale.LC_ALL, "")
+        gettext.install("pkg", "/usr/share/locale",
+            codeset=locale.getpreferredencoding())
+        __init_log()
+        ret_json = main_func()
+        ips_logger.info(json.dumps(ret_json))
+        try:
+                logging.shutdown()
+        except IOError:
+                # Ignore python's spurious pipe problems.
+                pass
+        sys.exit(ret_json["status"])
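
For illustration, how a consumer might drive the installed script (the delivery path comes from the package:pkg.p5m manifest above; treating the last stderr line as the result is an assumption based on __init_log()):

import os
import subprocess
import simplejson as json

env = dict(os.environ)
env["__IPS_INVOKE_IN_RAD"] = "true"     # the gate main_func() checks
proc = subprocess.Popen(
    ["/usr/share/lib/pkg/rad-invoke", "-R", "/",
     "--pargs", json.dumps([]),
     "--opts", json.dumps({"list_newest": True}),
     "list"],                           # pargs[0]: the pkg subcommand
    env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
# Progress events stream on stdout; the final JSON result is logged to
# stderr (see __init_log() above), so take its last line.
result = json.loads(err.splitlines()[-1])
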
--- a/src/setup.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/setup.py	Fri Apr 03 19:02:53 2015 -0700
@@ -122,6 +122,7 @@
 man5_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man5'
 
 resource_dir = 'usr/share/lib/pkg'
+rad_dir = 'usr/share/lib/pkg'
 transform_dir = 'usr/share/pkg/transforms'
 ignored_deps_dir = 'usr/share/pkg/ignored_deps'
 smf_app_dir = 'lib/svc/manifest/application/pkg'
@@ -176,6 +177,9 @@
         svc_share_dir: [
                 ['svc/pkg5_include.sh', 'pkg5_include.sh'],
                 ],
+        rad_dir: [
+                ["rad-invoke.py", "rad-invoke"],
+                ],
         }
 
 scripts_windows = {
@@ -211,6 +215,9 @@
                 ['depot.py', 'depot.py'],
                 ['scripts/pkg.depotd.sh', 'pkg.depotd'],
                 ],
+        rad_dir: [
+                ["rad-invoke.py", "rad-invoke"],
+                ],
         }
 
 # indexed by 'osname'
@@ -1502,9 +1509,7 @@
         link_args = [ "-zstrip-class=nonalloc" ]
 else:
         link_args = []
-# We don't support 64-bit yet, but 64-bit _actions.so, _common.so, and
-# _varcet.so are needed for a system repository mod_wsgi application,
-# sysrepo_p5p.py.
+
 ext_modules = [
         Extension(
                 'actions._actions',
@@ -1536,7 +1541,8 @@
                 include_dirs = include_dirs + ["."],
                 extra_compile_args = compile_args,
                 extra_link_args = link_args + solver_link_args,
-                define_macros = [('_FILE_OFFSET_BITS', '64')]
+                define_macros = [('_FILE_OFFSET_BITS', '64')],
+                build_64 = True
                 ),
         ]
 elf_libraries = None
@@ -1624,6 +1630,7 @@
                         libraries = elf_libraries,
                         extra_compile_args = compile_args,
                         extra_link_args = link_args,
+                        build_64 = True
                         ),
                 ]
 
@@ -1640,7 +1647,8 @@
                             include_dirs = include_dirs,
                             extra_compile_args = compile_args,
                             extra_link_args = link_args,
-                            define_macros = [('_FILE_OFFSET_BITS', '64')]
+                            define_macros = [('_FILE_OFFSET_BITS', '64')],
+                            build_64 = True
                             ),
                     Extension(
                             'pspawn',
@@ -1648,7 +1656,8 @@
                             include_dirs = include_dirs,
                             extra_compile_args = compile_args,
                             extra_link_args = link_args,
-                            define_macros = [('_FILE_OFFSET_BITS', '64')]
+                            define_macros = [('_FILE_OFFSET_BITS', '64')],
+                            build_64 = True
                             ),
                     Extension(
                             'syscallat',
@@ -1656,7 +1665,8 @@
                             include_dirs = include_dirs,
                             extra_compile_args = compile_args,
                             extra_link_args = link_args,
-                            define_macros = [('_FILE_OFFSET_BITS', '64')]
+                            define_macros = [('_FILE_OFFSET_BITS', '64')],
+                            build_64 = True
                             ),
                     Extension(
                             'sysattr',
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/tests/cli/t_client_api.py	Fri Apr 03 19:02:53 2015 -0700
@@ -0,0 +1,955 @@
+#!/usr/bin/python
+#
+# CDDL HEADER START
+#
+# The contents of this file are subject to the terms of the
+# Common Development and Distribution License (the "License").
+# You may not use this file except in compliance with the License.
+#
+# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
+# or http://www.opensolaris.org/os/licensing.
+# See the License for the specific language governing permissions
+# and limitations under the License.
+#
+# When distributing Covered Code, include this CDDL HEADER in each
+# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
+# If applicable, add the following below this CDDL HEADER, with the
+# fields enclosed by brackets "[]" replaced with your own identifying
+# information: Portions Copyright [yyyy] [name of copyright owner]
+#
+# CDDL HEADER END
+#
+
+#
+# Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+#
+
+import testutils
+if __name__ == "__main__":
+        testutils.setup_environment("../../../proto")
+import pkg5unittest
+
+import os
+import pkg.client.client_api as cli_api
+import pkg.client.progress as progress
+import simplejson as json
+import jsonschema
+
+from pkg.client import global_settings
+
+
+class TestClientApi(pkg5unittest.ManyDepotTestCase):
+        # Only start/stop the depot once (instead of for every test)
+        persistent_setup = True
+
+        foo1 = """
+            open foo@1,5.11-0
+            close """
+
+        foo10 = """
+            open foo@1.0,5.11-0
+            close """
+
+        foo11 = """
+            open foo@1.1,5.11-0
+            close """
+
+        foo12 = """
+            open foo@1.2,5.11-0
+            close """
+
+        foo121 = """
+            open foo@1.2.1,5.11-0
+            close """
+
+        food12 = """
+            open food@1.2,5.11-0
+            close """
+
+        newpkg10 = """
+            open newpkg@1.0
+            close """
+
+        newpkg210 = """
+            open newpkg2@1.0
+            close """
+
+        hierfoo10 = """
+            open hier/foo@1.0,5.11-0
+            close """
+
+        def setUp(self):
+                pkg5unittest.ManyDepotTestCase.setUp(self, ["test1", "test2",
+                    "test2"])
+
+                self.rurl1 = self.dcs[1].get_repo_url()
+                self.pkgsend_bulk(self.rurl1, (self.foo1, self.foo10,
+                    self.foo11, self.foo12, self.foo121, self.food12,
+                    self.hierfoo10, self.newpkg10, self.newpkg210))
+
+                # Ensure that the second repo's packages have exactly the same
+                # timestamps as those in the first ... by copying the repo over.
+                # If the repos need to have some contents which are different,
+                # send those changes after restarting depot 2.
+                d1dir = self.dcs[1].get_repodir()
+                d2dir = self.dcs[2].get_repodir()
+                self.copy_repository(d1dir, d2dir, { "test1": "test2" })
+
+                # The new repository won't have a catalog, so rebuild it.
+                self.dcs[2].get_repo(auto_create=True).rebuild()
+
+                # The third repository should remain empty and not be
+                # published to.
+
+                # Next, create the image and configure publishers.
+                self.image_create(self.rurl1, prefix="test1")
+                self.rurl2 = self.dcs[2].get_repo_url()
+                self.pkg("set-publisher -O " + self.rurl2 + " test2")
+
+                self.rurl3 = self.dcs[3].get_repo_url()
+
+        def __call_cmd(self, subcommand, args, opts):
+                retjson = cli_api._pkg_invoke(subcommand=subcommand,
+                    pargs_json=json.dumps(args), opts_json=json.dumps(opts))
+                return retjson
+
+        def test_01_invalid_pkg_invoke_args(self):
+                """Test invalid pkg_invoke args is handled correctly."""
+
+                pkgs = ["foo"]
+                opts = {"list_installed_newest": True, "list_all": True}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd(None, pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("Sub-command"
+                    in retjson["errors"][0]["reason"])
+
+                invalidpargs = {"invalid": -1}
+                retjson = self.__call_cmd("list", invalidpargs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("pargs_json is invalid"
+                    in retjson["errors"][0]["reason"])
+
+                invalidpargs = {"invalid": -1}
+                retjson = self.__call_cmd("publisher", invalidpargs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("pargs_json is invalid"
+                    in retjson["errors"][0]["reason"])
+
+                invalidpargs = "+1+1random"
+                retjson = cli_api._pkg_invoke(subcommand="list",
+                    pargs_json=invalidpargs,
+                    opts_json=json.dumps(opts))
+                self.assert_("errors" in retjson)
+                self.assert_("pargs_json is invalid"
+                    in retjson["errors"][0]["reason"])
+
+                invalidopts = "+1+1random"
+                retjson = cli_api._pkg_invoke(subcommand="list",
+                    pargs_json=json.dumps([]),
+                    opts_json=invalidopts)
+                self.assert_("errors" in retjson)
+                self.assert_("opts_json is invalid"
+                    in retjson["errors"][0]["reason"])
+
+        def test_02_valid_pkg_invoke_args(self):
+                """Test valid arguments for pkg json."""
+
+                self.image_create(self.rurl1, prefix="test1")
+                os.environ["PKG_IMAGE"] = self.img_path()
+                pkgs = ["foo"]
+                opts = {"list_newest": True}
+
+                self.pkg("install pkg://test1/foo")
+                retjson = cli_api._pkg_invoke(subcommand="list", pargs_json=None,
+                    opts_json=json.dumps(opts))
+                self.assert_("errors" not in retjson)
+
+                retjson = cli_api._pkg_invoke(subcommand="list",
+                    pargs_json=json.dumps(["foo"]),
+                    opts_json=None)
+                self.assert_("errors" not in retjson)
+
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" not in retjson)
+
+        def __schema_validation(self, input, schema):
+                """Return True if the input is valid against the schema."""
+
+                try:
+                        jsonschema.validate(input, schema)
+                        return True
+                except jsonschema.ValidationError:
+                        return False
+
+        def test_03_list_json_args_opts(self):
+                """Test json args or opts for list command."""
+
+                self.image_create(self.rurl1, prefix="test1")
+                pkgs = [1, 2, 3]
+                opts = {"list_installed_newest": True, "list_all": True}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = [None]
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = []
+                opts = {"list_installed_newest": 1}
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'boolean'" in
+                    retjson["errors"][0]["reason"])
+
+                opts = {"origins": 1}
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'array'" in
+                    retjson["errors"][0]["reason"])
+
+                opts = {"random": 1}
+                retjson = self.__call_cmd("list", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("invalid option" in \
+                    retjson["errors"][0]["reason"])
+
+                # Test args and opts directly against schema.
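+                # Each input schema describes a JSON object with "pargs_json"
+                # and "opts_json" members.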
+                pargs = "pargs_json"
+                list_schema = cli_api._get_pkg_input_schema("list")
+                list_input = {pargs: [], "opts_json": {}}
+                self.assert_(self.__schema_validation(list_input, list_schema))
+
+                list_input = {pargs: [12], "opts_json": {}}
+                self.assert_(not self.__schema_validation(list_input,
+                    list_schema))
+
+                list_input = {pargs: [],
+                    "opts_json": {"list_upgradable": "string"}}
+                self.assert_(not self.__schema_validation(list_input,
+                    list_schema))
+
+                list_input = {pargs: [], "opts_json": {"list_upgradable":
+                    False}}
+                self.assert_(self.__schema_validation(list_input,
+                    list_schema))
+
+                list_input = {pargs: [], "opts_json": {"origins": False}}
+                self.assert_(not self.__schema_validation(list_input,
+                    list_schema))
+
+                list_input = {pargs: [], "opts_json": {"origins": []}}
+                self.assert_(self.__schema_validation(list_input,
+                    list_schema))
+
+        def test_04_install_json_args_opts(self):
+                """Test json args or opts for install command."""
+
+                # Test invalid pkg name.
+                self.image_create(self.rurl1, prefix="test1")
+                os.environ["PKG_IMAGE"] = self.img_path()
+                pkgs = [1, 2, 3]
+                opts = {"backup_be": True}
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = [None]
+                opts = {"backup_be": True}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                # Test that an unknown option is reported.
+                pkgs = ["[email protected]"]
+                opts = {"unknown": "solaris"}
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("invalid option" in
+                    retjson["errors"][0]["reason"])
+
+                # Test without pkg specified.
+                pkgs = []
+                opts = {"verbose": 3}
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("at least one package" in
+                    retjson["errors"][0]["reason"])
+
+                # Run through pkg install.
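+                # parsable_version 0 requests version-0 parsable output, as
+                # with pkg install --parsable=0.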
+                pkgs = ["[email protected]"]
+                opts = {"verbose": 3, "parsable_version": 0}
+                global_settings.client_output_quiet = True
+                retjson = self.__call_cmd("install", pkgs, opts)
+                global_settings.client_output_quiet = False
+
+                # Test input directly against schema.
+                pargs = "pargs_json"
+                install_schema = cli_api._get_pkg_input_schema("install")
+                install_input = {pargs: [], "opts_json": {}}
+                self.assert_(self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: [12], "opts_json": {}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json": {}}
+                self.assert_(self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs : ["pkg"], "opts_json":
+                    {"parsable_version": "string"}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json":
+                    {"parsable_version": 3}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json":
+                    {"parsable_version": None}}
+                self.assert_(self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json": {"reject_pats":
+                    False}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json": {"reject_pats":
+                    []}}
+                self.assert_(self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json": {"accept": "str"}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json": {"accept": False}}
+                self.assert_(self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json":
+                    {"act_timeout": 1.2}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json":
+                    {"act_timeout": -1}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: ["pkg"], "opts_json":
+                    {"li_erecurse_list": [None, None]}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+                install_input = {pargs: [None], "opts_json":
+                    {"li_erecurse_list": []}}
+                self.assert_(not self.__schema_validation(install_input,
+                    install_schema))
+
+        def test_05_update_json_args_opts(self):
+                """Test json args or opts for update command."""
+
+                global_settings.client_output_quiet = True
+                self.image_create(self.rurl1, prefix="test1")
+                os.environ["PKG_IMAGE"] = self.img_path()
+                # Test invalid pkg name.
+                pkgs = [1, 2, 3]
+                opts = {"backup_be": True}
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = [None]
+                opts = {"backup_be": True}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                # Test that an unknown option is reported.
+                pkgs = ["[email protected]"]
+                opts = {"unknown": "solaris"}
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_("invalid option" in
+                    retjson["errors"][0]["reason"])
+
+                # Test without pkg specified.
+                pkgs = []
+                opts = {"verbose": 3}
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_(retjson["status"] == 4)
+
+                # Run through pkg update.
+                self.pkg("install pkg://test1/[email protected]")
+                pkgs = ["[email protected]"]
+                opts = {"verbose": 3, "parsable_version": 0}
+
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                pkgs = []
+                retjson = self.__call_cmd("update", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+                global_settings.client_output_quiet = False
+
+                # Test input directly against schema.
+                pargs = "pargs_json"
+                update_schema = cli_api._get_pkg_input_schema("update")
+                update_input = {pargs: [], "opts_json": {}}
+                self.assert_(self.__schema_validation(update_input,
+                    update_schema))
+
+                update_input = {pargs: [None], "opts_json": {}}
+                self.assert_(not self.__schema_validation(update_input,
+                    update_schema))
+
+                update_input = {pargs: None, "opts_json": {}}
+                self.assert_(not self.__schema_validation(update_input,
+                    update_schema))
+
+                update_input = {pargs: [1, 2], "opts_json": {}}
+                self.assert_(not self.__schema_validation(update_input,
+                    update_schema))
+
+                update_input = {pargs: [], "opts_json": {"force": True}}
+                self.assert_(self.__schema_validation(update_input,
+                    update_schema))
+
+                update_input = {pargs: [], "opts_json": {"ignore_missing":
+                    True}}
+                self.assert_(self.__schema_validation(update_input,
+                    update_schema))
+
+        def test_06_uninstall_args_opts(self):
+                """Test json args or opts for update command."""
+
+                global_settings.client_output_quiet = True
+                self.image_create(self.rurl1, prefix="test1")
+                os.environ["PKG_IMAGE"] = self.img_path()
+                # Test invalid pkg name.
+                pkgs = [1, 2, 3]
+                opts = {}
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = [None]
+                opts = {}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                # Test that an unknown option is reported.
+                pkgs = ["[email protected]"]
+                opts = {"unknown": "solaris"}
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("invalid option" in
+                    retjson["errors"][0]["reason"])
+
+                # Test without pkg specified.
+                pkgs = []
+                opts = {"verbose": 3}
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("at least one package" in
+                    retjson["errors"][0]["reason"])
+
+                # Run through pkg uninstall.
+                self.pkg("install pkg://test1/[email protected]")
+                pkgs = ["foo"]
+                opts = {"verbose": 3, "parsable_version": 0}
+
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+                global_settings.client_output_quiet = False
+
+                # Test input directly against schema.
+                pargs = "pargs_json"
+                uninstall_schema = cli_api._get_pkg_input_schema("uninstall")
+                uninstall_input = {pargs: ["pkg"], "opts_json": {}}
+                self.assert_(self.__schema_validation(uninstall_input,
+                    uninstall_schema))
+
+                uninstall_input = {pargs: None, "opts_json": {}}
+                self.assert_(not self.__schema_validation(uninstall_input,
+                    uninstall_schema))
+
+                uninstall_input = {pargs: [], "opts_json": {"ignore_missing":
+                    True}}
+                self.assert_(self.__schema_validation(uninstall_input,
+                    uninstall_schema))
+
+        def test_07_set_publisher_args_opts(self):
+                """Test json args or opts for update command."""
+
+                global_settings.client_output_quiet = True
+                self.rurl1 = self.dcs[1].get_repo_url()
+                self.image_create(self.rurl1)
+                os.environ["PKG_IMAGE"] = self.img_path()
+                # Set a publisher with an explicit origin URI.
+                pubs = ["test1"]
+                opts = {"origin_uri": self.rurl1}
+                retjson = self.__call_cmd("set-publisher", pubs, opts)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                retjson = self.__call_cmd("unset-publisher", pubs, {})
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                opts = {"add_origins": [self.rurl1]}
+                retjson = self.__call_cmd("set-publisher", pubs, opts)
+                self.assert_("errors" not in retjson)
+
+                pkgs = ["[email protected]"]
+                opts = {"verbose": 3, "parsable_version": 0}
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                pkgs = ["newpkg"]
+                opts = {"verbose": 3, "parsable_version": 0}
+                retjson = self.__call_cmd("uninstall", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                self.pkg("set-publisher -O " + self.rurl2 + " test2")
+                retjson = cli_api._pkg_invoke(
+                    subcommand="unset-publisher",
+                    pargs_json=json.dumps(["test2"]),
+                    opts_json=None)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                retjson = self.__call_cmd("unset-publisher", pubs, {})
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
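+                # With only repo_uri specified, publishers found in the
+                # repository are configured automatically, as with
+                # set-publisher -p.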
+                opts = {"repo_uri": self.rurl1}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["data"]["added"] == ["test1"])
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["data"]["updated"] == ["test1"])
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                pkgs = ["pkg://test1/foo@1"]
+                opts = {"verbose": 3, "parsable_version": 0}
+                retjson = self.__call_cmd("install", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                opts = {"repo_uri": self.rurl2, "set_props": ["prop1=here",
+                    "prop2=there"]}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                opts = {"repo_uri": self.rurl2, "unset_props": ["prop1",
+                    "prop2"]}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                opts = {"repo_uri": self.rurl2, "search_before": "a",
+                    "search_after": "b"}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 2)
+                self.assert_("errors" in retjson)
+
+                opts = {"repo_uri": self.rurl2, "add_origins": ["a"]}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 2)
+                self.assert_("errors" in retjson)
+
+                opts = {"repo_uri": self.rurl2, "refresh_allowed": False}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 2)
+                self.assert_("combined" in retjson["errors"][0]["reason"])
+
+                opts = {"proxy_uri": self.rurl2}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_(retjson["status"] == 2)
+                self.assert_("only be used" in retjson["errors"][0]["reason"])
+                global_settings.client_output_quiet = False
+
+                # Test input directly against schema.
+                pargs = "pargs_json"
+                schema = cli_api._get_pkg_input_schema("set-publisher")
+
+                test_input = {pargs: ["test1"], "opts_json": {"enable": True}}
+                self.assert_(self.__schema_validation(test_input,
+                    schema))
+
+                test_input = {pargs: None, "opts_json": {"enable": True}}
+                self.assert_(not self.__schema_validation(test_input,
+                    schema))
+
+                test_input = {pargs: [], "opts_json": {"repo_uri": "test"}}
+                self.assert_(self.__schema_validation(test_input,
+                    schema))
+
+                schema = cli_api._get_pkg_input_schema("unset-publisher")
+                test_input = {pargs: [], "opts_json": {}}
+                self.assert_(self.__schema_validation(test_input,
+                    schema))
+
+        def test_08_publisher_args_opts(self):
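+                """Test json args or opts for publisher command."""
+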
+                global_settings.client_output_quiet = True
+                self.rurl1 = self.dcs[1].get_repo_url()
+                self.image_create(self.rurl1)
+                os.environ["PKG_IMAGE"] = self.img_path()
+                opts = {"repo_uri": self.rurl1}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+                # Test an unknown publisher name.
+                pubs = ["no_pub"]
+                opts = {}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_(retjson["status"] == 1)
+                self.assert_("Unknown publisher" in \
+                    retjson["errors"][0]["reason"])
+
+                pubs = []
+                opts = {"omit_headers": True}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("data" in retjson)
+                self.assert_("headers" not in retjson["data"])
+
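+                # The tsv output format always emits a fixed set of eight
+                # header columns.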
+                pubs = []
+                opts = {"output_format": "tsv"}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_(len(retjson["data"]["headers"]) == 8)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                pubs = []
+                opts = {"output_format": "invalid"}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                pubs = []
+                opts = {"output_format": ["invalid"]}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                pubs = []
+                opts = {"output_format": None}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                pubs = []
+                opts = {"inc_disabled": False}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                pubs = []
+                opts = {"inc_disabled": "False"}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                pubs = ["test1"]
+                opts = {}
+                retjson = self.__call_cmd("publisher", pubs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("publisher_details" in retjson["data"])
+                self.assert_(len(retjson["data"]["publisher_details"]) == 1)
+
+        def test_09_info_args_opts(self):
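+                """Test json args or opts for info command."""
+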
+                global_settings.client_output_quiet = True
+                self.rurl1 = self.dcs[1].get_repo_url()
+                self.image_create(self.rurl1)
+                os.environ["PKG_IMAGE"] = self.img_path()
+                opts = {"repo_uri": self.rurl1}
+                retjson = self.__call_cmd("set-publisher", [], opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                self.pkg("install pkg://test1/[email protected]")
+                pkgs = ["foo"]
+                opts = {"origins": [self.rurl1]}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+                self.assert_("package_attrs" in retjson["data"])
+                self.assert_(len(retjson["data"]["package_attrs"]) == 2)
+
+                pkgs = []
+                opts = {"origins": [self.rurl1]}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                pkgs = []
+                opts = {"origins": [None]}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                pkgs = []
+                opts = {"origins": None}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                opts = {"origins": "single"}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                pkgs = ["foo"]
+                opts = {"origins": [self.rurl1], "quiet": "True"}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 1)
+
+                pkgs = ["foo"]
+                opts = {"origins": [self.rurl1], "quiet": True}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 0)
+
+                pkgs = []
+                opts = {"origins": [self.rurl1], "quiet": True}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                pkgs = ["foo"]
+                opts = {}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" not in retjson)
+                self.assert_(len(retjson["data"]["package_attrs"]) == 1)
+                self.assert_(retjson["data"]["package_attrs"][0][2][1][0] \
+                    == "Installed")
+                self.assert_(retjson["status"] == 0)
+
+                pkgs = []
+                opts = {"origins": [self.rurl1], "quiet": True}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                pkgs = []
+                opts = {"info_local": True, "info_remote": True}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 2)
+
+                # Test with wrong value type.
+                pkgs = []
+                opts = {"info_local": "true"}
+                retjson = self.__call_cmd("info", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("data" not in retjson)
+                self.assert_(retjson["status"] == 1)
+
+        def test_10_exact_install_json_args_opts(self):
+                """Test json args or opts for exact-install command."""
+
+                # Test invalid pkg name.
+                self.image_create(self.rurl1, prefix="test1")
+                os.environ["PKG_IMAGE"] = self.img_path()
+                pkgs = [1, 2, 3]
+                opts = {"backup_be": True}
+                retjson = self.__call_cmd("exact-install", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                pkgs = [None]
+                opts = {"backup_be": True}
+                os.environ["PKG_IMAGE"] = self.img_path()
+                retjson = self.__call_cmd("exact-install", pkgs, opts)
+                self.assert_("errors" in retjson)
+                self.assert_("is not of type 'string'" in
+                    retjson["errors"][0]["reason"])
+
+                # Test that an unknown option is reported.
+                pkgs = ["[email protected]"]
+                opts = {"unknown": "solaris"}
+                retjson = self.__call_cmd("exact-install", pkgs, opts)
+                self.assert_("invalid option" in
+                    retjson["errors"][0]["reason"])
+
+                # Test without pkg specified.
+                pkgs = []
+                opts = {"verbose": 3}
+                retjson = self.__call_cmd("exact-install", pkgs, opts)
+                self.assert_("at least one package" in
+                    retjson["errors"][0]["reason"])
+
+                # Run through pkg install.
+                pkgs = ["[email protected]"]
+                opts = {"verbose": 3, "parsable_version": 0}
+                global_settings.client_output_quiet = True
+                retjson = self.__call_cmd("exact-install", pkgs, opts)
+                global_settings.client_output_quiet = False
+
+                # Test input directly against schema.
+                pargs = "pargs_json"
+                einstall_schema = cli_api._get_pkg_input_schema("exact-install")
+                einstall_input = {pargs: [], "opts_json": {}}
+                self.assert_(self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: [12], "opts_json": {}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json": {}}
+                self.assert_(self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs : ["pkg"], "opts_json":
+                    {"parsable_version": "string"}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json":
+                    {"parsable_version": 3}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json":
+                    {"parsable_version": None}}
+                self.assert_(self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json": {"reject_pats":
+                    False}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json": {"reject_pats":
+                    []}}
+                self.assert_(self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json": {"accept":
+                    "str"}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json": {"accept":
+                    False}}
+                self.assert_(self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: ["pkg"], "opts_json":
+                    {"reject_pats": [None, None]}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+                einstall_input = {pargs: [None], "opts_json":
+                    {"reject_pats": []}}
+                self.assert_(not self.__schema_validation(einstall_input,
+                    einstall_schema))
+
+        def test_11_ClientInterface(self):
+                """Test the clientInterface class."""
+                pt = progress.QuietProgressTracker()
+                cli_inst = cli_api.ClientInterface(pkg_image=self.img_path(),
+                    prog_tracker=pt, opts_mapping={"be_name": "boot_env"})
+                opts = {"repo_uri": self.rurl1}
+                retjson = cli_inst.publisher_set(json.dumps([]),
+                    json.dumps(opts))
+                epset_schema_in = cli_inst.get_pkg_input_schema(
+                    "set-publisher")
+                epset_schema_out = cli_inst.get_pkg_output_schema(
+                    "set-publisher")
+                epset_input = {"pargs_json": [], "opts_json": opts}
+                self.assert_(self.__schema_validation(epset_input,
+                    epset_schema_in))
+                self.assert_(self.__schema_validation(retjson,
+                    epset_schema_out))
+
+                # Test uninstalling a package that is not installed.
+                opts = {}
+                args = ["no_install"]
+                retjson = cli_inst.uninstall(json.dumps(args), json.dumps(opts))
+                self.assert_(retjson["status"] == 1)
+                self.assert_("errors" in retjson)
+                eunins_schema_in = cli_inst.get_pkg_input_schema("uninstall")
+                # Test that the input schema uses the mapped option name.
+                self.assert_("boot_env" in json.dumps(eunins_schema_in))
+                eunins_schema_out = cli_inst.get_pkg_output_schema("uninstall")
+                eunins_input = {"pargs_json": args, "opts_json": opts}
+                self.assert_(self.__schema_validation(eunins_input,
+                    eunins_schema_in))
+                self.assert_(self.__schema_validation(retjson,
+                    eunins_schema_out))
+
+                # Test that a BE-related exception does not crash the system.
+                opts = {"boot_env": "s12"}
+                args = ["no_install"]
+                retjson = cli_inst.uninstall(json.dumps(args),
+                    json.dumps(opts))
+                self.assert_(retjson["status"] == 1)
+                self.assert_("errors" in retjson)
+                self.assert_("boot_env" not in json.dumps(retjson))
+
+                retjson = cli_inst.uninstall(json.dumps(["newpkg2"]),
+                    json.dumps({}))
+                self.assert_(retjson["status"] == 1)
+                self.assert_("errors" in retjson)
+
+                opts = {"parsable_version": 0}
+                args = ["[email protected]"]
+                retjson = cli_inst.install(json.dumps(args), json.dumps(opts))
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+                eins_schema_in = cli_inst.get_pkg_input_schema("install")
+                eins_schema_out = cli_inst.get_pkg_output_schema("install")
+                eins_input = {"pargs_json": args, "opts_json": opts}
+                self.assert_(self.__schema_validation(eins_input,
+                    eins_schema_in))
+                self.assert_(self.__schema_validation(retjson,
+                    eins_schema_out))
+
+                retjson = cli_inst.list_inventory()
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+                self.assert_("newpkg2" in json.dumps(retjson))
+
+                retjson = cli_inst.uninstall(json.dumps(args), json.dumps(opts))
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
+
+                retjson = cli_inst.publisher_set(json.dumps(["test1"]))
+                self.assert_(retjson["status"] == 0)
+                self.assert_("errors" not in retjson)
--- a/src/tests/cli/t_pkg_image_create.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/tests/cli/t_pkg_image_create.py	Fri Apr 03 19:02:53 2015 -0700
@@ -547,7 +547,7 @@
                 # an unprivileged user.  Each must be done with and without
                 # the publisher prefix to test that these are stripped and
                 # read properly (because of the publisher preferred prefix).
-                self.pkg("publisher -a", su_wrap=True)
+                self.pkg("publisher", su_wrap=True)
                 self.pkg("info pkg://test1/quux corge", su_wrap=True)
                 self.pkg("info pkg://test2/corge quux", su_wrap=True)
                 self.pkg("update -nv --no-refresh", su_wrap=True, exit=4)
@@ -618,7 +618,7 @@
                 # an unprivileged user.  Each must be done with and without
                 # the publisher prefix to test that these are stripped and
                 # read properly.
-                self.pkg("--debug simulate_live_root={0} publisher -a".format(
+                self.pkg("--debug simulate_live_root={0} publisher".format(
                     self.get_img_path()), su_wrap=True)
                 self.pkg("--debug simulate_live_root={0} info "
                     "pkg://test1/quux corge".format(self.get_img_path()),
@@ -673,7 +673,7 @@
                 self.pkg("verify")
 
                 # Verify updated image works as expected.
-                self.pkg("publisher -a", su_wrap=True)
+                self.pkg("publisher", su_wrap=True)
                 self.pkg("info pkg://test1/quux corge", su_wrap=True)
                 self.pkg("info pkg://test2/corge quux", su_wrap=True)
                 self.pkg("update -nv --no-refresh", su_wrap=True, exit=4)
--- a/src/tests/cli/t_pkg_install.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/tests/cli/t_pkg_install.py	Fri Apr 03 19:02:53 2015 -0700
@@ -38,6 +38,7 @@
 import socket
 import subprocess
 import stat
+import struct
 import tempfile
 import time
 import unittest
@@ -3911,7 +3912,10 @@
                 sock.close()
                 # We also test block and character special files, but only if
                 # os.mknod() is available, which it isn't always.
-                if hasattr(os, "mknod"):
+                # Since os.mknod() currently only accepts a 32-bit device
+                # integer, check whether we are running as a 32-bit process.
+                run_bit = struct.calcsize("P") * 8
+                if hasattr(os, "mknod") and run_bit == 32:
                         st = os.stat("/dev/null")
                         os.mknod(os.path.join(self.img_path(), "salvage",
                             "node"), st.st_mode, st.st_dev)
--- a/src/tests/cli/t_pkg_publisher.py	Thu Apr 02 08:44:15 2015 -0700
+++ b/src/tests/cli/t_pkg_publisher.py	Fri Apr 03 19:02:53 2015 -0700
@@ -458,6 +458,8 @@
                 self.pkg("publisher test")
                 self.assert_("Properties:" in self.output)
                 self.assert_("foo = bar" in self.output)
+                self.pkg("set-publisher --add-property-value foo=bar1 test",
+                    exit=1)
 
                 self.pkg("set-publisher --set-property "
                     "signature-policy=require-names --add-property-value "
@@ -1120,7 +1122,6 @@
                 self.pkg("publisher -n | grep test2", exit=1) # unless -n
 
                 self.pkg("list -a bar", exit=1)
-                self.pkg("publisher -a | grep test2")
                 self.pkg("set-publisher -P test2")
                 self.pkg("publisher test2")
                 self.pkg("set-publisher -e test2")
@@ -1131,7 +1132,6 @@
                 self.pkg("publisher | grep test2")
                 self.pkg("publisher -n | grep test2", exit=1)
                 self.pkg("list -a bar", exit=1)
-                self.pkg("publisher -a | grep test2")
                 self.pkg("set-publisher --enable test2")
                 self.pkg("publisher -n | grep test2")
                 self.pkg("list -a bar")