1980 be_name=be_name, facets=facets, li_erecurse=li_erecurse, |
1923 be_name=be_name, facets=facets, li_erecurse=li_erecurse, |
1981 li_parent_sync=li_parent_sync, new_be=new_be, |
1924 li_parent_sync=li_parent_sync, new_be=new_be, |
1982 refresh_catalogs=refresh_catalogs, reject_list=reject_pats, |
1925 refresh_catalogs=refresh_catalogs, reject_list=reject_pats, |
1983 update_index=update_index) |
1926 update_index=update_index) |
1984 |
1927 |
|
def __handle_client_json_api_output(out_json, op):
    """This is the main client_json_api output handling function used for
    install, update and uninstall and so on.

    'out_json' is the JSON result dict returned by a client_api call; it
    must contain a "status" key and may contain "errors" and "data" keys.

    'op' is the operation name; it is forwarded as the command name when
    formatting error messages.

    Returns the integer exit status carried in 'out_json'."""

    if "errors" in out_json:
        _generate_error_messages(out_json["status"],
            out_json["errors"], cmd=op)

    # Point the user at the release notes whenever the operation
    # produced a URL for them.
    if "data" in out_json and "release_notes_url" in out_json["data"]:
        msg("\n" + "-" * 75)
        msg(_("NOTE: Please review release notes posted at:\n"))
        msg(out_json["data"]["release_notes_url"])
        msg("-" * 75 + "\n")
    return out_json["status"]
|
def _emit_error_general_cb(status, err, cmd=None, selected_type=(),
        add_info=misc.EmptyDict):
    """Callback for emitting general errors.

    'status' is the operation's exit status; 'err' is one error dict which
    may carry "errtype", "reason" and "info" keys.

    'cmd' is the command name used when reporting usage errors.

    'selected_type' restricts output to the listed error types; errors of
    other types are left unprinted.  The default is an empty tuple rather
    than a mutable [] to avoid the shared mutable default-argument pitfall
    (only truthiness and membership tests are performed on it, so the
    change is behavior-neutral for callers).

    'add_info' supplies extra values some error types need (currently
    "repo_uri" for the "unsupported_repo_op" type).

    Returns True if the error was emitted, False if it was filtered out
    because it did not match 'selected_type'."""

    if status == EXIT_BADOPT:
        # Usage errors are not in any specific type, print it only
        # there is no selected type.
        if not selected_type:
            usage(err["reason"], cmd=cmd)
        else:
            return False
    elif "errtype" in err:
        if err["errtype"] == "format_update":
            # if the selected_type is specified and err not in
            # selected type, don't print and return False.
            if selected_type and err["errtype"] not in selected_type:
                return False
            emsg("\n")
            emsg(err["reason"])
            emsg(_("To continue, execute 'pkg update-format' as a "
                "privileged user and then try again. Please note "
                "that updating the format of the image will render "
                "it unusable with older versions of the pkg(5) "
                "system."))
        elif err["errtype"] == "catalog_refresh":
            if selected_type and err["errtype"] not in selected_type:
                return False

            if "reason" in err:
                emsg(err["reason"])
            elif "info" in err:
                msg(err["info"])
        elif err["errtype"] == "catalog_refresh_failed":
            if selected_type and err["errtype"] not in selected_type:
                return False

            if "reason" in err:
                emsg(" ")
                emsg(err["reason"])
        elif err["errtype"] == "publisher_set":
            if selected_type and err["errtype"] not in selected_type:
                return False

            emsg(err["reason"])
        elif err["errtype"] == "plan_license":
            if selected_type and err["errtype"] not in selected_type:
                return False

            emsg(err["reason"])
            emsg(_("To indicate that you "
                "agree to and accept the terms of the licenses of "
                "the packages listed above, use the --accept "
                "option. To display all of the related licenses, "
                "use the --licenses option."))
        elif err["errtype"] in ["inventory", "inventory_extra"]:
            if selected_type and err["errtype"] not in selected_type:
                return False

            emsg(" ")
            emsg(err["reason"])
            if err["errtype"] == "inventory_extra":
                emsg("Use -af to allow all versions.")
        elif err["errtype"] == "unsupported_repo_op":
            if selected_type and err["errtype"] not in selected_type:
                return False

            emsg(_("""
To add a publisher using this repository, execute the following command as a
privileged user:

pkg set-publisher -g {0} <publisher>
""").format(add_info["repo_uri"]))
        elif "info" in err:
            msg(err["info"])
        elif "reason" in err:
            emsg(err["reason"])
    else:
        # Errors without an "errtype" are not in any specific type;
        # print them only when no type filter is in effect.
        if selected_type:
            return False

        if "reason" in err:
            emsg(err["reason"])
        elif "info" in err:
            msg(err["info"])
    return True
|
def _generate_error_messages(status, err_list,
        msg_cb=_emit_error_general_cb, selected_type=(), cmd=None,
        add_info=misc.EmptyDict):
    """Generate error messages.

    Runs 'msg_cb' on every error dict in 'err_list', forwarding 'status',
    'selected_type', 'cmd' and 'add_info'.  'selected_type' defaults to an
    immutable empty tuple (instead of a mutable []) to avoid the shared
    mutable default-argument pitfall; it is only passed through, so the
    change is behavior-neutral.

    Returns the list of errors the callback declined to print, so callers
    can handle them further."""

    errs_left = [err for err in err_list if not msg_cb(status, err,
        selected_type=selected_type, cmd=cmd, add_info=add_info)]
    # Return errors not being printed.
    return errs_left
|
def exact_install(op, api_inst, pargs,
        accept, backup_be, backup_be_name, be_activate, be_name, li_ignore,
        li_parent_sync, new_be, noexecute, origins, parsable_version, quiet,
        refresh_catalogs, reject_pats, show_licenses, update_index, verbose):
    """Attempt to take package specified to INSTALLED state.
    The operands are interpreted as glob patterns.

    Thin wrapper: delegates all work (argument validation included) to
    client_api._exact_install and renders its JSON result."""

    out_json = client_api._exact_install(op, api_inst, pargs, accept,
        backup_be, backup_be_name, be_activate, be_name, li_ignore,
        li_parent_sync, new_be, noexecute, origins, parsable_version,
        quiet, refresh_catalogs, reject_pats, show_licenses, update_index,
        verbose, display_plan_cb=display_plan_cb, logger=logger)

    # Print errors / release notes and surface the exit status.
    return __handle_client_json_api_output(out_json, op)
2053 |
def install(op, api_inst, pargs,
        accept, act_timeout, backup_be, backup_be_name, be_activate, be_name,
        li_ignore, li_erecurse, li_parent_sync, new_be, noexecute, origins,
        parsable_version, quiet, refresh_catalogs, reject_pats, show_licenses,
        stage, update_index, verbose):
    """Attempt to take package specified to INSTALLED state. The operands
    are interpreted as glob patterns.

    Thin wrapper: delegates all work (argument validation included) to
    client_api._install and renders its JSON result.  The older in-line
    implementation that called __api_op directly was stale diff residue
    and has been dropped in favor of this version."""

    out_json = client_api._install(op, api_inst, pargs,
        accept, act_timeout, backup_be, backup_be_name, be_activate,
        be_name, li_ignore, li_erecurse, li_parent_sync, new_be, noexecute,
        origins, parsable_version, quiet, refresh_catalogs, reject_pats,
        show_licenses, stage, update_index, verbose,
        display_plan_cb=display_plan_cb, logger=logger)

    # Print errors / release notes and surface the exit status.
    return __handle_client_json_api_output(out_json, op)
|
def uninstall(op, api_inst, pargs,
        act_timeout, backup_be, backup_be_name, be_activate, be_name,
        ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be,
        noexecute, parsable_version, quiet, stage, update_index, verbose):
    """Attempt to take package specified to DELETED state.

    Thin wrapper: delegates all work (argument validation included) to
    client_api._uninstall and renders its JSON result, consistent with
    the json-based install/update/exact_install siblings.  The older
    in-line implementation that called __api_op directly was stale diff
    residue and has been dropped in favor of this version."""

    out_json = client_api._uninstall(op, api_inst, pargs,
        act_timeout, backup_be, backup_be_name, be_activate, be_name,
        ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be,
        noexecute, parsable_version, quiet, stage, update_index, verbose,
        display_plan_cb=display_plan_cb, logger=logger)

    # Print errors / release notes and surface the exit status.
    return __handle_client_json_api_output(out_json, op)
2067 |
2070 |
def update(op, api_inst, pargs, accept, act_timeout, backup_be, backup_be_name,
        be_activate, be_name, force, ignore_missing, li_ignore, li_erecurse,
        li_parent_sync, new_be, noexecute, origins, parsable_version, quiet,
        refresh_catalogs, reject_pats, show_licenses, stage, update_index,
        verbose):
    """Attempt to take all installed packages specified to latest
    version.

    Thin wrapper: delegates all work (argument validation included) to
    client_api._update and renders its JSON result.  The older in-line
    implementation that called __api_op directly was stale diff residue
    and has been dropped in favor of this version."""

    out_json = client_api._update(op, api_inst, pargs, accept, act_timeout,
        backup_be, backup_be_name, be_activate, be_name, force,
        ignore_missing, li_ignore, li_erecurse, li_parent_sync, new_be,
        noexecute, origins, parsable_version, quiet, refresh_catalogs,
        reject_pats, show_licenses, stage, update_index, verbose,
        display_plan_cb=display_plan_cb, logger=logger)

    # Print errors / release notes and surface the exit status.
    return __handle_client_json_api_output(out_json, op)
2100 |
2109 def revert(op, api_inst, pargs, |
2101 def revert(op, api_inst, pargs, |
2110 backup_be, backup_be_name, be_activate, be_name, new_be, noexecute, |
2102 backup_be, backup_be_name, be_activate, be_name, new_be, noexecute, |
2111 parsable_version, quiet, tagged, verbose): |
2103 parsable_version, quiet, tagged, verbose): |
2112 """Attempt to revert files to their original state, either |
2104 """Attempt to revert files to their original state, either |
2886 retcode = EXIT_OOPS |
2878 retcode = EXIT_OOPS |
2887 elif good_res: |
2879 elif good_res: |
2888 retcode = EXIT_OK |
2880 retcode = EXIT_OK |
2889 return retcode |
2881 return retcode |
2890 |
2882 |
def info(op, api_inst, pargs, display_license, info_local, info_remote,
        origins, quiet):
    """Display information about a package or packages.

    Delegates the query to client_api._info and renders the JSON result:
    either raw license texts (when 'display_license' was requested) or a
    right-aligned "Label: value" attribute table wrapped to 80 columns.
    The older several-hundred-line implementation that drove
    api_inst.info() directly was stale diff residue and has been dropped
    in favor of this version.

    Returns the exit status carried in the JSON result."""

    ret_json = client_api._info(op, api_inst, pargs, display_license,
        info_local, info_remote, origins, quiet)

    if "data" in ret_json:
        # display_license is true.
        if "licenses" in ret_json["data"]:
            data_type = "licenses"
        elif "package_attrs" in ret_json["data"]:
            data_type = "package_attrs"
        # NOTE(review): if "data" is present but holds neither key,
        # data_type is unbound below — presumably client_api._info
        # guarantees one of the two; confirm against the callee.

        for i, pis in enumerate(ret_json["data"][data_type]):
            # Blank line between successive packages.
            if not quiet and i > 0:
                msg("")

            if display_license and not quiet:
                for lic in pis:
                    msg(lic)
                continue

            try:
                max_width = max(
                    len(attr[0])
                    for attr in pis
                )
            except ValueError:
                # Only display header if there are
                # other attributes to show.
                continue
            for attr_l in pis:
                attr, kval = tuple(attr_l)
                label = "{0}: ".format(attr.rjust(max_width))
                res = "\n".join(item for item in kval)
                if res:
                    wrapper = textwrap.TextWrapper(
                        initial_indent=label,
                        break_on_hyphens=False,
                        break_long_words=False,
                        subsequent_indent=(max_width + 2) * " ",
                        width=80)
                    msg(wrapper.fill(res))

    if "errors" in ret_json:
        _generate_error_messages(ret_json["status"], ret_json["errors"],
            cmd="info")

    return ret_json["status"]
2934 |
3134 def calc_widths(lines, attrs, widths=None): |
2935 def calc_widths(lines, attrs, widths=None): |
3135 """Given a set of lines and a set of attributes, calculate the minimum |
2936 """Given a set of lines and a set of attributes, calculate the minimum |
3136 width each column needs to hold its contents.""" |
2937 width each column needs to hold its contents.""" |
3137 |
2938 |
3686 [--remove-property-value name of property=value to remove] |
3492 [--remove-property-value name of property=value to remove] |
3687 [--unset-property name of property to delete] |
3493 [--unset-property name of property to delete] |
3688 [--proxy proxy to use] |
3494 [--proxy proxy to use] |
3689 [publisher] """ |
3495 [publisher] """ |
3690 |
3496 |
3691 cmd_name = "set-publisher" |
3497 out_json = client_api._publisher_set(op, api_inst, pargs, ssl_key, |
3692 |
3498 ssl_cert, origin_uri, reset_uuid, add_mirrors, remove_mirrors, |
3693 ssl_key = None |
3499 add_origins, remove_origins, refresh_allowed, disable, sticky, |
3694 ssl_cert = None |
3500 search_before, search_after, search_first, approved_ca_certs, |
3695 origin_uri = None |
3501 revoked_ca_certs, unset_ca_certs, set_props, add_prop_values, |
3696 reset_uuid = False |
3502 remove_prop_values, unset_props, repo_uri, proxy_uri) |
3697 add_mirrors = set() |
3503 |
3698 remove_mirrors = set() |
3504 errors = None |
3699 add_origins = set() |
3505 if "errors" in out_json: |
3700 remove_origins = set() |
3506 errors = out_json["errors"] |
3701 refresh_allowed = True |
3507 errors = _generate_error_messages(out_json["status"], errors, |
3702 disable = None |
3508 selected_type=["publisher_set"]) |
3703 sticky = None |
3509 |
3704 search_before = None |
3510 if "data" in out_json: |
3705 search_after = None |
3511 if "header" in out_json["data"]: |
3706 search_first = False |
3512 logger.info(out_json["data"]["header"]) |
3707 repo_uri = None |
3513 if "added" in out_json["data"]: |
3708 proxy_uri = None |
|
3709 |
|
3710 approved_ca_certs = [] |
|
3711 revoked_ca_certs = [] |
|
3712 unset_ca_certs = [] |
|
3713 set_props = {} |
|
3714 add_prop_values = {} |
|
3715 remove_prop_values = {} |
|
3716 unset_props = set() |
|
3717 |
|
3718 opts, pargs = getopt.getopt(args, "Pedk:c:O:G:g:M:m:p:", |
|
3719 ["add-mirror=", "remove-mirror=", "add-origin=", "remove-origin=", |
|
3720 "no-refresh", "reset-uuid", "enable", "disable", "sticky", |
|
3721 "non-sticky", "search-after=", "search-before=", "search-first", |
|
3722 "approve-ca-cert=", "revoke-ca-cert=", "unset-ca-cert=", |
|
3723 "set-property=", "add-property-value=", "remove-property-value=", |
|
3724 "unset-property=", "proxy="]) |
|
3725 |
|
3726 for opt, arg in opts: |
|
3727 if opt == "-c": |
|
3728 ssl_cert = arg |
|
3729 elif opt == "-d" or opt == "--disable": |
|
3730 disable = True |
|
3731 elif opt == "-e" or opt == "--enable": |
|
3732 disable = False |
|
3733 elif opt == "-g" or opt == "--add-origin": |
|
3734 add_origins.add(misc.parse_uri(arg, cwd=orig_cwd)) |
|
3735 elif opt == "-G" or opt == "--remove-origin": |
|
3736 if arg == "*": |
|
3737 # Allow wildcard to support an easy, scriptable |
|
3738 # way of removing all existing entries. |
|
3739 remove_origins.add("*") |
|
3740 else: |
|
3741 remove_origins.add(misc.parse_uri(arg, |
|
3742 cwd=orig_cwd)) |
|
3743 elif opt == "-k": |
|
3744 ssl_key = arg |
|
3745 elif opt == "-O": |
|
3746 origin_uri = arg |
|
3747 elif opt == "-m" or opt == "--add-mirror": |
|
3748 add_mirrors.add(misc.parse_uri(arg, cwd=orig_cwd)) |
|
3749 elif opt == "-M" or opt == "--remove-mirror": |
|
3750 if arg == "*": |
|
3751 # Allow wildcard to support an easy, scriptable |
|
3752 # way of removing all existing entries. |
|
3753 remove_mirrors.add("*") |
|
3754 else: |
|
3755 remove_mirrors.add(misc.parse_uri(arg, |
|
3756 cwd=orig_cwd)) |
|
3757 elif opt == "-p": |
|
3758 if repo_uri: |
|
3759 usage(_("The -p option can be specified only " |
|
3760 "once."), cmd=cmd_name) |
|
3761 repo_uri = misc.parse_uri(arg, cwd=orig_cwd) |
|
3762 elif opt in ("-P", "--search-first"): |
|
3763 search_first = True |
|
3764 elif opt == "--reset-uuid": |
|
3765 reset_uuid = True |
|
3766 elif opt == "--no-refresh": |
|
3767 refresh_allowed = False |
|
3768 elif opt == "--sticky": |
|
3769 sticky = True |
|
3770 elif opt == "--non-sticky": |
|
3771 sticky = False |
|
3772 elif opt == "--search-before": |
|
3773 search_before = arg |
|
3774 elif opt == "--search-after": |
|
3775 search_after = arg |
|
3776 elif opt == "--approve-ca-cert": |
|
3777 approved_ca_certs.append(arg) |
|
3778 elif opt == "--revoke-ca-cert": |
|
3779 revoked_ca_certs.append(arg) |
|
3780 elif opt == "--unset-ca-cert": |
|
3781 unset_ca_certs.append(arg) |
|
3782 elif opt == "--set-property": |
|
3783 t = arg.split("=", 1) |
|
3784 if len(t) < 2: |
|
3785 usage(_("properties to be set must be of the " |
|
3786 "form '<name>=<value>'. This is what was " |
|
3787 "given: {0}").format(arg), cmd=cmd_name) |
|
3788 if t[0] in set_props: |
|
3789 usage(_("a property may only be set once in a " |
|
3790 "command. {0} was set twice").format(t[0]), |
|
3791 cmd=cmd_name) |
|
3792 set_props[t[0]] = t[1] |
|
3793 elif opt == "--add-property-value": |
|
3794 t = arg.split("=", 1) |
|
3795 if len(t) < 2: |
|
3796 usage(_("property values to be added must be " |
|
3797 "of the form '<name>=<value>'. This is " |
|
3798 "what was given: {0}").format(arg), |
|
3799 cmd=cmd_name) |
|
3800 add_prop_values.setdefault(t[0], []) |
|
3801 add_prop_values[t[0]].append(t[1]) |
|
3802 elif opt == "--remove-property-value": |
|
3803 t = arg.split("=", 1) |
|
3804 if len(t) < 2: |
|
3805 usage(_("property values to be removed must be " |
|
3806 "of the form '<name>=<value>'. This is " |
|
3807 "what was given: {0}").format(arg), |
|
3808 cmd=cmd_name) |
|
3809 remove_prop_values.setdefault(t[0], []) |
|
3810 remove_prop_values[t[0]].append(t[1]) |
|
3811 elif opt == "--unset-property": |
|
3812 unset_props.add(arg) |
|
3813 elif opt == "--proxy": |
|
3814 proxy_uri = arg |
|
3815 |
|
3816 name = None |
|
3817 if len(pargs) == 0 and not repo_uri: |
|
3818 usage(_("requires a publisher name"), cmd="set-publisher") |
|
3819 elif len(pargs) > 1: |
|
3820 usage(_("only one publisher name may be specified"), |
|
3821 cmd="set-publisher") |
|
3822 elif pargs: |
|
3823 name = pargs[0] |
|
3824 |
|
3825 if origin_uri and (add_origins or remove_origins): |
|
3826 usage(_("the -O and -g, --add-origin, -G, or --remove-origin " |
|
3827 "options may not be combined"), cmd="set-publisher") |
|
3828 |
|
3829 if (search_before and search_after) or \ |
|
3830 (search_before and search_first) or (search_after and search_first): |
|
3831 usage(_("search-before, search-after, and search-first (-P) " |
|
3832 "may not be combined"), cmd="set-publisher") |
|
3833 |
|
3834 if repo_uri and (add_origins or add_mirrors or remove_origins or |
|
3835 remove_mirrors or disable != None or not refresh_allowed or |
|
3836 reset_uuid): |
|
3837 usage(_("the -p option may not be combined with the -g, " |
|
3838 "--add-origin, -G, --remove-origin, -m, --add-mirror, " |
|
3839 "-M, --remove-mirror, --enable, --disable, --no-refresh, " |
|
3840 "or --reset-uuid options"), cmd="set-publisher") |
|
3841 |
|
3842 if proxy_uri and not (add_origins or add_mirrors or repo_uri or |
|
3843 remove_origins or remove_mirrors): |
|
3844 usage(_("the --proxy argument may only be combined with the -g," |
|
3845 " --add-origin, -m, --add-mirror, or -p options"), |
|
3846 cmd="set-publisher") |
|
3847 |
|
3848 # Get sanitized SSL Cert/Key input values. |
|
3849 ssl_cert, ssl_key = _get_ssl_cert_key(api_inst.root, api_inst.is_zone, |
|
3850 ssl_cert, ssl_key) |
|
3851 |
|
3852 if not repo_uri: |
|
3853 # Normal case. |
|
3854 ret = _set_pub_error_wrap(_add_update_pub, name, [], |
|
3855 api_inst, name, disable=disable, sticky=sticky, |
|
3856 origin_uri=origin_uri, add_mirrors=add_mirrors, |
|
3857 remove_mirrors=remove_mirrors, add_origins=add_origins, |
|
3858 remove_origins=remove_origins, ssl_cert=ssl_cert, |
|
3859 ssl_key=ssl_key, search_before=search_before, |
|
3860 search_after=search_after, search_first=search_first, |
|
3861 reset_uuid=reset_uuid, refresh_allowed=refresh_allowed, |
|
3862 set_props=set_props, add_prop_values=add_prop_values, |
|
3863 remove_prop_values=remove_prop_values, |
|
3864 unset_props=unset_props, approved_cas=approved_ca_certs, |
|
3865 revoked_cas=revoked_ca_certs, unset_cas=unset_ca_certs, |
|
3866 proxy_uri=proxy_uri) |
|
3867 |
|
3868 rval, rmsg = ret |
|
3869 if rmsg: |
|
3870 error(rmsg, cmd="set-publisher") |
|
3871 return rval |
|
3872 |
|
3873 pubs = None |
|
3874 # Automatic configuration via -p case. |
|
3875 def get_pubs(): |
|
3876 if proxy_uri: |
|
3877 proxies = [publisher.ProxyURI(proxy_uri)] |
|
3878 else: |
|
3879 proxies = [] |
|
3880 repo = publisher.RepositoryURI(repo_uri, |
|
3881 ssl_cert=ssl_cert, ssl_key=ssl_key, proxies=proxies) |
|
3882 return EXIT_OK, api_inst.get_publisherdata(repo=repo) |
|
3883 |
|
3884 ret = None |
|
3885 try: |
|
3886 ret = _set_pub_error_wrap(get_pubs, name, |
|
3887 [api_errors.UnsupportedRepositoryOperation]) |
|
3888 except api_errors.UnsupportedRepositoryOperation as e: |
|
3889 # Fail if the operation can't be done automatically. |
|
3890 error(str(e), cmd="set-publisher") |
|
3891 logger.error(_(""" |
|
3892 To add a publisher using this repository, execute the following command as a |
|
3893 privileged user: |
|
3894 |
|
3895 pkg set-publisher -g {0} <publisher> |
|
3896 """).format(repo_uri)) |
|
3897 return EXIT_OOPS |
|
3898 else: |
|
3899 rval, rmsg = ret |
|
3900 if rval != EXIT_OK: |
|
3901 error(rmsg, cmd="set-publisher") |
|
3902 return rval |
|
3903 pubs = rmsg |
|
3904 |
|
3905 # For the automatic publisher configuration case, update or add |
|
3906 # publishers based on whether they exist and if they match any |
|
3907 # specified publisher prefix. |
|
3908 if not pubs: |
|
3909 error(_(""" |
|
3910 The specified repository did not contain any publisher configuration |
|
3911 information. This is likely the result of a repository configuration |
|
3912 error. Please contact the repository administrator for further |
|
3913 assistance.""")) |
|
3914 return EXIT_OOPS |
|
3915 |
|
3916 if name and name not in pubs: |
|
3917 known = [p.prefix for p in pubs] |
|
3918 unknown = [name] |
|
3919 e = api_errors.UnknownRepositoryPublishers(known=known, |
|
3920 unknown=unknown, location=repo_uri) |
|
3921 error(str(e)) |
|
3922 return EXIT_OOPS |
|
3923 |
|
3924 added = [] |
|
3925 updated = [] |
|
3926 failed = [] |
|
3927 |
|
3928 for src_pub in sorted(pubs): |
|
3929 prefix = src_pub.prefix |
|
3930 if name and prefix != name: |
|
3931 # User didn't request this one. |
|
3932 continue |
|
3933 |
|
3934 src_repo = src_pub.repository |
|
3935 if not api_inst.has_publisher(prefix=prefix): |
|
3936 add_origins = [] |
|
3937 if not src_repo or not src_repo.origins: |
|
3938 # If the repository publisher configuration |
|
3939 # didn't include configuration information |
|
3940 # for the publisher's repositories, assume |
|
3941 # that the origin for the new publisher |
|
3942 # matches the URI provided. |
|
3943 add_origins.append(repo_uri) |
|
3944 |
|
3945 # Any -p origins/mirrors returned from get_pubs() should |
|
3946 # use the proxy we declared, if any. |
|
3947 if proxy_uri and src_repo: |
|
3948 proxies = [publisher.ProxyURI(proxy_uri)] |
|
3949 for repo_uri in src_repo.origins: |
|
3950 repo_uri.proxies = proxies |
|
3951 for repo_uri in src_repo.mirrors: |
|
3952 repo_uri.proxies = proxies |
|
3953 |
|
3954 rval, rmsg = _set_pub_error_wrap(_add_update_pub, name, |
|
3955 [], api_inst, prefix, pub=src_pub, |
|
3956 add_origins=add_origins, ssl_cert=ssl_cert, |
|
3957 ssl_key=ssl_key, sticky=sticky, |
|
3958 search_after=search_after, |
|
3959 search_before=search_before, |
|
3960 search_first=search_first, |
|
3961 set_props=set_props, |
|
3962 add_prop_values=add_prop_values, |
|
3963 remove_prop_values=remove_prop_values, |
|
3964 unset_props=unset_props, proxy_uri=proxy_uri) |
|
3965 if rval == EXIT_OK: |
|
3966 added.append(prefix) |
|
3967 |
|
3968 # When multiple publishers result from a single -p |
|
3969 # operation, this ensures that the new publishers are |
|
3970 # ordered correctly. |
|
3971 search_first = False |
|
3972 search_after = prefix |
|
3973 search_before = None |
|
3974 else: |
|
3975 add_origins = [] |
|
3976 add_mirrors = [] |
|
3977 dest_pub = api_inst.get_publisher(prefix=prefix, |
|
3978 duplicate=True) |
|
3979 dest_repo = dest_pub.repository |
|
3980 if dest_repo.origins and \ |
|
3981 not dest_repo.has_origin(repo_uri): |
|
3982 add_origins = [repo_uri] |
|
3983 |
|
3984 if not src_repo and not add_origins: |
|
3985 # The repository doesn't have to provide origin |
|
3986 # information for publishers. If it doesn't, |
|
3987 # the origin of every publisher returned is |
|
3988 # assumed to match the URI that the user |
|
3989 # provided. Since this is an update case, |
|
3990 # nothing special needs to be done. |
|
3991 if not dest_repo.origins: |
|
3992 add_origins = [repo_uri] |
|
3993 elif src_repo: |
|
3994 # Avoid duplicates by adding only those mirrors |
|
3995 # or origins not already known. |
|
3996 add_mirrors = [ |
|
3997 u.uri |
|
3998 for u in src_repo.mirrors |
|
3999 if u.uri not in dest_repo.mirrors |
|
4000 ] |
|
4001 add_origins = [ |
|
4002 u.uri |
|
4003 for u in src_repo.origins |
|
4004 if u.uri not in dest_repo.origins |
|
4005 ] |
|
4006 |
|
4007 # Special bits to update; for these, take the |
|
4008 # new value as-is (don't attempt to merge). |
|
4009 for prop in ("collection_type", "description", |
|
4010 "legal_uris", "name", "refresh_seconds", |
|
4011 "registration_uri", "related_uris"): |
|
4012 src_val = getattr(src_repo, prop) |
|
4013 if src_val is not None: |
|
4014 setattr(dest_repo, prop, |
|
4015 src_val) |
|
4016 |
|
4017 # If an alias doesn't already exist, update it too. |
|
4018 if src_pub.alias and not dest_pub.alias: |
|
4019 dest_pub.alias = src_pub.alias |
|
4020 |
|
4021 rval, rmsg = _set_pub_error_wrap(_add_update_pub, name, |
|
4022 [], api_inst, prefix, pub=dest_pub, |
|
4023 add_mirrors=add_mirrors, add_origins=add_origins, |
|
4024 set_props=set_props, |
|
4025 add_prop_values=add_prop_values, |
|
4026 remove_prop_values=remove_prop_values, |
|
4027 unset_props=unset_props, proxy_uri=proxy_uri) |
|
4028 |
|
4029 if rval == EXIT_OK: |
|
4030 updated.append(prefix) |
|
4031 |
|
4032 if rval != EXIT_OK: |
|
4033 failed.append((prefix, rmsg)) |
|
4034 continue |
|
4035 |
|
4036 first = True |
|
4037 for pub, rmsg in failed: |
|
4038 if first: |
|
4039 first = False |
|
4040 error("failed to add or update one or more " |
|
4041 "publishers", cmd="set-publisher") |
|
4042 logger.error(" {0}:".format(pub)) |
|
4043 logger.error(rmsg) |
|
4044 |
|
4045 if added or updated: |
|
4046 if first: |
|
4047 logger.info("pkg set-publisher:") |
|
4048 if added: |
|
4049 logger.info(_(" Added publisher(s): {0}").format( |
3514 logger.info(_(" Added publisher(s): {0}").format( |
4050 ", ".join(added))) |
3515 ", ".join(out_json["data"]["added"]))) |
4051 if updated: |
3516 if "updated" in out_json["data"]: |
4052 logger.info(_(" Updated publisher(s): {0}").format( |
3517 logger.info(_(" Updated publisher(s): {0}").format( |
4053 ", ".join(updated))) |
3518 ", ".join(out_json["data"]["updated"]))) |
4054 |
3519 |
4055 if failed: |
3520 if errors: |
4056 if len(failed) != len(pubs): |
3521 _generate_error_messages(out_json["status"], errors, |
4057 # Not all publishers retrieved could be added or |
3522 cmd="set-publisher", add_info={"repo_uri": repo_uri}) |
4058 # updated. |
3523 |
4059 return EXIT_PARTIAL |
3524 return out_json["status"] |
4060 return EXIT_OOPS |
3525 |
4061 |
3526 def publisher_unset(api_inst, pargs): |
4062 # Now that the configuration was successful, attempt to refresh the |
|
4063 # catalog data for all of the configured publishers. If the refresh |
|
4064 # had been allowed earlier while configuring each publisher, then this |
|
4065 # wouldn't be necessary and some possibly invalid configuration could |
|
4066 # have been eliminated sooner. However, that would be much slower as |
|
4067 # each refresh requires a client image state rebuild. |
|
4068 return __refresh(api_inst, added + updated) |
|
4069 |
|
def _add_update_pub(api_inst, prefix, pub=None, disable=None, sticky=None,
    origin_uri=None, add_mirrors=EmptyI, remove_mirrors=EmptyI,
    add_origins=EmptyI, remove_origins=EmptyI, ssl_cert=None, ssl_key=None,
    search_before=None, search_after=None, search_first=False,
    reset_uuid=None, refresh_allowed=False,
    set_props=EmptyI, add_prop_values=EmptyI,
    remove_prop_values=EmptyI, unset_props=EmptyI, approved_cas=EmptyI,
    revoked_cas=EmptyI, unset_cas=EmptyI, proxy_uri=None):
        """Add a new publisher to the image, or update an existing one.

        If 'pub' is None, the publisher named by 'prefix' is looked up in
        the image (and duplicated for modification); if it does not exist,
        a brand-new publisher object is created.  All other keyword
        arguments describe individual configuration changes (origins,
        mirrors, SSL credentials, search order, properties, CA certs).

        Returns a (exit-code, message) tuple: (EXIT_OK, None) on success,
        or (EXIT_OOPS, str) when removal-style options are given for a
        publisher that does not exist.  Other API failures propagate as
        exceptions for the caller (_set_pub_error_wrap) to handle.

        NOTE(review): 'orig_cwd' used below for resolving CA cert paths is
        presumably a module-level global holding the client's original
        working directory -- confirm against the top of the file.
        """

        repo = None
        new_pub = False
        if not pub:
                try:
                        pub = api_inst.get_publisher(prefix=prefix,
                            alias=prefix, duplicate=True)
                        if reset_uuid:
                                pub.reset_client_uuid()
                        repo = pub.repository
                except api_errors.UnknownPublisher as e:
                        # Removal-type options make no sense for a publisher
                        # that does not exist yet; report the lookup error.
                        if not origin_uri and not add_origins and \
                            (remove_origins or remove_mirrors or
                            remove_prop_values or add_mirrors):
                                return EXIT_OOPS, str(e)

                        # No pre-existing, so create a new one.
                        repo = publisher.Repository()
                        pub = publisher.Publisher(prefix, repository=repo)
                        new_pub = True
        elif not api_inst.has_publisher(prefix=pub.prefix):
                # Caller supplied a publisher object (auto-configuration
                # case) that is not yet known to the image.
                new_pub = True

        if not repo:
                repo = pub.repository
                if not repo:
                        # Could be a new publisher from auto-configuration
                        # case where no origin was provided in repository
                        # configuration.
                        repo = publisher.Repository()
                        pub.repository = repo

        if disable is not None:
                # Set disabled property only if provided.
                pub.disabled = disable

        if sticky is not None:
                # Set stickiness only if provided
                pub.sticky = sticky

        if proxy_uri:
                # we only support a single proxy for now.
                proxies = [publisher.ProxyURI(proxy_uri)]
        else:
                proxies = []

        if origin_uri:
                # For compatibility with old -O behaviour, treat -O as a wipe
                # of existing origins and add the new one.

                # Only use existing cert information if the new URI uses
                # https for transport.
                if repo.origins and not (ssl_cert or ssl_key) and \
                    any(origin_uri.startswith(scheme + ":")
                        for scheme in publisher.SSL_SCHEMES):

                        # Inherit the cert/key of the first origin only.
                        for uri in repo.origins:
                                if ssl_cert is None:
                                        ssl_cert = uri.ssl_cert
                                if ssl_key is None:
                                        ssl_key = uri.ssl_key
                                break

                repo.reset_origins()
                o = publisher.RepositoryURI(origin_uri, proxies=proxies)
                repo.add_origin(o)

                # XXX once image configuration supports storing this
                # information at the uri level, ssl info should be set
                # here.

        # Apply add/remove requests for mirrors and origins symmetrically;
        # "*" means "remove all" for that URI type.
        for entry in (("mirror", add_mirrors, remove_mirrors), ("origin",
            add_origins, remove_origins)):
                etype, add, remove = entry
                # XXX once image configuration supports storing this
                # information at the uri level, ssl info should be set
                # here.
                if "*" in remove:
                        getattr(repo, "reset_{0}s".format(etype))()
                else:
                        for u in remove:
                                getattr(repo, "remove_{0}".format(etype))(u)

                for u in add:
                        uri = publisher.RepositoryURI(u, proxies=proxies)
                        getattr(repo, "add_{0}".format(etype))(uri)

        # None is checked for here so that a client can unset a ssl_cert or
        # ssl_key by using -k "" or -c "".
        if ssl_cert is not None or ssl_key is not None:
                # Assume the user wanted to update the ssl_cert or ssl_key
                # information for *all* of the currently selected
                # repository's origins and mirrors that use SSL schemes.
                found_ssl = False
                for uri in repo.origins:
                        if uri.scheme not in publisher.SSL_SCHEMES:
                                continue
                        found_ssl = True
                        if ssl_cert is not None:
                                uri.ssl_cert = ssl_cert
                        if ssl_key is not None:
                                uri.ssl_key = ssl_key
                for uri in repo.mirrors:
                        if uri.scheme not in publisher.SSL_SCHEMES:
                                continue
                        found_ssl = True
                        if ssl_cert is not None:
                                uri.ssl_cert = ssl_cert
                        if ssl_key is not None:
                                uri.ssl_key = ssl_key

                if (ssl_cert or ssl_key) and not found_ssl:
                        # None of the origins or mirrors for the publisher
                        # use SSL schemes so the cert and key information
                        # won't be retained.
                        usage(_("Publisher '{0}' does not have any SSL-based "
                            "origins or mirrors.").format(prefix))

        if set_props or add_prop_values or remove_prop_values or unset_props:
                pub.update_props(set_props=set_props,
                    add_prop_values=add_prop_values,
                    remove_prop_values=remove_prop_values,
                    unset_props=unset_props)

        if new_pub:
                # New publishers carry their CA cert changes through
                # add_publisher() directly.
                api_inst.add_publisher(pub,
                    refresh_allowed=refresh_allowed, approved_cas=approved_cas,
                    revoked_cas=revoked_cas, unset_cas=unset_cas,
                    search_after=search_after, search_before=search_before,
                    search_first=search_first)
        else:
                # For existing publishers, CA cert changes are applied to
                # the publisher object before update_publisher() is called.
                for ca in approved_cas:
                        try:
                                # Resolve relative cert paths against the
                                # client's original working directory.
                                ca = os.path.normpath(
                                    os.path.join(orig_cwd, ca))
                                with open(ca, "rb") as fh:
                                        s = fh.read()
                        except EnvironmentError as e:
                                # Map the two expected filesystem failures
                                # onto specific API exceptions; re-raise
                                # anything else unchanged.
                                if e.errno == errno.ENOENT:
                                        raise api_errors.MissingFileArgumentException(
                                            ca)
                                elif e.errno == errno.EACCES:
                                        raise api_errors.PermissionsException(
                                            ca)
                                raise
                        pub.approve_ca_cert(s)

                for hsh in revoked_cas:
                        pub.revoke_ca_cert(hsh)

                for hsh in unset_cas:
                        pub.unset_ca_cert(hsh)

                api_inst.update_publisher(pub,
                    refresh_allowed=refresh_allowed, search_after=search_after,
                    search_before=search_before, search_first=search_first)

        return EXIT_OK, None
|
4236 |
|
def publisher_unset(api_inst, args):
        """pkg unset-publisher publisher ...

        Remove each named publisher from the image.  Returns EXIT_OK if
        every removal succeeded, EXIT_PARTIAL if only some failed, and
        EXIT_OOPS if all failed or the image format needs updating.
        """

        opts, pargs = getopt.getopt(args, "")
        if not pargs:
                usage(_("at least one publisher must be specified"),
                    cmd="unset-publisher")

        errors = []
        # Iterate and count 'pargs' (the operands left after getopt), not
        # the raw 'args' list: a "--" end-of-options terminator would
        # otherwise be counted in the goal and treated as a publisher name.
        goal = len(pargs)
        progtrack = api_inst.progresstracker
        progtrack.job_start(progtrack.JOB_PKG_CACHE, goal=goal)
        for name in pargs:
                try:
                        api_inst.remove_publisher(prefix=name, alias=name)
                except api_errors.ImageFormatUpdateNeeded as e:
                        # Image-wide problem; no point continuing.
                        format_update_error(e)
                        return EXIT_OOPS
                except (api_errors.PermissionsException,
                    api_errors.PublisherError,
                    api_errors.ModifyingSyspubException) as e:
                        # Per-publisher failure; collect and keep going.
                        errors.append((name, e))
                finally:
                        progtrack.job_add_progress(progtrack.JOB_PKG_CACHE)

        progtrack.job_done(progtrack.JOB_PKG_CACHE)
        retcode = EXIT_OK
        if errors:
                if len(errors) == len(pargs):
                        # If the operation failed for every provided publisher
                        # prefix or alias, complete failure occurred.
                        retcode = EXIT_OOPS
                else:
                        # If the operation failed for only some of the provided
                        # publisher prefixes or aliases, then partial failure
                        # occurred.
                        retcode = EXIT_PARTIAL

                txt = ""
                for name, err in errors:
                        txt += "\n"
                        txt += _("Removal failed for '{pub}': {msg}").format(
                            pub=name, msg=err)
                        txt += "\n"
                error(txt, cmd="unset-publisher")

        return retcode
|
4285 |
|
4286 def publisher_list(api_inst, args): |
|
4287 """pkg publishers""" |
|
4288 omit_headers = False |
|
4289 preferred_only = False |
|
4290 inc_disabled = True |
|
4291 valid_formats = ( "tsv", ) |
|
4292 output_format = "default" |
|
4293 field_data = { |
|
4294 "publisher" : [("default", "tsv"), _("PUBLISHER"), ""], |
|
4295 "attrs" : [("default"), "", ""], |
|
4296 "type" : [("default", "tsv"), _("TYPE"), ""], |
|
4297 "status" : [("default", "tsv"), _("STATUS"), ""], |
|
4298 "repo_loc" : [("default"), _("LOCATION"), ""], |
|
4299 "uri": [("tsv"), _("URI"), ""], |
|
4300 "sticky" : [("tsv"), _("STICKY"), ""], |
|
4301 "enabled" : [("tsv"), _("ENABLED"), ""], |
|
4302 "syspub" : [("tsv"), _("SYSPUB"), ""], |
|
4303 "proxy" : [("tsv"), _("PROXY"), ""], |
|
4304 "proxied" : [("default"), _("P"), ""] |
|
4305 } |
|
4306 |
|
4307 desired_field_order = (_("PUBLISHER"), "", _("STICKY"), |
|
4308 _("SYSPUB"), _("ENABLED"), _("TYPE"), |
|
4309 _("STATUS"), _("P"), _("LOCATION")) |
|
4310 |
|
4311 # Custom sort function for preserving field ordering |
|
4312 def sort_fields(one, two): |
|
4313 return desired_field_order.index(get_header(one)) - \ |
|
4314 desired_field_order.index(get_header(two)) |
|
4315 |
|
4316 # Functions for manipulating field_data records |
|
4317 |
|
4318 def filter_default(record): |
|
4319 return "default" in record[0] |
|
4320 |
|
4321 def filter_tsv(record): |
|
4322 return "tsv" in record[0] |
|
4323 |
|
4324 def get_header(record): |
|
4325 return record[1] |
|
4326 |
|
4327 def get_value(record): |
|
4328 return record[2] |
|
4329 |
|
4330 def set_value(record, value): |
|
4331 record[2] = value |
|
4332 |
|
4333 # 'a' is left over |
|
4334 opts, pargs = getopt.getopt(args, "F:HPan") |
|
4335 for opt, arg in opts: |
|
4336 if opt == "-H": |
|
4337 omit_headers = True |
|
4338 if opt == "-P": |
|
4339 preferred_only = True |
|
4340 if opt == "-n": |
|
4341 inc_disabled = False |
|
4342 if opt == "-F": |
|
4343 output_format = arg |
|
4344 if output_format not in valid_formats: |
|
4345 usage(_("Unrecognized format {format}." |
|
4346 " Supported formats: {valid}").format( |
|
4347 format=output_format, |
|
4348 valid=valid_formats), cmd="publisher") |
|
4349 return EXIT_OOPS |
|
4350 |
|
4351 api_inst.progresstracker.set_purpose( |
|
4352 api_inst.progresstracker.PURPOSE_LISTING) |
|
4353 |
|
4354 cert_cache = {} |
|
4355 def get_cert_info(ssl_cert): |
|
4356 if not ssl_cert: |
|
4357 return None |
|
4358 if ssl_cert not in cert_cache: |
|
4359 c = cert_cache[ssl_cert] = {} |
|
4360 errors = c["errors"] = [] |
|
4361 times = c["info"] = { |
|
4362 "effective": "", |
|
4363 "expiration": "", |
|
4364 } |
|
4365 |
|
4366 try: |
|
4367 cert = misc.validate_ssl_cert(ssl_cert) |
|
4368 except (EnvironmentError, |
|
4369 api_errors.CertificateError, |
|
4370 api_errors.PermissionsException) as e: |
|
4371 # If the cert information can't be retrieved, |
|
4372 # add the errors to a list and continue on. |
|
4373 errors.append(e) |
|
4374 c["valid"] = False |
|
4375 else: |
|
4376 nb = cert.get_notBefore() |
|
4377 t = time.strptime(nb, "%Y%m%d%H%M%SZ") |
|
4378 nb = datetime.datetime.utcfromtimestamp( |
|
4379 calendar.timegm(t)) |
|
4380 times["effective"] = nb.strftime("%c") |
|
4381 |
|
4382 na = cert.get_notAfter() |
|
4383 t = time.strptime(na, "%Y%m%d%H%M%SZ") |
|
4384 na = datetime.datetime.utcfromtimestamp( |
|
4385 calendar.timegm(t)) |
|
4386 times["expiration"] = na.strftime("%c") |
|
4387 c["valid"] = True |
|
4388 |
|
4389 return cert_cache[ssl_cert] |
|
4390 |
|
4391 retcode = EXIT_OK |
|
4392 if len(pargs) == 0: |
3547 if len(pargs) == 0: |
4393 if preferred_only: |
|
4394 pref_pub = api_inst.get_highest_ranked_publisher() |
|
4395 if api_inst.has_publisher(pref_pub): |
|
4396 pubs = [pref_pub] |
|
4397 else: |
|
4398 # Only publisher known is from an installed |
|
4399 # package and is not configured in the image. |
|
4400 pubs = [] |
|
4401 else: |
|
4402 pubs = [ |
|
4403 p for p in api_inst.get_publishers() |
|
4404 if inc_disabled or not p.disabled |
|
4405 ] |
|
4406 # Create a formatting string for the default output |
3548 # Create a formatting string for the default output |
4407 # format |
3549 # format. |
4408 if output_format == "default": |
3550 if output_format == "default": |
4409 fmt = "{0:14} {1:12} {2:8} {3:2} {4} {5}" |
3551 fmt = "{0:14} {1:12} {2:8} {3:2} {4} {5}" |
4410 filter_func = filter_default |
|
4411 |
3552 |
4412 # Create a formatting string for the tsv output |
3553 # Create a formatting string for the tsv output |
4413 # format |
3554 # format. |
4414 if output_format == "tsv": |
3555 if output_format == "tsv": |
4415 fmt = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}" |
3556 fmt = "{0}\t{1}\t{2}\t{3}\t{4}\t{5}\t{6}\t{7}" |
4416 filter_func = filter_tsv |
3557 |
4417 desired_field_order = (_("PUBLISHER"), "", _("STICKY"), |
3558 # Output an header if desired. |
4418 _("SYSPUB"), _("ENABLED"), _("TYPE"), |
|
4419 _("STATUS"), _("URI"), _("PROXY")) |
|
4420 |
|
4421 # Extract our list of headers from the field_data |
|
4422 # dictionary Make sure they are extracted in the |
|
4423 # desired order by using our custom sort function |
|
4424 hdrs = map(get_header, sorted(filter(filter_func, |
|
4425 field_data.values()), sort_fields)) |
|
4426 |
|
4427 # Output an header if desired |
|
4428 if not omit_headers: |
3559 if not omit_headers: |
4429 msg(fmt.format(*hdrs)) |
3560 msg(fmt.format(*ret_json["data"]["headers"])) |
4430 |
3561 |
4431 for p in pubs: |
3562 for p in ret_json["data"]["publishers"]: |
4432 # Store all our publisher related data in |
3563 msg(fmt.format(*p)) |
4433 # field_data ready for output |
|
4434 |
|
4435 set_value(field_data["publisher"], p.prefix) |
|
4436 # Setup the synthetic attrs field if the |
|
4437 # format is default. |
|
4438 if output_format == "default": |
|
4439 pstatus = "" |
|
4440 |
|
4441 if not p.sticky: |
|
4442 pstatus_list = [_("non-sticky")] |
|
4443 else: |
|
4444 pstatus_list = [] |
|
4445 |
|
4446 if p.disabled: |
|
4447 pstatus_list.append(_("disabled")) |
|
4448 if p.sys_pub: |
|
4449 pstatus_list.append(_("syspub")) |
|
4450 if pstatus_list: |
|
4451 pstatus = "({0})".format( |
|
4452 ", ".join(pstatus_list)) |
|
4453 set_value(field_data["attrs"], pstatus) |
|
4454 |
|
4455 if p.sticky: |
|
4456 set_value(field_data["sticky"], _("true")) |
|
4457 else: |
|
4458 set_value(field_data["sticky"], _("false")) |
|
4459 if not p.disabled: |
|
4460 set_value(field_data["enabled"], _("true")) |
|
4461 else: |
|
4462 set_value(field_data["enabled"], _("false")) |
|
4463 if p.sys_pub: |
|
4464 set_value(field_data["syspub"], _("true")) |
|
4465 else: |
|
4466 set_value(field_data["syspub"], _("false")) |
|
4467 |
|
4468 # Only show the selected repository's information in |
|
4469 # summary view. |
|
4470 if p.repository: |
|
4471 origins = p.repository.origins |
|
4472 mirrors = p.repository.mirrors |
|
4473 else: |
|
4474 origins = mirrors = [] |
|
4475 |
|
4476 set_value(field_data["repo_loc"], "") |
|
4477 set_value(field_data["proxied"], "") |
|
4478 # Update field_data for each origin and output |
|
4479 # a publisher record in our desired format. |
|
4480 for uri in sorted(origins): |
|
4481 # XXX get the real origin status |
|
4482 set_value(field_data["type"], _("origin")) |
|
4483 set_value(field_data["status"], _("online")) |
|
4484 set_value(field_data["proxy"], "-") |
|
4485 set_value(field_data["proxied"], "F") |
|
4486 |
|
4487 set_value(field_data["uri"], uri) |
|
4488 |
|
4489 if uri.proxies: |
|
4490 set_value(field_data["proxied"], _("T")) |
|
4491 set_value(field_data["proxy"], |
|
4492 ", ".join( |
|
4493 [proxy.uri |
|
4494 for proxy in uri.proxies])) |
|
4495 if uri.system: |
|
4496 set_value(field_data["repo_loc"], |
|
4497 SYSREPO_HIDDEN_URI) |
|
4498 else: |
|
4499 set_value(field_data["repo_loc"], uri) |
|
4500 |
|
4501 values = map(get_value, |
|
4502 sorted(filter(filter_func, |
|
4503 field_data.values()), sort_fields) |
|
4504 ) |
|
4505 msg(fmt.format(*values)) |
|
4506 # Update field_data for each mirror and output |
|
4507 # a publisher record in our desired format. |
|
4508 for uri in mirrors: |
|
4509 # XXX get the real mirror status |
|
4510 set_value(field_data["type"], _("mirror")) |
|
4511 set_value(field_data["status"], _("online")) |
|
4512 set_value(field_data["proxy"], "-") |
|
4513 set_value(field_data["proxied"], _("F")) |
|
4514 |
|
4515 set_value(field_data["uri"], uri) |
|
4516 |
|
4517 if uri.proxies: |
|
4518 set_value(field_data["proxied"], _("T")) |
|
4519 set_value(field_data["proxy"], |
|
4520 ", ".join( |
|
4521 [p.uri for p in uri.proxies])) |
|
4522 if uri.system: |
|
4523 set_value(field_data["repo_loc"], |
|
4524 SYSREPO_HIDDEN_URI) |
|
4525 else: |
|
4526 set_value(field_data["repo_loc"], uri) |
|
4527 |
|
4528 values = map(get_value, |
|
4529 sorted(filter(filter_func, |
|
4530 field_data.values()), sort_fields) |
|
4531 ) |
|
4532 msg(fmt.format(*values)) |
|
4533 |
|
4534 if not origins and not mirrors: |
|
4535 set_value(field_data["type"], "") |
|
4536 set_value(field_data["status"], "") |
|
4537 set_value(field_data["uri"], "") |
|
4538 set_value(field_data["proxy"], "") |
|
4539 values = map(get_value, |
|
4540 sorted(filter(filter_func, |
|
4541 field_data.values()), sort_fields) |
|
4542 ) |
|
4543 msg(fmt.format(*values)) |
|
4544 |
|
4545 else: |
3564 else: |
4546 def display_ssl_info(uri): |
3565 def display_signing_certs(p): |
4547 retcode = EXIT_OK |
3566 if "Approved CAs" in p: |
4548 c = get_cert_info(uri.ssl_cert) |
3567 msg(_(" Approved CAs:"), |
4549 msg(_(" SSL Key:"), uri.ssl_key) |
3568 p["Approved CAs"][0]) |
4550 msg(_(" SSL Cert:"), uri.ssl_cert) |
3569 for h in p["Approved CAs"][1:]: |
4551 |
3570 msg(_(" :"), h) |
4552 if not c: |
3571 if "Revoked CAs" in p: |
4553 return retcode |
3572 msg(_(" Revoked CAs:"), |
4554 |
3573 p["Revoked CAs"][0]) |
4555 if c["errors"]: |
3574 for h in p["Revoked CAs"][1:]: |
4556 retcode = EXIT_OOPS |
3575 msg(_(" :"), h) |
4557 |
3576 |
4558 for e in c["errors"]: |
3577 def display_ssl_info(uri_data): |
4559 logger.error("\n" + str(e) + "\n") |
3578 msg(_(" SSL Key:"), uri_data["SSL Key"]) |
4560 |
3579 msg(_(" SSL Cert:"), uri_data["SSL Cert"]) |
4561 if c["valid"]: |
3580 |
|
3581 if "errors" in ret_json: |
|
3582 for e in ret_json["errors"]: |
|
3583 if "errtype" in e and \ |
|
3584 e["errtype"] == "cert_info": |
|
3585 emsg(e["reason"]) |
|
3586 |
|
3587 if "Cert. Effective Date" in uri_data: |
4562 msg(_(" Cert. Effective Date:"), |
3588 msg(_(" Cert. Effective Date:"), |
4563 c["info"]["effective"]) |
3589 uri_data["Cert. Effective Date"]) |
4564 msg(_("Cert. Expiration Date:"), |
3590 msg(_("Cert. Expiration Date:"), |
4565 c["info"]["expiration"]) |
3591 uri_data["Cert. Expiration Date"]) |
|
3592 |
|
3593 if "data" not in ret_json or "publisher_details" not in \ |
|
3594 ret_json["data"]: |
4566 return retcode |
3595 return retcode |
4567 |
3596 |
4568 def display_repository(r): |
3597 for pub in ret_json["data"]["publisher_details"]: |
4569 retcode = 0 |
|
4570 for uri in r.origins: |
|
4571 msg(_(" Origin URI:"), uri) |
|
4572 if uri.proxies: |
|
4573 msg(_(" Proxy:"), |
|
4574 ", ".join( |
|
4575 [p.uri for p in uri.proxies])) |
|
4576 rval = display_ssl_info(uri) |
|
4577 if rval == 1: |
|
4578 retcode = EXIT_PARTIAL |
|
4579 |
|
4580 for uri in r.mirrors: |
|
4581 msg(_(" Mirror URI:"), uri) |
|
4582 if uri.proxies: |
|
4583 msg(_(" Proxy:"), |
|
4584 ", ".join( |
|
4585 [p.uri for p in uri.proxies])) |
|
4586 rval = display_ssl_info(uri) |
|
4587 if rval == 1: |
|
4588 retcode = EXIT_PARTIAL |
|
4589 return retcode |
|
4590 |
|
4591 def display_signing_certs(p): |
|
4592 if p.approved_ca_certs: |
|
4593 msg(_(" Approved CAs:"), |
|
4594 p.approved_ca_certs[0]) |
|
4595 for h in p.approved_ca_certs[1:]: |
|
4596 msg(_(" :"), h) |
|
4597 if p.revoked_ca_certs: |
|
4598 msg(_(" Revoked CAs:"), |
|
4599 p.revoked_ca_certs[0]) |
|
4600 for h in p.revoked_ca_certs[1:]: |
|
4601 msg(_(" :"), h) |
|
4602 |
|
4603 for name in pargs: |
|
4604 # detailed print |
|
4605 pub = api_inst.get_publisher(prefix=name, alias=name) |
|
4606 dt = api_inst.get_publisher_last_update_time(pub.prefix) |
|
4607 if dt: |
|
4608 dt = dt.strftime("%c") |
|
4609 |
|
4610 msg("") |
3598 msg("") |
4611 msg(_(" Publisher:"), pub.prefix) |
3599 msg(_(" Publisher:"), pub["Publisher"]) |
4612 msg(_(" Alias:"), pub.alias) |
3600 msg(_(" Alias:"), pub["Alias"]) |
4613 |
3601 |
4614 rval = display_repository(pub.repository) |
3602 if "origins" in pub: |
4615 if rval != 0: |
3603 for od in pub["origins"]: |
4616 # There was an error in displaying some |
3604 msg(_(" Origin URI:"), |
4617 # of the information about a repository. |
3605 od["Origin URI"]) |
4618 # However, continue on. |
3606 if "Proxy" in od: |
4619 retcode = rval |
3607 msg(_(" Proxy:"), |
4620 |
3608 ", ".join(od["Proxy"])) |
4621 msg(_(" Client UUID:"), pub.client_uuid) |
3609 display_ssl_info(od) |
4622 msg(_(" Catalog Updated:"), dt) |
3610 |
|
3611 if "mirrors" in pub: |
|
3612 for md in pub["mirrors"]: |
|
3613 msg(_(" Mirror URI:"), |
|
3614 md["Mirror URI"]) |
|
3615 if "Proxy" in md: |
|
3616 msg(_(" Proxy:"), |
|
3617 ", ".join(md["Proxy"])) |
|
3618 display_ssl_info(md) |
|
3619 |
|
3620 msg(_(" Client UUID:"), |
|
3621 pub["Client UUID"]) |
|
3622 msg(_(" Catalog Updated:"), |
|
3623 pub["Catalog Updated"]) |
4623 display_signing_certs(pub) |
3624 display_signing_certs(pub) |
4624 if pub.disabled: |
3625 msg(_(" Enabled:"), |
4625 msg(_(" Enabled:"), _("No")) |
3626 _(pub["Enabled"])) |
4626 else: |
3627 |
4627 msg(_(" Enabled:"), _("Yes")) |
3628 if "Properties" not in pub: |
4628 pub_items = sorted(pub.properties.iteritems()) |
3629 continue |
|
3630 pub_items = sorted( |
|
3631 pub["Properties"].iteritems()) |
4629 property_padding = " " |
3632 property_padding = " " |
4630 properties_displayed = False |
3633 properties_displayed = False |
4631 for k, v in pub_items: |
3634 for k, v in pub_items: |
4632 if not v: |
3635 if not v: |
4633 continue |
3636 continue |