54 from pkg import actions, elf |
54 from pkg import actions, elf |
55 from pkg.bundle.SolarisPackageDirBundle import SolarisPackageDirBundle |
55 from pkg.bundle.SolarisPackageDirBundle import SolarisPackageDirBundle |
56 from pkg.misc import emsg |
56 from pkg.misc import emsg |
57 from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR, PD_PROTO_DIR_LIST |
57 from pkg.portable import PD_LOCAL_PATH, PD_PROTO_DIR, PD_PROTO_DIR_LIST |
58 |
58 |
# Version of the pkg(5) client API this importer was written against.
# (The source showed conflicting 48/49 values from a version bump; 49 is
# the current one.)
CLIENT_API_VERSION = 49
PKG_CLIENT_NAME = "importer.py"

# Identify this tool to the packaging framework for logging/diagnostics.
pkg.client.global_settings.client_name = PKG_CLIENT_NAME
62 |
62 |
63 from tempfile import mkstemp |
63 from tempfile import mkstemp |
64 |
64 |
69 # to global name table. Actions are annotated to include svr4 source |
69 # to global name table. Actions are annotated to include svr4 source |
70 # pkg & path |
70 # pkg & path |
71 |
71 |
72 |
72 |
basename_dict = {}   # maps file basenames to the lists of actions delivering them
branch_dict = {}     # per-package branch overrides consulted by get_branch(); falls back to def_branch
cons_dict = {}       # consolidation incorporation dictionaries
file_repo = False    # presumably True when publishing to a file:// repo -- TODO confirm
curpkg = None        # which IPS package we're currently importing
def_branch = ""      # default branch
def_pub = None       # default publisher -- NOTE(review): appears to be set from the command line; verify
200 action.attrs["path"] in excludes: |
200 action.attrs["path"] in excludes: |
201 if show_debug: |
201 if show_debug: |
202 print "excluding %s from %s" % \ |
202 print "excluding %s from %s" % \ |
203 (action.attrs["path"], imppkg_name) |
203 (action.attrs["path"], imppkg_name) |
204 continue |
204 continue |
205 |
205 |
206 if action.name == "unknown": |
206 if action.name == "unknown": |
207 continue |
207 continue |
208 |
208 |
209 action.attrs["importer.source"] = "svr4pkg" |
209 action.attrs["importer.source"] = "svr4pkg" |
210 action.attrs["importer.svr4pkg"] = imppkg_name |
210 action.attrs["importer.svr4pkg"] = imppkg_name |
212 |
212 |
213 if action.name == "license": |
213 if action.name == "license": |
214 # The "path" attribute is confusing and |
214 # The "path" attribute is confusing and |
215 # unnecessary for licenses. |
215 # unnecessary for licenses. |
216 del action.attrs["path"] |
216 del action.attrs["path"] |
217 |
217 |
218 if action.name == "file": |
218 if action.name == "file": |
219 # is this a file for which we need a timestamp? |
219 # is this a file for which we need a timestamp? |
220 basename = os.path.basename(action.attrs["path"]) |
220 basename = os.path.basename(action.attrs["path"]) |
221 for file_pattern in timestamp_files: |
221 for file_pattern in timestamp_files: |
222 if fnmatch.fnmatch(basename, file_pattern): |
222 if fnmatch.fnmatch(basename, file_pattern): |
226 |
226 |
227 # is this file likely to be an SMF manifest? If so, |
227 # is this file likely to be an SMF manifest? If so, |
228 # save a copy of the file to use for dependency analysis |
228 # save a copy of the file to use for dependency analysis |
229 if smf_manifest.has_smf_manifest_dir(action.attrs["path"]): |
229 if smf_manifest.has_smf_manifest_dir(action.attrs["path"]): |
230 fetch_file(action, local_smf_manifests) |
230 fetch_file(action, local_smf_manifests) |
231 |
231 |
232 if hollow: |
232 if hollow: |
233 action.attrs["variant.opensolaris.zone"] = "global" |
233 action.attrs["variant.opensolaris.zone"] = "global" |
234 |
234 |
235 self.check_perms(action) |
235 self.check_perms(action) |
236 self.actions.append(action) |
236 self.actions.append(action) |
329 new_attrs = actions.attrsfromstr(line.rstrip()) |
329 new_attrs = actions.attrsfromstr(line.rstrip()) |
330 |
330 |
331 o = [ |
331 o = [ |
332 f |
332 f |
333 for f in self.actions |
333 for f in self.actions |
334 if "path" in f.attrs and |
334 if "path" in f.attrs and |
335 fnmatch.fnmatchcase(f.attrs["path"], glob) and |
335 fnmatch.fnmatchcase(f.attrs["path"], glob) and |
336 (not type or type == f.name) |
336 (not type or type == f.name) |
337 ] |
337 ] |
338 |
338 |
339 for f in o: |
339 for f in o: |
421 targets = set((d.attrs["target"] for d in dups)) |
421 targets = set((d.attrs["target"] for d in dups)) |
422 if len(targets) > 1: |
422 if len(targets) > 1: |
423 errorlist.append("Multiple %s actions with same path and different targets:\n\t%s\n" % |
423 errorlist.append("Multiple %s actions with same path and different targets:\n\t%s\n" % |
424 (dups[0].name, "\n\t".join(str(d) for d in dups))) |
424 (dups[0].name, "\n\t".join(str(d) for d in dups))) |
425 continue |
425 continue |
426 |
426 |
427 elif dups[0].name != "dir": |
427 elif dups[0].name != "dir": |
428 errorlist.append("Multiple actions with the same path that aren't directories:\n\t%s\n" % |
428 errorlist.append("Multiple actions with the same path that aren't directories:\n\t%s\n" % |
429 ("\n\t".join(str(d) for d in dups))) |
429 ("\n\t".join(str(d) for d in dups))) |
430 continue |
430 continue |
431 |
431 |
444 (p, g, "\n\t".join(str(d) for d in dups)) |
444 (p, g, "\n\t".join(str(d) for d in dups)) |
445 if allow_dir_goofs: |
445 if allow_dir_goofs: |
446 print >> sys.stderr, "%s\n" % dir_error |
446 print >> sys.stderr, "%s\n" % dir_error |
447 else: |
447 else: |
448 errorlist.append(dir_error) |
448 errorlist.append(dir_error) |
449 |
449 |
450 elif remove_dups and g.startswith("variant.") and None in ga[g]: |
450 elif remove_dups and g.startswith("variant.") and None in ga[g]: |
451 # remove any dirs that are zone variants if same dir w/o variant exists |
451 # remove any dirs that are zone variants if same dir w/o variant exists |
452 for d in dups: |
452 for d in dups: |
453 if d.attrs.get(g) != None: |
453 if d.attrs.get(g) != None: |
454 d.attrs["importer.deleteme"] = "True" |
454 d.attrs["importer.deleteme"] = "True" |
485 |
485 |
486 # add dependency on consolidation incorporation if not obsolete or renamed |
486 # add dependency on consolidation incorporation if not obsolete or renamed |
487 if pkg.consolidation and not pkg.obsolete_branch and not pkg.rename_branch: |
487 if pkg.consolidation and not pkg.obsolete_branch and not pkg.rename_branch: |
488 action = actions.fromstr( |
488 action = actions.fromstr( |
489 "depend fmri=consolidation/%s/%s-incorporation " |
489 "depend fmri=consolidation/%s/%s-incorporation " |
490 "type=require importer.no-version=true" % |
490 "type=require importer.no-version=true" % |
491 (pkg.consolidation, pkg.consolidation)) |
491 (pkg.consolidation, pkg.consolidation)) |
492 pkg.actions.append(action) |
492 pkg.actions.append(action) |
493 |
493 |
494 # add legacy actions |
494 # add legacy actions |
495 if pkg.name != "SUNWipkg": |
495 if pkg.name != "SUNWipkg": |
545 try: |
545 try: |
546 t.add(a) |
546 t.add(a) |
547 except TypeError, e: |
547 except TypeError, e: |
548 print a.attrs |
548 print a.attrs |
549 print a.name |
549 print a.name |
550 |
550 |
551 raise |
551 raise |
552 |
552 |
553 def publish_pkg(pkg, proto_dir): |
553 def publish_pkg(pkg, proto_dir): |
554 """ send this package to the repo """ |
554 """ send this package to the repo """ |
555 |
555 |
556 smf_fmris = [] |
556 smf_fmris = [] |
557 |
557 |
558 svr4_pkg_list = sorted(list(set([ |
558 svr4_pkg_list = sorted(list(set([ |
559 a.attrs["importer.svr4pkg"] |
559 a.attrs["importer.svr4pkg"] |
560 for a in pkg.actions |
560 for a in pkg.actions |
561 if "importer.svr4pkg" in a.attrs and |
561 if "importer.svr4pkg" in a.attrs and |
562 a.name in ["license", "file"] |
562 a.name in ["license", "file"] |
737 np = link.attrs["path"] |
737 np = link.attrs["path"] |
738 nt = link.attrs["target"] |
738 nt = link.attrs["target"] |
739 newpath = os.path.normpath( |
739 newpath = os.path.normpath( |
740 os.path.join(os.path.split(np)[0], nt)) |
740 os.path.join(os.path.split(np)[0], nt)) |
741 assert path.startswith(np) |
741 assert path.startswith(np) |
742 ret = [pkgpath_dict[p][0]] |
742 ret = [pkgpath_dict[p][0]] |
743 next = search_dicts(path.replace(np, newpath)) |
743 next = search_dicts(path.replace(np, newpath)) |
744 if next: |
744 if next: |
745 ret += next |
745 ret += next |
746 return ret |
746 return ret |
747 else: |
747 else: |
748 print "unexpected action %s in path %s" % (path_dict[p][0], path) |
748 print "unexpected action %s in path %s" % (path_dict[p][0], path) |
749 return [] |
749 return [] |
750 |
750 |
def get_smf_fmris(file, action_path):
    """ pull the delivered SMF FMRIs from file, associated with action_path """
    # NOTE(review): "file" shadows the Python 2 builtin; renaming would
    # change the keyword-argument interface, so it is left as-is.
    # Only paths under a recognised SMF manifest directory are parsed.
    if smf_manifest.has_smf_manifest_dir(action_path):
        instance_mf, instance_deps = smf_manifest.parse_smf_manifest(file)
        if instance_mf:
            return instance_mf.keys()
        # Implicitly returns None when parsing yields no instances --
        # TODO confirm callers handle None (function may continue beyond
        # this view).
758 |
758 |
832 elif "perl" in l or path.endswith(".pl"): |
832 elif "perl" in l or path.endswith(".pl"): |
833 pass # and here |
833 pass # and here |
834 |
834 |
835 # handle smf manifests |
835 # handle smf manifests |
836 if smf_manifest.has_smf_manifest_dir(path): |
836 if smf_manifest.has_smf_manifest_dir(path): |
837 |
837 |
838 # pkg.flavor.* used by pkgdepend wants PD_LOCAL_PATH, PD_PROTO_DIR |
838 # pkg.flavor.* used by pkgdepend wants PD_LOCAL_PATH, PD_PROTO_DIR |
839 # and PD_PROTO_DIR_LIST set |
839 # and PD_PROTO_DIR_LIST set |
840 action.attrs[PD_LOCAL_PATH] = fname |
840 action.attrs[PD_LOCAL_PATH] = fname |
841 action.attrs[PD_PROTO_DIR] = proto_dir |
841 action.attrs[PD_PROTO_DIR] = proto_dir |
842 action.attrs[PD_PROTO_DIR_LIST] = [proto_dir] |
842 action.attrs[PD_PROTO_DIR_LIST] = [proto_dir] |
977 |
977 |
def get_manifest(server_pub, fmri):
    """Return the manifest for package-fmri 'fmri' from publisher
    'server_pub', caching results in manifest_cache.

    A false 'fmri' (no matching package) yields the shared null_manifest.
    """
    if not fmri:  # no matching fmri
        return null_manifest

    # Bug fix: the previous
    #   manifest_cache.setdefault(key, fetch_manifest(...))
    # form evaluated fetch_manifest() on EVERY call, even on a cache
    # hit, defeating the cache entirely.  Only fetch on a genuine miss.
    key = (server_pub, fmri)
    try:
        return manifest_cache[key]
    except KeyError:
        m = manifest_cache[key] = fetch_manifest(server_pub, fmri)
        return m
984 |
984 |
985 def fetch_manifest(server_pub, fmri): |
985 def fetch_manifest(server_pub, fmri): |
986 """Fetch the manifest for package-fmri 'fmri' from the server |
986 """Fetch the manifest for package-fmri 'fmri' from the server |
987 in 'server_url'... return as Manifest object.... needs |
987 in 'server_url'... return as Manifest object.... needs |
1030 d.setdefault(f.pkg_name, []).append(f) |
1030 d.setdefault(f.pkg_name, []).append(f) |
1031 |
1031 |
1032 for k in d: |
1032 for k in d: |
1033 d[k].sort(reverse=True) |
1033 d[k].sort(reverse=True) |
1034 |
1034 |
1035 catalog_dict[server_pub] = d |
1035 catalog_dict[server_pub] = d |
1036 |
1036 |
def expand_fmri(server_pub, fmri_string, constraint=version.CONSTRAINT_AUTO):
    """ from specified server, find matching fmri using CONSTRAINT_AUTO
    cache for performance. Returns None if no matching fmri is found """
    # Lazily populate the per-publisher catalog cache on first use.
    if server_pub not in catalog_dict:
        load_catalog(server_pub)

    wanted = pkg.fmri.PkgFmri(fmri_string, "5.11")
    candidates = catalog_dict[server_pub].get(wanted.pkg_name, [])

    # First catalog entry satisfying the version constraint wins; an
    # unversioned request matches the first entry outright.
    return next(
        (f for f in candidates
         if not wanted.version or
         f.version.is_successor(wanted.version, constraint)),
        None)
def _get_dependencies(s, server_pub, fmri):
    """ recursive incorp expansion"""
    s.add(fmri)
    manifest = get_manifest(server_pub, fmri)
    for dep in manifest.gen_actions_by_type("depend"):
        if dep.attrs["type"] != "incorporate":
            continue
        child = expand_fmri(server_pub, dep.attrs["fmri"])
        # skip fmris that didn't resolve or were already planned
        if child and child not in s:
            _get_dependencies(s, server_pub, child)
1066 |
1066 |
1067 def get_smf_packages(server_url, manifest_locations, filter): |
1067 def get_smf_packages(server_url, manifest_locations, filter): |
1068 """ Performs a search against server_url looking for packages which contain |
1068 """ Performs a search against server_url looking for packages which contain |
1112 continue |
1112 continue |
1113 if filter in pfmri.get_fmri(): |
1113 if filter in pfmri.get_fmri(): |
1114 fmris.add(pfmri.get_fmri()) |
1114 fmris.add(pfmri.get_fmri()) |
1115 |
1115 |
1116 return [pkg.fmri.PkgFmri(pfmri) for pfmri in fmris] |
1116 return [pkg.fmri.PkgFmri(pfmri) for pfmri in fmris] |
1117 |
1117 |
def zap_strings(instr, strings):
    """takes an input string and a list of strings to be removed, ignoring
    case"""
    # Bug fix: the enclosing "while True:" loop was missing, leaving the
    # "break" dangling (a syntax error) -- restored so that EVERY
    # occurrence of each string is removed, re-scanning after each
    # removal since a deletion can create a new match.
    for s in strings:
        ls = s.lower()
        while True:
            li = instr.lower()
            i = li.find(ls)
            if i < 0:
                break
            instr = instr[0:i] + instr[i + len(ls):]
    return instr
1130 |
1130 |
def get_branch(name):
    """Return the branch configured for package 'name', or the default."""
    try:
        return branch_dict[name]
    except KeyError:
        return def_branch
1133 |
1133 |
def set_macro(key, value):
    """Register (or overwrite) the $(KEY) macro expansion."""
    macro_definitions["$(%s)" % key] = value
1136 |
1136 |
def clear_macro(key):
    """Forget the $(KEY) macro; raises KeyError if it was never defined."""
    macro_definitions.pop("$(%s)" % key)
1139 |
1139 |
def get_arch():
    """Return the build architecture: the value of the ARCH macro if
    defined, otherwise this machine's processor type."""
    # Bug fix: macros are stored under "$(%s)" % key (see set_macro), so
    # the old lookup of "$ARCH" could never hit and always fell back to
    # platform.processor().
    return macro_definitions.get("$(ARCH)", platform.processor())
1142 |
1142 |
1143 def read_full_line(lexer, continuation='\\'): |
1143 def read_full_line(lexer, continuation='\\'): |
1144 """Read a complete line, allowing for the possibility of it being |
1144 """Read a complete line, allowing for the possibility of it being |
1145 continued over multiple lines. Returns a single joined line, with |
1145 continued over multiple lines. Returns a single joined line, with |
1187 class tokenlexer(shlex.shlex): |
1187 class tokenlexer(shlex.shlex): |
1188 def read_token(self): |
1188 def read_token(self): |
1189 """ simple replacement of $(ARCH) with a non-special |
1189 """ simple replacement of $(ARCH) with a non-special |
1190 value defined on the command line is trivial. Since |
1190 value defined on the command line is trivial. Since |
1191 shlex's read_token routine also strips comments and |
1191 shlex's read_token routine also strips comments and |
1192 white space, this read_token cannot return either |
1192 white space, this read_token cannot return either |
1193 one so any macros that translate to either spaces or |
1193 one so any macros that translate to either spaces or |
1194 # (comment) need to be removed from the token stream.""" |
1194 # (comment) need to be removed from the token stream.""" |
1195 |
1195 |
1196 while True: |
1196 while True: |
1197 s = apply_macros(shlex.shlex.read_token(self)) |
1197 s = apply_macros(shlex.shlex.read_token(self)) |
1389 raise RuntimeError("Error: unknown token '%s' " |
1389 raise RuntimeError("Error: unknown token '%s' " |
1390 "(%s:%s)" % (token, lexer.infile, lexer.lineno)) |
1390 "(%s:%s)" % (token, lexer.infile, lexer.lineno)) |
1391 def repo_add_content(path_to_repo, path_to_proto): |
1391 def repo_add_content(path_to_repo, path_to_proto): |
1392 """Fire up depo to add content and rebuild search index""" |
1392 """Fire up depo to add content and rebuild search index""" |
1393 |
1393 |
1394 cmdname = os.path.join(path_to_proto, "usr/bin/pkgrepo") |
1394 cmdname = os.path.join(path_to_proto, "usr/bin/pkgrepo") |
1395 argstr = "%s -s %s refresh" % (cmdname, path_to_repo) |
1395 argstr = "%s -s %s refresh" % (cmdname, path_to_repo) |
1396 |
1396 |
1397 print "Adding content & rebuilding search indicies synchronously...." |
1397 print "Adding content & rebuilding search indicies synchronously...." |
1398 print "%s" % str(argstr) |
1398 print "%s" % str(argstr) |
1399 try: |
1399 try: |
1431 global not_these_consolidations |
1431 global not_these_consolidations |
1432 global curpkg |
1432 global curpkg |
1433 global xport |
1433 global xport |
1434 global xport_cfg |
1434 global xport_cfg |
1435 |
1435 |
1436 |
1436 |
1437 try: |
1437 try: |
1438 _opts, _args = getopt.getopt(sys.argv[1:], "AB:C:D:E:I:J:G:NR:T:b:dj:m:ns:v:w:p:") |
1438 _opts, _args = getopt.getopt(sys.argv[1:], "AB:C:D:E:I:J:G:NR:T:b:dj:m:ns:v:w:p:") |
1439 except getopt.GetoptError, _e: |
1439 except getopt.GetoptError, _e: |
1440 print "unknown option", _e.opt |
1440 print "unknown option", _e.opt |
1441 sys.exit(1) |
1441 sys.exit(1) |
1583 try: |
1583 try: |
1584 del pkgdict[pkg] |
1584 del pkgdict[pkg] |
1585 except KeyError: |
1585 except KeyError: |
1586 print "excluded package %s not in pkgdict" % pkg |
1586 print "excluded package %s not in pkgdict" % pkg |
1587 |
1587 |
1588 # Unless we are publishing all obsolete and renamed packages |
1588 # Unless we are publishing all obsolete and renamed packages |
1589 # (-A command line option), remove obsolete and renamed packages |
1589 # (-A command line option), remove obsolete and renamed packages |
1590 # that weren't obsoleted or renamed at this branch and create |
1590 # that weren't obsoleted or renamed at this branch and create |
1591 # a dictionary (called or_pkgs_per_con) of obsoleted and renamed |
1591 # a dictionary (called or_pkgs_per_con) of obsoleted and renamed |
1592 # packages per consolidation. The version portion of the fmri |
1592 # packages per consolidation. The version portion of the fmri |
1593 # will contain the branch that the package was obsoleted or renamed at. |
1593 # will contain the branch that the package was obsoleted or renamed at. |
1594 or_pkgs_per_con = {} |
1594 or_pkgs_per_con = {} |
1595 obs_or_renamed_pkgs = {} |
1595 obs_or_renamed_pkgs = {} |
1596 |
1596 |
1597 for pkg in pkgdict.keys(): |
1597 for pkg in pkgdict.keys(): |
1665 if action.name != "depend": |
1665 if action.name != "depend": |
1666 continue |
1666 continue |
1667 if action.attrs["type"] == "require" and "fmri" in action.attrs: |
1667 if action.attrs["type"] == "require" and "fmri" in action.attrs: |
1668 fmri = action.attrs["fmri"].split("@")[0] # remove version |
1668 fmri = action.attrs["fmri"].split("@")[0] # remove version |
1669 if fmri.startswith("pkg:/"): # remove pkg:/ if exists |
1669 if fmri.startswith("pkg:/"): # remove pkg:/ if exists |
1670 fmri = fmri[5:] |
1670 fmri = fmri[5:] |
1671 if fmri in obs_or_renamed_pkgs: |
1671 if fmri in obs_or_renamed_pkgs: |
1672 tup = obs_or_renamed_pkgs[fmri] |
1672 tup = obs_or_renamed_pkgs[fmri] |
1673 s = "Pkg %s has 'require' dependency on pkg %s, which is %s" % ( |
1673 s = "Pkg %s has 'require' dependency on pkg %s, which is %s" % ( |
1674 (pack.fmristr(),) + tup) |
1674 (pack.fmristr(),) + tup) |
1675 if tup[1] == "obsolete": |
1675 if tup[1] == "obsolete": |
1777 curpkg = None |
1777 curpkg = None |
1778 |
1778 |
1779 # Generate entire consolidation if we're generating any consolidation incorps |
1779 # Generate entire consolidation if we're generating any consolidation incorps |
1780 if consolidation_incorporations: |
1780 if consolidation_incorporations: |
1781 curpkg = start_package("entire") |
1781 curpkg = start_package("entire") |
1782 curpkg.summary = "incorporation to lock all system packages to same build" |
1782 curpkg.summary = "incorporation to lock all system packages to same build" |
1783 curpkg.desc = "This package constrains " \ |
1783 curpkg.desc = "This package constrains " \ |
1784 "system package versions to the same build. WARNING: Proper " \ |
1784 "system package versions to the same build. WARNING: Proper " \ |
1785 "system update and correct package selection depend on the " \ |
1785 "system update and correct package selection depend on the " \ |
1786 "presence of this incorporation. Removing this package will " \ |
1786 "presence of this incorporation. Removing this package will " \ |
1787 "result in an unsupported system." |
1787 "result in an unsupported system." |
1813 |
1813 |
1814 incorporated_pkgs = set([ |
1814 incorporated_pkgs = set([ |
1815 f |
1815 f |
1816 for l in cons_dict.values() |
1816 for l in cons_dict.values() |
1817 for f in l |
1817 for f in l |
1818 ]) |
1818 ]) |
1819 incorporated_pkgs |= set(consolidation_incorporations) |
1819 incorporated_pkgs |= set(consolidation_incorporations) |
1820 incorporated_pkgs |= set(["entire", "redistributable"]) |
1820 incorporated_pkgs |= set(["entire", "redistributable"]) |
1821 incorporated_pkgs |= set(obsoleted_renamed_pkgs) |
1821 incorporated_pkgs |= set(obsoleted_renamed_pkgs) |
1822 |
1822 |
1823 unincorps = set(pkgdict.keys()) - incorporated_pkgs |
1823 unincorps = set(pkgdict.keys()) - incorporated_pkgs |
1824 if unincorps: |
1824 if unincorps: |
1825 # look through these; if they have only set actions they're |
1825 # look through these; if they have only set actions they're |
1826 # ancient obsoleted pkgs - ignore them. |
1826 # ancient obsoleted pkgs - ignore them. |
1827 for f in unincorps.copy(): |
1827 for f in unincorps.copy(): |