diff --git a/buildqueue/worker.go b/buildqueue/worker.go index ffa3132..4f5773a 100644 --- a/buildqueue/worker.go +++ b/buildqueue/worker.go @@ -22,109 +22,124 @@ func StartPackageQueueWorker(ctx context.Context) { case <-ctx.Done(): return default: - err := packages.ProcessPackages() - if err != nil { - slog.Error("unable to process packages: " + err.Error()) - } - packs := packages.GetPackages() - packs.ForEach(func(k string, v domain.SourcePackage) bool { - needsBuild := v.Status == domain.Missing || v.Status == domain.Stale || v.Status == domain.Error - buildVersion := v.NewVersion - if buildVersion == "" { - buildVersion = v.Version - } - if needsBuild { - state, err := helpers.DBInst.GetBuildState(v.Name) - if err != nil { - state = domain.BuildState{ - BuildNumber: 0, - } - } - if state.BuildNumber > 1 { - return true - } - typ := domain.BuildTypeLTO - if state.BuildNumber == 1 { - typ = domain.BuildTypeNormal - } - buildItem := domain.BuildQueueItem{ - Source: v, - Status: domain.Queued, - Type: typ, - Patch: false, - Rebuild: false, - BuildNumber: state.BuildNumber, - BuildVersion: buildVersion, - } - err = Add(buildItem) - if err != nil { - slog.Info("unable to add package to queue: " + err.Error()) - } - } - return true - }) + processPackages() time.Sleep(1 * time.Hour) } } }() } +func StartQueueAndStatusWorker(ctx context.Context) { + go processQueueAndStatus(ctx) +} + +func processPackages() { + if err := packages.ProcessPackages(); err != nil { + slog.Error("unable to process packages", "error", err) + return + } + + packages.GetPackages().ForEach(func(k string, v domain.SourcePackage) bool { + if !shouldBuild(v) { + return true + } + + state, _ := helpers.DBInst.GetBuildState(v.Name) + if state.BuildNumber > 1 { + return true + } + + buildItem := createBuildItem(v, state) + if err := Add(buildItem); err != nil { + slog.Info("unable to add package to queue", "error", err) + } + return true + }) +} + +func shouldBuild(v domain.SourcePackage) bool { + return v.Status == domain.Missing || v.Status == domain.Stale || v.Status == domain.Error +} + +func createBuildItem(v domain.SourcePackage, state domain.BuildState) domain.BuildQueueItem { + buildVersion := v.NewVersion + if buildVersion == "" { + buildVersion = v.Version + } + + typ := domain.BuildTypeLTO + if state.BuildNumber == 1 { + typ = domain.BuildTypeNormal + } + + return domain.BuildQueueItem{ + Source: v, + Status: domain.Queued, + Type: typ, + Patch: false, + Rebuild: false, + BuildNumber: state.BuildNumber, + BuildVersion: buildVersion, + } +} + func processQueueAndStatus(ctx context.Context) { for { select { case <-ctx.Done(): return default: - q := GetQueue() - itemsToRemove := make([]string, 0) - buildingFound := false - - q.ForEach(func(k string, item domain.BuildQueueItem) bool { - if item.Status == domain.Building { - buildingFound = true - complete, err := CheckIfBuildComplete(ctx, item) - if err != nil && !complete { - slog.Error("unable to check if build is complete: " + err.Error()) - } - if complete { - - if err != nil { - updateBuildState(item, domain.Error) - item.Source.Status = domain.Error - item.Source.LastBuildStatus = domain.Error - item.Source.BuildAttempts++ - updatePackageStatus(&item, domain.Error) - } else { - updateBuildState(item, domain.Built) - item.Source.Status = domain.Built - item.Source.LastBuildStatus = domain.Built - item.Source.BuildAttempts = 0 - item.Source.Version = item.BuildVersion - updatePackageStatus(&item, domain.Current) - } - packages.UpdateSourcePackage(item.Source) - 
itemsToRemove = append(itemsToRemove, k) - } - } - return true - }) - - for _, item := range itemsToRemove { - Remove(item) - } - - if !buildingFound { - err := ProcessNext() - if err != nil { - slog.Error("unable to process queue: " + err.Error()) - } - } - + processQueue() time.Sleep(10 * time.Second) } } } +func processQueue() { + q := GetQueue() + itemsToRemove := []string{} + buildingFound := false + + q.ForEach(func(k string, item domain.BuildQueueItem) bool { + if item.Status == domain.Building { + buildingFound = true + complete, err := checkIfBuildComplete(item) + if complete { + handleCompletedBuild(item, err) + itemsToRemove = append(itemsToRemove, k) + } + } + return true + }) + + for _, item := range itemsToRemove { + Remove(item) + } + + if !buildingFound { + if err := ProcessNext(); err != nil { + slog.Error("unable to process queue", "error", err) + } + } +} + +func handleCompletedBuild(item domain.BuildQueueItem, err error) { + status := domain.Built + if err != nil { + status = domain.Error + item.Source.BuildAttempts++ + } else { + item.Source.BuildAttempts = 0 + item.Source.Version = item.BuildVersion + } + + item.Source.Status = status + item.Source.LastBuildStatus = status + updatePackageStatus(&item, status) + updateBuildState(item, status) + packages.UpdateSourcePackage(item.Source) +} + func updatePackageStatus(item *domain.BuildQueueItem, status domain.PackageStatus) { item.Source.Packages.ForEach(func(k string, v domain.PackageInfo) bool { v.Status = status @@ -153,89 +168,75 @@ func updateBuildState(item domain.BuildQueueItem, buildStatus domain.PackageStat helpers.DBInst.UpdateBuildState(state) } -func StartQueueAndStatusWorker(ctx context.Context) { - go processQueueAndStatus(ctx) -} - -func CheckIfBuildComplete(ctx context.Context, item domain.BuildQueueItem) (bool, error) { - resp, err := http.Get(config.Configs.ActionsUrl) +func checkIfBuildComplete(item domain.BuildQueueItem) (bool, error) { + builds, err := fetchBuilds(item.Source.Name + "=" + item.BuildVersion) if err != nil { return false, err } - defer resp.Body.Close() - - doc, err := html.Parse(resp.Body) - if err != nil { - return false, err - } - - buildName := item.Source.Name + "=" + item.BuildVersion - var builds []struct { - isMatch bool - status string - time time.Time - } - - var f func(*html.Node) error - f = func(n *html.Node) error { - if n.Type == html.ElementNode && n.Data == "div" { - for _, a := range n.Attr { - if a.Key == "class" && a.Val == "flex-item tw-items-center" { - isMatch, status, buildTime := checkBuildBlock(n, buildName) - if isMatch { - builds = append(builds, struct { - isMatch bool - status string - time time.Time - }{isMatch, status, buildTime}) - } - } - } - } - for c := n.FirstChild; c != nil; c = c.NextSibling { - if err := f(c); err != nil { - return err - } - } - return nil - } - - if err := f(doc); err != nil { - return false, err - } - - // Sort builds by time, most recent first - sort.Slice(builds, func(i, j int) bool { - return builds[i].time.After(builds[j].time) - }) if len(builds) == 0 { - slog.Info("No matching builds found", "buildName", buildName) + slog.Info("No matching builds found", "buildName", item.Source.Name+"="+item.BuildVersion) return false, nil } mostRecentBuild := builds[0] - switch mostRecentBuild.status { case "Success": return true, nil case "Failure": return true, fmt.Errorf("build failed") - case "Running": - return false, nil // Build is still in progress - case "Queued": - return false, nil // Build is still in progress - case 
"Waiting": - return false, nil // Build is waiting to start + case "Running", "Queued", "Waiting": + return false, nil default: slog.Warn("Unknown build status", "status", mostRecentBuild.status) return false, fmt.Errorf("unknown build status: %s", mostRecentBuild.status) } } -func checkBuildBlock(n *html.Node, buildName string) (bool, string, time.Time) { - var title string - var status string +func fetchBuilds(buildName string) ([]build, error) { + resp, err := http.Get(config.Configs.ActionsUrl) + if err != nil { + return nil, err + } + defer resp.Body.Close() + + doc, err := html.Parse(resp.Body) + if err != nil { + return nil, err + } + + var builds []build + var f func(*html.Node) + f = func(n *html.Node) { + if n.Type == html.ElementNode && n.Data == "div" { + for _, a := range n.Attr { + if a.Key == "class" && a.Val == "flex-item tw-items-center" { + if b := parseBuildBlock(n, buildName); b != nil { + builds = append(builds, *b) + } + } + } + } + for c := n.FirstChild; c != nil; c = c.NextSibling { + f(c) + } + } + f(doc) + + sort.Slice(builds, func(i, j int) bool { + return builds[i].time.After(builds[j].time) + }) + + return builds, nil +} + +type build struct { + status string + time time.Time +} + +func parseBuildBlock(n *html.Node, buildName string) *build { + var title, status string var buildTime time.Time var f func(*html.Node) @@ -278,8 +279,10 @@ func checkBuildBlock(n *html.Node, buildName string) (bool, string, time.Time) { f(c) } } - f(n) - return title == buildName, status, buildTime + if title == buildName { + return &build{status: status, time: buildTime} + } + return nil } diff --git a/go.mod b/go.mod index 0be702c..cbe4e45 100644 --- a/go.mod +++ b/go.mod @@ -14,6 +14,7 @@ require ( golang.org/x/crypto v0.21.0 golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 golang.org/x/net v0.22.0 + golang.org/x/sync v0.3.0 gorm.io/driver/sqlite v1.5.6 gorm.io/gorm v1.25.11 pault.ag/go/debian v0.16.0 diff --git a/packages/packages.go b/packages/packages.go index 91daf7c..68e83af 100644 --- a/packages/packages.go +++ b/packages/packages.go @@ -12,8 +12,10 @@ import ( "net/http" "slices" "strings" + "sync" "time" + "golang.org/x/sync/errgroup" "pault.ag/go/debian/version" "github.com/alphadose/haxmap" @@ -21,20 +23,39 @@ import ( "github.com/ulikunitz/xz" ) +// Constants for frequently used strings +const ( + DMOSuffix = "-dmo" + DebianInstallerSection = "debian-installer" + PackageKey = "Package" + SourceKey = "Source" + VersionKey = "Version" + ArchitectureKey = "Architecture" + DescriptionKey = "Description" + SectionKey = "Section" +) + var LastUpdateTime time.Time var currentPackages = haxmap.New[string, domain.SourcePackage]() func ProcessPackages() error { - var internalPackages = haxmap.New[string, domain.SourcePackage]() - var externalPackages = haxmap.New[string, domain.SourcePackage]() - err := LoadInternalPackages(internalPackages) - if err != nil { - return err - } - err = LoadExternalPackages(externalPackages) - if err != nil { + start := time.Now() + internalPackages := haxmap.New[string, domain.SourcePackage]() + externalPackages := haxmap.New[string, domain.SourcePackage]() + + var eg errgroup.Group + eg.Go(func() error { + return LoadPackages(internalPackages, config.Configs.LocalPackageFiles, true) + }) + + eg.Go(func() error { + return LoadPackages(externalPackages, config.Configs.ExternalPackageFiles, false) + }) + + if err := eg.Wait(); err != nil { return err } + slog.Info("packages processed in " + time.Since(start).String()) 
combinePackages(internalPackages) combinePackages(externalPackages) @@ -68,7 +89,7 @@ func ProcessPackages() error { updatedPackages.ForEach(func(k string, v domain.SourcePackage) bool { for _, pkg := range config.Configs.I386List { - if v.Name == pkg || v.Name == pkg+"-dmo" { + if v.Name == pkg || v.Name == pkg+DMOSuffix { v.Has32bit = true updatedPackages.Set(k, v) return true @@ -82,13 +103,11 @@ func ProcessPackages() error { currentPackages = updatedPackages LastUpdateTime = time.Now() - err = helpers.DBInst.DropPackages() - if err != nil { - return err + if err := helpers.DBInst.DropPackages(); err != nil { + return fmt.Errorf("dropping packages: %w", err) } - err = SaveToDb() - if err != nil { - return err + if err := SaveToDb(); err != nil { + return fmt.Errorf("saving to database: %w", err) } return nil @@ -114,23 +133,20 @@ func UpdateSourcePackage(pkg domain.SourcePackage) error { } func saveSingleToDb(pkg domain.SourcePackage) error { - err := helpers.DBInst.UpdatePackage(pkg) - if err != nil { - return err + if err := helpers.DBInst.UpdatePackage(pkg); err != nil { + return fmt.Errorf("updating package: %w", err) } LastUpdateTime = time.Now() - err = helpers.DBInst.UpdateLastUpdateTime(LastUpdateTime) - if err != nil { - return err + if err := helpers.DBInst.UpdateLastUpdateTime(LastUpdateTime); err != nil { + return fmt.Errorf("updating last update time: %w", err) } return nil } func SaveToDb() error { - err := helpers.DBInst.SavePackages(currentPackages) - if err != nil { - slog.Error(err.Error()) - return err + if err := helpers.DBInst.SavePackages(currentPackages); err != nil { + slog.Error("Error saving packages to database", "error", err) + return fmt.Errorf("saving packages to database: %w", err) } LastUpdateTime = time.Now() return helpers.DBInst.UpdateLastUpdateTime(LastUpdateTime) @@ -139,7 +155,7 @@ func SaveToDb() error { func LoadFromDb() error { packages, err := helpers.DBInst.GetPackages() if err != nil { - slog.Error(err.Error()) + slog.Error("Error getting packages from database", "error", err) return nil } slices.SortStableFunc(packages, func(a, b domain.SourcePackage) int { @@ -158,110 +174,94 @@ func LoadFromDb() error { return nil } -func LoadInternalPackages(internalPackages *haxmap.Map[string, domain.SourcePackage]) error { - localPackageFile := config.Configs.LocalPackageFiles - slices.SortStableFunc(localPackageFile, func(a, b config.PackageFile) int { +func LoadPackages(packages *haxmap.Map[string, domain.SourcePackage], packageFiles []config.PackageFile, isInternal bool) error { + slices.SortStableFunc(packageFiles, func(a, b config.PackageFile) int { if a.Priority == b.Priority { return 0 } - if a.Priority < b.Priority { + if (isInternal && a.Priority < b.Priority) || (!isInternal && a.Priority > b.Priority) { return 1 } return -1 }) - for _, pkg := range config.Configs.LocalPackageFiles { - for _, repo := range pkg.Subrepos { - packages, err := fetchPackageFile(pkg, repo) + var eg errgroup.Group + var mu sync.Mutex + packageResults := make([][]fetchResult, len(packageFiles)) + + for i, pkg := range packageFiles { + eg.Go(func() error { + results, err := fetchPackagesForFile(pkg) if err != nil { return err } - packages.ForEach(func(newKey string, newPkg domain.PackageInfo) bool { - pk, ok := internalPackages.Get(newPkg.Source) - if !ok { - newMap := haxmap.New[string, domain.PackageInfo]() - newMap.Set(newKey, newPkg) - internalPackages.Set(newPkg.Source, domain.SourcePackage{ - Name: newPkg.Source, - Packages: newMap, - Status: domain.Current, - 
Version: newPkg.Version, - }) - return true - } - pkg, ok := pk.Packages.Get(newKey) - if !ok { - pk.Packages.Set(newKey, newPkg) - return true - } - mVer, _ := version.Parse(pkg.Version) - extVer, _ := version.Parse(strings.Split(newPkg.Version, "+b")[0]) - cmpVal := version.Compare(extVer, mVer) - if cmpVal >= 0 { - pk.Version = getHighestVer(pkg.Version, newPkg.Version) - pk.Packages.Set(newKey, newPkg) - internalPackages.Set(newPkg.Source, pk) - return true - } - return true - }) + mu.Lock() + packageResults[i] = results + mu.Unlock() + return nil + }) + } + + if err := eg.Wait(); err != nil { + return fmt.Errorf("fetching package files: %w", err) + } + + for _, results := range packageResults { + for _, result := range results { + processPackageResult(packages, result) } } return nil } -func LoadExternalPackages(externalPackages *haxmap.Map[string, domain.SourcePackage]) error { - externalPackageFile := config.Configs.ExternalPackageFiles - slices.SortStableFunc(externalPackageFile, func(a, b config.PackageFile) int { - if a.Priority == b.Priority { - return 0 - } - if a.Priority < b.Priority { - return -1 - } - return 1 - }) +type fetchResult struct { + repo string + packages *haxmap.Map[string, domain.PackageInfo] +} - for _, pkg := range config.Configs.ExternalPackageFiles { - for _, repo := range pkg.Subrepos { - packages, err := fetchPackageFile(pkg, repo) - if err != nil { - return err - } - packages.ForEach(func(newKey string, newPkg domain.PackageInfo) bool { - pk, ok := externalPackages.Get(newPkg.Source) - if !ok { - newMap := haxmap.New[string, domain.PackageInfo]() - newMap.Set(newKey, newPkg) - externalPackages.Set(newPkg.Source, domain.SourcePackage{ - Name: newPkg.Source, - Packages: newMap, - Status: domain.Current, - Version: newPkg.Version, - }) - return true - } - pkg, ok := pk.Packages.Get(newKey) - if !ok { - pk.Packages.Set(newKey, newPkg) - return true - } - mVer, _ := version.Parse(pkg.Version) - extVer, _ := version.Parse(strings.Split(newPkg.Version, "+b")[0]) - cmpVal := version.Compare(extVer, mVer) - if cmpVal >= 0 { - pk.Version = getHighestVer(pkg.Version, newPkg.Version) - pk.Packages.Set(newKey, newPkg) - externalPackages.Set(newPkg.Source, pk) - return true - } - return true - }) +func fetchPackagesForFile(pkg config.PackageFile) ([]fetchResult, error) { + var results []fetchResult + for _, repo := range pkg.Subrepos { + packages, err := fetchPackageFile(pkg, repo) + if err != nil { + return nil, fmt.Errorf("fetching package file for repo %s: %w", repo, err) } + results = append(results, fetchResult{repo: repo, packages: packages}) } + return results, nil +} - return nil +func processPackageResult(packages *haxmap.Map[string, domain.SourcePackage], result fetchResult) { + result.packages.ForEach(func(newKey string, newPkg domain.PackageInfo) bool { + pk, ok := packages.Get(newPkg.Source) + if !ok { + newMap := haxmap.New[string, domain.PackageInfo]() + newMap.Set(newKey, newPkg) + packages.Set(newPkg.Source, domain.SourcePackage{ + Name: newPkg.Source, + Packages: newMap, + Status: domain.Current, + Version: newPkg.Version, + }) + return true + } + pkg, ok := pk.Packages.Get(newKey) + if !ok { + pk.Packages.Set(newKey, newPkg) + return true + } + mVer, _ := version.Parse(pkg.Version) + extVer, _ := version.Parse(strings.Split(newPkg.Version, "+b")[0]) + cmpVal := version.Compare(extVer, mVer) + if cmpVal >= 0 { + pk.Version = getHighestVer(pkg.Version, newPkg.Version) + pk.Packages.Set(newKey, newPkg) + packages.Set(newPkg.Source, pk) + return true + 
} + return true + }) } func ProcessMissingPackages(internalPackages *haxmap.Map[string, domain.SourcePackage], externalPackages *haxmap.Map[string, domain.SourcePackage]) { @@ -281,8 +281,7 @@ func ProcessStalePackages(internalPackages *haxmap.Map[string, domain.SourcePack if !ok || matchedPackage.Packages.Len() == 0 { return true } - ver := newSource.Version - ver = getHighestVer(ver, matchedPackage.Version) + ver := getHighestVer(newSource.Version, matchedPackage.Version) if ver != matchedPackage.Version { matchedPackage.NewVersion = ver matchedPackage.Status = domain.Stale @@ -296,101 +295,105 @@ func ProcessStalePackages(internalPackages *haxmap.Map[string, domain.SourcePack func fetchPackageFile(pkg config.PackageFile, selectedRepo string) (*haxmap.Map[string, domain.PackageInfo], error) { resp, err := http.Get(pkg.Url + selectedRepo + "/" + pkg.Packagepath + "." + pkg.Compression) if err != nil { - return nil, err + return nil, fmt.Errorf("fetching package file: %w", err) } defer resp.Body.Close() - rdr := io.Reader(resp.Body) - if pkg.Compression == "bz2" { - r := bzip2.NewReader(resp.Body) - rdr = r - } - if pkg.Compression == "xz" { - r, err := xz.NewReader(resp.Body) - if err != nil { - return nil, err - } - rdr = r - } - if pkg.Compression == "gz" { - r, err := gzip.NewReader(resp.Body) - if err != nil { - return nil, err - } - rdr = r + + rdr, err := getCompressedReader(resp.Body, pkg.Compression) + if err != nil { + return nil, fmt.Errorf("creating compressed reader: %w", err) } packages := haxmap.New[string, domain.PackageInfo]() sreader := deb.NewControlFileReader(rdr, false, false) + for { stanza, err := sreader.ReadStanza() if err != nil || stanza == nil { break } - if stanza["Section"] == "debian-installer" { + if stanza[SectionKey] == DebianInstallerSection { continue } - name := stanza["Package"] + name := stanza[PackageKey] - useWhitelist := pkg.UseWhitelist && len(pkg.Whitelist) > 0 - if useWhitelist { - contained := nameContains(name, pkg.Whitelist) - if !contained { - continue - } - } - - broken := nameContains(name, pkg.Blacklist) - if broken { + if shouldSkipPackage(name, pkg) { continue } - sourceSplit := strings.Split(stanza["Source"], " ") - source := sourceSplit[0] + source, sourceVersion := parseSource(stanza[SourceKey]) if source == "" { source = name } - // Extract version from Source if available - sourceVersion := "" - if len(sourceSplit) > 1 { - sourceVersion = strings.Trim(sourceSplit[1], "()") - } - - // Use sourceVersion if available, otherwise use stanza["Version"] - versionStr := stanza["Version"] - if sourceVersion != "" { - versionStr = sourceVersion - } + versionStr := chooseVersion(sourceVersion, stanza[VersionKey]) ver, err := version.Parse(versionStr) if err != nil { - return nil, err + return nil, fmt.Errorf("parsing version %s: %w", versionStr, err) } - pk, ok := packages.Get(name) - if ok { - matchedVer, _ := version.Parse(pk.Version) - cmpVal := version.Compare(ver, matchedVer) - if cmpVal < 0 { - continue - } + if shouldUpdatePackage(packages, name, ver) { + packages.Set(name, domain.PackageInfo{ + PackageName: name, + Version: ver.String(), + Source: source, + Architecture: stanza[ArchitectureKey], + Description: stanza[DescriptionKey], + Status: domain.Current, + }) } - - packages.Set(name, domain.PackageInfo{ - PackageName: name, - Version: ver.String(), - Source: source, - Architecture: stanza["Architecture"], - Description: stanza["Description"], - Status: domain.Current, - }) } return packages, nil } +func getCompressedReader(body 
io.Reader, compression string) (io.Reader, error) { + switch compression { + case "bz2": + return bzip2.NewReader(body), nil + case "xz": + return xz.NewReader(body) + case "gz": + return gzip.NewReader(body) + default: + return body, nil + } +} + +func shouldSkipPackage(name string, pkg config.PackageFile) bool { + if pkg.UseWhitelist && len(pkg.Whitelist) > 0 && !nameContains(name, pkg.Whitelist) { + return true + } + return nameContains(name, pkg.Blacklist) +} + +func parseSource(source string) (string, string) { + sourceSplit := strings.Split(source, " ") + if len(sourceSplit) > 1 { + return sourceSplit[0], strings.Trim(sourceSplit[1], "()") + } + return source, "" +} + +func chooseVersion(sourceVersion, stanzaVersion string) string { + if sourceVersion != "" { + return sourceVersion + } + return stanzaVersion +} + +func shouldUpdatePackage(packages *haxmap.Map[string, domain.PackageInfo], name string, ver version.Version) bool { + existingPkg, exists := packages.Get(name) + if !exists { + return true + } + existingVer, _ := version.Parse(existingPkg.Version) + return version.Compare(ver, existingVer) >= 0 +} + func nameContains(name string, match []string) bool { for _, m := range match { if strings.Contains(name, m) { @@ -432,39 +435,40 @@ func GetPackagesCount() domain.PackagesCount { func combinePackages(packages *haxmap.Map[string, domain.SourcePackage]) { dmoPackages := haxmap.New[string, domain.SourcePackage]() - // First pass: identify and collect -dmo packages + // Identify and collect -dmo packages packages.ForEach(func(k string, v domain.SourcePackage) bool { - if strings.HasSuffix(k, "-dmo") { + if strings.HasSuffix(k, DMOSuffix) { dmoPackages.Set(k, v) packages.Del(k) } return true }) - // Second pass: combine -dmo packages with base packages or add them back + // Combine -dmo packages with base packages or add them back dmoPackages.ForEach(func(dmoName string, dmoPkg domain.SourcePackage) bool { - baseName := strings.TrimSuffix(dmoName, "-dmo") - basePkg, hasBase := packages.Get(baseName) - - if hasBase { - // Combine packages, prioritizing -dmo - combinedPkg := dmoPkg // Start with the -dmo package - basePkg.Packages.ForEach(func(pkgName string, pkgInfo domain.PackageInfo) bool { - if _, exists := combinedPkg.Packages.Get(pkgName); !exists { - combinedPkg.Packages.Set(pkgName, pkgInfo) - } - return true - }) - packages.Set(dmoName, combinedPkg) // Store under the -dmo name - packages.Del(baseName) // Remove the base package + baseName := strings.TrimSuffix(dmoName, DMOSuffix) + if basePkg, hasBase := packages.Get(baseName); hasBase { + combinedPkg := combineDMOPackages(dmoPkg, basePkg) + packages.Set(dmoName, combinedPkg) + packages.Del(baseName) } else { - // If there's no base package, just add the dmo package back packages.Set(dmoName, dmoPkg) } return true }) } +func combineDMOPackages(dmoPkg, basePkg domain.SourcePackage) domain.SourcePackage { + combinedPkg := dmoPkg + basePkg.Packages.ForEach(func(pkgName string, pkgInfo domain.PackageInfo) bool { + if _, exists := combinedPkg.Packages.Get(pkgName); !exists { + combinedPkg.Packages.Set(pkgName, pkgInfo) + } + return true + }) + return combinedPkg +} + func getHighestVer(ver string, newVer string) string { mVer, _ := version.Parse(ver) extVer, _ := version.Parse(newVer)
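
Note on the concurrent loading introduced in `LoadPackages`: it follows the standard `errgroup` fan-out pattern — one goroutine per package file, each result written to its own slot of a pre-sized slice, and `eg.Wait()` surfacing the first error. The sketch below is a minimal, self-contained illustration of that pattern only; the names `fetchOne`, `loadAll`, and `inputs` are illustrative stand-ins and not part of this change.

```go
package main

import (
	"fmt"
	"strings"

	"golang.org/x/sync/errgroup"
)

// fetchOne is a stand-in for the per-file work (e.g. fetchPackagesForFile).
func fetchOne(name string) (string, error) {
	if name == "" {
		return "", fmt.Errorf("empty package file name")
	}
	return strings.ToUpper(name), nil
}

// loadAll fans out one goroutine per input and collects results by index.
func loadAll(inputs []string) ([]string, error) {
	results := make([]string, len(inputs)) // one slot per input

	var eg errgroup.Group
	for i, in := range inputs {
		i, in := i, in // per-iteration copies; required before Go 1.22, harmless after
		eg.Go(func() error {
			out, err := fetchOne(in)
			if err != nil {
				return fmt.Errorf("fetching %s: %w", in, err)
			}
			results[i] = out // each goroutine writes only its own index
			return nil
		})
	}

	// Wait blocks until all goroutines finish and returns the first error, if any.
	if err := eg.Wait(); err != nil {
		return nil, err
	}
	return results, nil
}

func main() {
	out, err := loadAll([]string{"main", "contrib", "non-free"})
	if err != nil {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println(out)
}
```

Two observations follow from this pattern, assuming it is what the patch intends: because each goroutine writes only its own index of the pre-sized `packageResults` slice and `eg.Wait()` establishes the synchronization before the results are read, the extra `sync.Mutex` around `packageResults[i] = results` is not strictly required; and if the module targets a Go version older than 1.22, the loop variables `i` and `pkg` captured by the closures in `LoadPackages` would need per-iteration copies like the ones shown above.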