brunel/handlers/packages/freezes.go

package handlers_packages

import (
	"brunel/packages"
	"log/slog"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/gofiber/fiber/v2"
)
// FreezeItem describes a single frozen package: its name, pinned version,
// and target architecture.
type FreezeItem struct {
	Name string `json:"name"`
	Ver  string `json:"version"`
	Arch string `json:"architecture"`
}

// FreezesResponse is the paginated payload returned by the Freezes handler.
type FreezesResponse struct {
	Total   int          `json:"total"`
	Freezes []FreezeItem `json:"freezes"`
}
// In-memory cache of freeze items, guarded by freezeCacheMutex and refreshed
// at most once per cacheDuration.
var (
	cachedFreezes    []FreezeItem
	lastCacheTime    time.Time
	cacheDuration    = 1 * time.Hour
	freezeCacheMutex sync.RWMutex
)
// UpdateFreezeCache rebuilds the freeze cache from the package backend,
// tagging 64-bit entries as amd64 and 32-bit entries as i386, sorting the
// combined list by name, and recording the refresh time.
func UpdateFreezeCache() error {
	freeze64, freeze32, err := packages.GetFreezeItems()
	if err != nil {
		return err
	}

	combinedFreezes := make([]FreezeItem, 0, len(freeze64)+len(freeze32))
	for _, item := range freeze64 {
		combinedFreezes = append(combinedFreezes, FreezeItem{
			Name: item.Name,
			Ver:  item.Ver,
			Arch: "amd64",
		})
	}
	for _, item := range freeze32 {
		combinedFreezes = append(combinedFreezes, FreezeItem{
			Name: item.Name,
			Ver:  item.Ver,
			Arch: "i386",
		})
	}
	sort.Slice(combinedFreezes, func(i, j int) bool {
		return combinedFreezes[i].Name < combinedFreezes[j].Name
	})

	freezeCacheMutex.Lock()
	defer freezeCacheMutex.Unlock()
	cachedFreezes = combinedFreezes
	lastCacheTime = time.Now()
	return nil
}
// getFreezes returns the cached freeze items, refreshing the cache first if
// it is empty or older than cacheDuration.
func getFreezes() ([]FreezeItem, error) {
	freezeCacheMutex.RLock()
	if time.Since(lastCacheTime) < cacheDuration && len(cachedFreezes) > 0 {
		defer freezeCacheMutex.RUnlock()
		return cachedFreezes, nil
	}
	freezeCacheMutex.RUnlock()

	// Cache is stale or empty; rebuild it before reading again.
	if err := UpdateFreezeCache(); err != nil {
		return nil, err
	}

	freezeCacheMutex.RLock()
	defer freezeCacheMutex.RUnlock()
	return cachedFreezes, nil
}
// RemoveFromFreezeCache removes the first cached entry whose name and version
// both match.
func RemoveFromFreezeCache(name, version string) {
	freezeCacheMutex.Lock()
	defer freezeCacheMutex.Unlock()
	for i, item := range cachedFreezes {
		if item.Name == name && item.Ver == version {
			cachedFreezes = append(cachedFreezes[:i], cachedFreezes[i+1:]...)
			break
		}
	}
}
// Freezes handles requests for the freeze list. It supports optional
// "search" (case-insensitive name substring), "filter" (architecture, or
// "All"), "page" (1-based), and "pageSize" query parameters, and returns a
// FreezesResponse with the total match count and the requested page.
func Freezes(c *fiber.Ctx) error {
	pageNum := c.QueryInt("page", 1)
	pageSize := c.QueryInt("pageSize", 250)
	search := strings.ToLower(c.Query("search"))
	filter := c.Query("filter")

	// Adjust pageNum to be 0-based for slice indexing.
	adjustedPageNum := pageNum - 1
	if adjustedPageNum < 0 {
		adjustedPageNum = 0
	}

	freezes, err := getFreezes()
	if err != nil {
		slog.Error("Failed to retrieve freeze items", "error", err)
		return c.Status(fiber.StatusInternalServerError).JSON(fiber.Map{
			"error": "Failed to retrieve freeze items",
		})
	}

	// Apply the architecture filter and name search before paginating.
	filteredFreezes := make([]FreezeItem, 0)
	for _, freeze := range freezes {
		matchesFilter := filter == "" || filter == "All" || strings.EqualFold(freeze.Arch, filter)
		matchesSearch := search == "" || strings.Contains(strings.ToLower(freeze.Name), search)
		if matchesFilter && matchesSearch {
			filteredFreezes = append(filteredFreezes, freeze)
		}
	}

	total := len(filteredFreezes)
	startIndex := adjustedPageNum * pageSize
	endIndex := (adjustedPageNum + 1) * pageSize
	if startIndex >= total {
		filteredFreezes = []FreezeItem{}
	} else {
		if endIndex > total {
			endIndex = total
		}
		filteredFreezes = filteredFreezes[startIndex:endIndex]
	}

	response := FreezesResponse{
		Total:   total,
		Freezes: filteredFreezes,
	}
	return c.Status(fiber.StatusOK).JSON(response)
}
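
// Usage sketch (a minimal, hypothetical example, not taken from this repo):
// the handler above would normally be registered on a Fiber app, with the
// cache warmed at startup and refreshed periodically. The route path and
// refresh interval below are assumptions for illustration only.
//
//	app := fiber.New()
//	app.Get("/api/freezes", handlers_packages.Freezes)
//
//	// Warm the cache once so the first request does not pay the cost of
//	// packages.GetFreezeItems, then refresh it on a fixed interval.
//	if err := handlers_packages.UpdateFreezeCache(); err != nil {
//		slog.Error("initial freeze cache update failed", "error", err)
//	}
//	go func() {
//		ticker := time.NewTicker(time.Hour)
//		defer ticker.Stop()
//		for range ticker.C {
//			if err := handlers_packages.UpdateFreezeCache(); err != nil {
//				slog.Error("freeze cache refresh failed", "error", err)
//			}
//		}
//	}()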