diff --git a/.gitignore b/.gitignore index 159583b..3885e54 100644 --- a/.gitignore +++ b/.gitignore @@ -22,4 +22,4 @@ go.work go.work.sum # Custom -/data +/data* diff --git a/benchmark_data.go b/benchmark_data.go new file mode 100644 index 0000000..b3e7bc7 --- /dev/null +++ b/benchmark_data.go @@ -0,0 +1,559 @@ +package flightlesssomething + +import ( + "archive/zip" + "bufio" + "bytes" + "encoding/csv" + "encoding/gob" + "errors" + "fmt" + "math" + "math/big" + "mime/multipart" + "os" + "path/filepath" + "strconv" + "strings" + + "github.com/dustin/go-humanize" + "github.com/klauspost/compress/zstd" +) + +type BenchmarkData struct { + Label string + + // Specs + SpecOS string + SpecGPU string + SpecCPU string + SpecRAM string + SpecLinuxKernel string + SpecLinuxScheduler string + + // Data + DataFPS []float64 + DataFrameTime []float64 + DataCPULoad []float64 + DataGPULoad []float64 + DataCPUTemp []float64 + DataGPUTemp []float64 + DataGPUCoreClock []float64 + DataGPUMemClock []float64 + DataGPUVRAMUsed []float64 + DataGPUPower []float64 + DataRAMUsed []float64 + DataSwapUsed []float64 +} + +// readBenchmarkFiles reads the uploaded benchmark files and returns a slice of BenchmarkData. +func readBenchmarkFiles(files []*multipart.FileHeader) ([]*BenchmarkData, error) { + benchmarkDatas := make([]*BenchmarkData, 0) + + for _, fileHeader := range files { + file, err := fileHeader.Open() + if err != nil { + return nil, err + } + + defer file.Close() + scanner := bufio.NewScanner(file) + + // FirstLine identifies file format + if !scanner.Scan() { + return nil, errors.New("failed to read file (err 1)") + } + firstLine := scanner.Text() + + var benchmarkData *BenchmarkData + var suffix string + switch { + case firstLine == "os,cpu,gpu,ram,kernel,driver,cpuscheduler": // MangoHud + benchmarkData, err = readMangoHudFile(scanner) + suffix = ".csv" + case strings.Contains(firstLine, ", Hardware monitoring log v"): // Afterburner + benchmarkData, err = readAfterburnerFile(scanner) + suffix = ".hml" + default: + return nil, errors.New("unsupported file format") + } + + if err != nil { + return nil, err + } + benchmarkData.Label = strings.TrimSuffix(fileHeader.Filename, suffix) + benchmarkDatas = append(benchmarkDatas, benchmarkData) + } + + return benchmarkDatas, nil +} + +func readMangoHudFile(scanner *bufio.Scanner) (*BenchmarkData, error) { + benchmarkData := &BenchmarkData{} + + // Second line should contain values + if !scanner.Scan() { + return nil, errors.New("failed to read file (err mh1)") + } + record := strings.Split(scanner.Text(), ",") + + for i, v := range record { + switch i { + case 0: + benchmarkData.SpecOS = truncateString(strings.TrimSpace(v)) + case 1: + benchmarkData.SpecCPU = truncateString(strings.TrimSpace(v)) + case 2: + benchmarkData.SpecGPU = truncateString(strings.TrimSpace(v)) + case 3: + kilobytes := new(big.Int) + _, ok := kilobytes.SetString(strings.TrimSpace(v), 10) + if ok { + // Contains number that represents kilobytes + bytes := new(big.Int).Mul(kilobytes, big.NewInt(1024)) + benchmarkData.SpecRAM = humanize.Bytes(bytes.Uint64()) + } else { + // Contains humanized (or invalid) value, so no conversion needed + benchmarkData.SpecRAM = truncateString(strings.TrimSpace(v)) + } + case 4: + benchmarkData.SpecLinuxKernel = truncateString(strings.TrimSpace(v)) + case 6: + benchmarkData.SpecLinuxScheduler = truncateString(strings.TrimSpace(v)) + } + } + + // 3rd line contain headers for benchmark data + if !scanner.Scan() { + return nil, errors.New("failed to read file (err 
mh2)") + } + record = strings.Split(strings.TrimRight(scanner.Text(), ","), ",") + if len(record) == 0 { + return nil, errors.New("failed to read file (err mh3)") + } + + benchmarkData.DataFPS = make([]float64, 0) + benchmarkData.DataFrameTime = make([]float64, 0) + benchmarkData.DataCPULoad = make([]float64, 0) + benchmarkData.DataGPULoad = make([]float64, 0) + benchmarkData.DataCPUTemp = make([]float64, 0) + benchmarkData.DataGPUTemp = make([]float64, 0) + benchmarkData.DataGPUCoreClock = make([]float64, 0) + benchmarkData.DataGPUMemClock = make([]float64, 0) + benchmarkData.DataGPUVRAMUsed = make([]float64, 0) + benchmarkData.DataGPUPower = make([]float64, 0) + benchmarkData.DataRAMUsed = make([]float64, 0) + benchmarkData.DataSwapUsed = make([]float64, 0) + + var counter uint + for scanner.Scan() { + record = strings.Split(scanner.Text(), ",") + if len(record) < 12 { // Ignore last 2 columns as they are not needed + return nil, errors.New("failed to read file (err mh4)") + } + + val, err := strconv.ParseFloat(record[0], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse FPS value '%s': %v", record[0], err) + } + benchmarkData.DataFPS = append(benchmarkData.DataFPS, val) + + val, err = strconv.ParseFloat(record[1], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse frametime value '%s': %v", record[1], err) + } + benchmarkData.DataFrameTime = append(benchmarkData.DataFrameTime, val) + + val, err = strconv.ParseFloat(record[2], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse CPU load value '%s': %v", record[2], err) + } + benchmarkData.DataCPULoad = append(benchmarkData.DataCPULoad, val) + + val, err = strconv.ParseFloat(record[3], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU load value '%s': %v", record[3], err) + } + benchmarkData.DataGPULoad = append(benchmarkData.DataGPULoad, val) + + val, err = strconv.ParseFloat(record[4], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse CPU temp value '%s': %v", record[4], err) + } + benchmarkData.DataCPUTemp = append(benchmarkData.DataCPUTemp, val) + + val, err = strconv.ParseFloat(record[5], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU temp value '%s': %v", record[5], err) + } + benchmarkData.DataGPUTemp = append(benchmarkData.DataGPUTemp, val) + + val, err = strconv.ParseFloat(record[6], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU core clock value '%s': %v", record[6], err) + } + benchmarkData.DataGPUCoreClock = append(benchmarkData.DataGPUCoreClock, val) + + val, err = strconv.ParseFloat(record[7], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU mem clock value '%s': %v", record[7], err) + } + benchmarkData.DataGPUMemClock = append(benchmarkData.DataGPUMemClock, val) + + val, err = strconv.ParseFloat(record[8], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU VRAM used value '%s': %v", record[8], err) + } + benchmarkData.DataGPUVRAMUsed = append(benchmarkData.DataGPUVRAMUsed, val) + + val, err = strconv.ParseFloat(record[9], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU power value '%s': %v", record[9], err) + } + benchmarkData.DataGPUPower = append(benchmarkData.DataGPUPower, val) + + val, err = strconv.ParseFloat(record[10], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse RAM used value '%s': %v", record[10], err) + } + benchmarkData.DataRAMUsed = append(benchmarkData.DataRAMUsed, val) + + val, err = strconv.ParseFloat(record[11], 64) + if 
err != nil { + return nil, fmt.Errorf("failed to parse SWAP used value '%s': %v", record[11], err) + } + benchmarkData.DataSwapUsed = append(benchmarkData.DataSwapUsed, val) + + counter++ + if counter == 100000 { + return nil, errors.New("file cannot have more than 100000 data lines") + } + } + + if err := scanner.Err(); err != nil { + return nil, err + } + + if len(benchmarkData.DataFPS) == 0 && + len(benchmarkData.DataFrameTime) == 0 && + len(benchmarkData.DataCPULoad) == 0 && + len(benchmarkData.DataGPULoad) == 0 && + len(benchmarkData.DataCPUTemp) == 0 && + len(benchmarkData.DataGPUTemp) == 0 && + len(benchmarkData.DataGPUCoreClock) == 0 && + len(benchmarkData.DataGPUMemClock) == 0 && + len(benchmarkData.DataGPUVRAMUsed) == 0 && + len(benchmarkData.DataGPUPower) == 0 && + len(benchmarkData.DataRAMUsed) == 0 && + len(benchmarkData.DataSwapUsed) == 0 { + return nil, errors.New("empty file") + } + + return benchmarkData, nil +} + +func readAfterburnerFile(scanner *bufio.Scanner) (*BenchmarkData, error) { + benchmarkData := &BenchmarkData{} + + // Second line should contain CPU model + if !scanner.Scan() { + return nil, errors.New("failed to read file (err ab1)") + } + record := strings.Split(scanner.Text(), ",") + if len(record) < 3 { + return nil, errors.New("failed to read file (err ab2)") + } + benchmarkData.SpecOS = "Windows" // Hardcode + benchmarkData.SpecGPU = truncateString(strings.TrimSpace(record[2])) + + // 3rd line contain headers for benchmark data. We need to pay attention to their order + if !scanner.Scan() { + return nil, errors.New("failed to read file (err ab3)") + } + record = strings.Split(strings.TrimRight(scanner.Text(), ","), ",") + if len(record) <= 2 { // If no data (only counter and timestamp) + return nil, errors.New("failed to read file (err ab4)") + } + + headerMap := make(map[string]int) + for i := 2; i < len(record); i++ { + headerMap[strings.TrimSpace(record[i])] = i + } + + // Skip len(headerMap) amount of lines as this is not needed + for i := 0; i < len(headerMap); i++ { + if !scanner.Scan() { + return nil, errors.New("failed to read file (err ab5)") + } + } + + // Initiate data slices + benchmarkData.DataFPS = make([]float64, 0) + benchmarkData.DataFrameTime = make([]float64, 0) + benchmarkData.DataCPULoad = make([]float64, 0) + benchmarkData.DataGPULoad = make([]float64, 0) + benchmarkData.DataCPUTemp = make([]float64, 0) + benchmarkData.DataGPUTemp = make([]float64, 0) + benchmarkData.DataGPUCoreClock = make([]float64, 0) + benchmarkData.DataGPUMemClock = make([]float64, 0) + benchmarkData.DataGPUVRAMUsed = make([]float64, 0) + benchmarkData.DataGPUPower = make([]float64, 0) + benchmarkData.DataRAMUsed = make([]float64, 0) + + var counter uint + for scanner.Scan() { + record = strings.Split(scanner.Text(), ",") + if len(record) <= 2 { // If no data (only counter and timestamp) + return nil, errors.New("failed to read file (err ab5)") + } + + // Trim all values (ignore first 2 columns - not used) + for i := 2; i < len(record); i++ { + record[i] = strings.TrimSpace(record[i]) + } + + if index, ok := headerMap["Framerate"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Framerate value '%s': %v", record[0], err) + } + benchmarkData.DataFPS = append(benchmarkData.DataFPS, val) + } + + if index, ok := headerMap["Frametime"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Frametime value '%s': %v", record[1], err) + } + 
benchmarkData.DataFrameTime = append(benchmarkData.DataFrameTime, val) + } + + if index, ok := headerMap["CPU usage"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse CPU usage value '%s': %v", record[2], err) + } + benchmarkData.DataCPULoad = append(benchmarkData.DataCPULoad, val) + } + + if index, ok := headerMap["GPU usage"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU usage value '%s': %v", record[3], err) + } + benchmarkData.DataGPULoad = append(benchmarkData.DataGPULoad, val) + } + + if index, ok := headerMap["CPU temperature"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse CPU temperature value '%s': %v", record[4], err) + } + benchmarkData.DataCPUTemp = append(benchmarkData.DataCPUTemp, val) + } + + if index, ok := headerMap["GPU temperature"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse GPU temperature value '%s': %v", record[5], err) + } + benchmarkData.DataGPUTemp = append(benchmarkData.DataGPUTemp, val) + } + + if index, ok := headerMap["Core clock"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Core clock value '%s': %v", record[6], err) + } + benchmarkData.DataGPUCoreClock = append(benchmarkData.DataGPUCoreClock, val) + } + + if index, ok := headerMap["Memory clock"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Memory clock value '%s': %v", record[7], err) + } + val = math.Round(val/2*100000) / 100000 // divide by 2 (so it represents core clocks in a same manner as Linux) and round to 5 decimal places + benchmarkData.DataGPUMemClock = append(benchmarkData.DataGPUMemClock, val) + } + + if index, ok := headerMap["Memory usage"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Memory usage value '%s': %v", record[8], err) + } + val = math.Round(val/1024*100000) / 100000 // divide by 1024 and round to 5 decimal places + benchmarkData.DataGPUVRAMUsed = append(benchmarkData.DataGPUVRAMUsed, val) + } + + if index, ok := headerMap["Power"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse Power value '%s': %v", record[9], err) + } + benchmarkData.DataGPUPower = append(benchmarkData.DataGPUPower, val) + } + + if index, ok := headerMap["RAM usage"]; ok { + val, err := strconv.ParseFloat(record[index], 64) + if err != nil { + return nil, fmt.Errorf("failed to parse RAM usage value '%s': %v", record[10], err) + } + val = math.Round(val/1024*100000) / 100000 // divide by 1024 and round to 5 decimal places + benchmarkData.DataRAMUsed = append(benchmarkData.DataRAMUsed, val) + } + + counter++ + if counter == 100000 { + return nil, errors.New("file cannot have more than 100000 data lines") + } + } + + if err := scanner.Err(); err != nil { + return nil, err + } + + if len(benchmarkData.DataFPS) == 0 && + len(benchmarkData.DataFrameTime) == 0 && + len(benchmarkData.DataCPULoad) == 0 && + len(benchmarkData.DataGPULoad) == 0 && + len(benchmarkData.DataCPUTemp) == 0 && + len(benchmarkData.DataGPUTemp) == 0 && + len(benchmarkData.DataGPUCoreClock) == 0 && + len(benchmarkData.DataGPUMemClock) == 0 && + len(benchmarkData.DataGPUVRAMUsed) 
== 0 && + len(benchmarkData.DataGPUPower) == 0 && + len(benchmarkData.DataRAMUsed) == 0 { + return nil, errors.New("empty file") + } + + return benchmarkData, nil +} + +// truncateString truncates the input string to a maximum of 100 characters and appends "..." if it exceeds that length. +func truncateString(s string) string { + const maxLength = 100 + if len(s) > maxLength { + return s[:maxLength] + "..." + } + return s +} + +func storeBenchmarkData(benchmarkData []*BenchmarkData, benchmarkID uint) error { + // Store to disk + filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) + file, err := os.Create(filePath) + if err != nil { + return err + } + defer file.Close() + + // Convert to []byte + var buffer bytes.Buffer + gobEncoder := gob.NewEncoder(&buffer) + err = gobEncoder.Encode(benchmarkData) + if err != nil { + return err + } + + // Compress and write to file + zstdEncoder, err := zstd.NewWriter(file, zstd.WithEncoderLevel(zstd.SpeedFastest)) + if err != nil { + return err + } + defer zstdEncoder.Close() + _, err = zstdEncoder.Write(buffer.Bytes()) + return err +} + +func retrieveBenchmarkData(benchmarkID uint) (benchmarkData []*BenchmarkData, err error) { + filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) + file, err := os.Open(filePath) + if err != nil { + return nil, err + } + defer file.Close() + + // Decompress and read from file + zstdDecoder, err := zstd.NewReader(file) + if err != nil { + return nil, err + } + defer zstdDecoder.Close() + + var buffer bytes.Buffer + _, err = buffer.ReadFrom(zstdDecoder) + if err != nil { + return nil, err + } + + // Decode + gobDecoder := gob.NewDecoder(&buffer) + err = gobDecoder.Decode(&benchmarkData) + return benchmarkData, err +} + +func deleteBenchmarkData(benchmarkID uint) error { + filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) + return os.Remove(filePath) +} + +func createZipFromBenchmarkData(benchmarkData []*BenchmarkData) (*bytes.Buffer, error) { + // Create a buffer to write our archive to. + buf := new(bytes.Buffer) + zipWriter := zip.NewWriter(buf) + + for _, data := range benchmarkData { + // Create a new CSV file in the zip archive. + fileName := fmt.Sprintf("%s.csv", data.Label) + fileWriter, err := zipWriter.Create(fileName) + if err != nil { + return nil, fmt.Errorf("could not create file in zip: %v", err) + } + + // Create a CSV writer. + csvWriter := csv.NewWriter(fileWriter) + + // Write the header. + header := []string{"os", "cpu", "gpu", "ram", "kernel", "driver", "cpuscheduler"} + csvWriter.Write(header) + specs := []string{data.SpecOS, data.SpecCPU, data.SpecGPU, data.SpecRAM, data.SpecLinuxKernel, "", data.SpecLinuxScheduler} + csvWriter.Write(specs) + + // Write the data header. + dataHeader := []string{"fps", "frametime", "cpu_load", "gpu_load", "cpu_temp", "gpu_temp", "gpu_core_clock", "gpu_mem_clock", "gpu_vram_used", "gpu_power", "ram_used", "swap_used"} + csvWriter.Write(dataHeader) + + // Write the data rows. 
+ for i := range data.DataFPS { + row := []string{ + strconv.FormatFloat(data.DataFPS[i], 'f', 4, 64), + strconv.FormatFloat(data.DataFrameTime[i], 'f', 4, 64), + strconv.FormatFloat(data.DataCPULoad[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPULoad[i], 'f', 4, 64), + strconv.FormatFloat(data.DataCPUTemp[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPUTemp[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPUCoreClock[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPUMemClock[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPUVRAMUsed[i], 'f', 4, 64), + strconv.FormatFloat(data.DataGPUPower[i], 'f', 4, 64), + strconv.FormatFloat(data.DataRAMUsed[i], 'f', 4, 64), + strconv.FormatFloat(data.DataSwapUsed[i], 'f', 4, 64), + } + csvWriter.Write(row) + } + + // Make sure to flush the writer. + csvWriter.Flush() + if err := csvWriter.Error(); err != nil { + return nil, fmt.Errorf("could not write CSV: %v", err) + } + } + + // Close the zip writer to flush the buffer. + if err := zipWriter.Close(); err != nil { + return nil, fmt.Errorf("could not close zip writer: %v", err) + } + + return buf, nil +} diff --git a/benchmarks.go b/benchmarks.go index e371329..e080037 100644 --- a/benchmarks.go +++ b/benchmarks.go @@ -181,7 +181,7 @@ func postBenchmarkCreate(c *gin.Context) { // Read CSV files // Store to disk only when DB record is created successfully - csvFiles, csvSpecs, err := readCSVFiles(files) + csvFiles, err := readBenchmarkFiles(files) if err != nil { c.HTML(http.StatusUnauthorized, "error.tmpl", gin.H{ "activePage": "error", @@ -197,13 +197,6 @@ func postBenchmarkCreate(c *gin.Context) { UserID: session.Get("ID").(uint), Title: title, Description: description, - - SpecDistro: csvSpecs.Distro, - SpecCPU: csvSpecs.CPU, - SpecGPU: csvSpecs.GPU, - SpecRAM: csvSpecs.RAM, - SpecKernel: csvSpecs.Kernel, - SpecScheduler: csvSpecs.Scheduler, } result := db.Create(&benchmark) @@ -331,7 +324,7 @@ func getBenchmark(c *gin.Context) { var benchmark Benchmark benchmark.ID = uint(intID) - var csvFiles []*CSVFile + var benchmarkDatas []*BenchmarkData var errCSV, errDB error errHTTPStatus := http.StatusInternalServerError @@ -340,7 +333,7 @@ func getBenchmark(c *gin.Context) { go func() { defer wg.Done() - csvFiles, errCSV = retrieveBenchmarkData(benchmark.ID) + benchmarkDatas, errCSV = retrieveBenchmarkData(benchmark.ID) }() go func() { @@ -379,6 +372,56 @@ func getBenchmark(c *gin.Context) { "userID": session.Get("ID"), "benchmark": benchmark, - "benchmarkData": csvFiles, + "benchmarkData": benchmarkDatas, }) } + +func getBenchmarkDownload(c *gin.Context) { + session := sessions.Default(c) + + // Get benchmark ID from the path + id := c.Param("id") + + // Get benchmark details + intID, err := strconv.Atoi(id) + if err != nil { + c.HTML(http.StatusInternalServerError, "error.tmpl", gin.H{ + "activePage": "error", + "username": session.Get("Username"), + "userID": session.Get("ID"), + + "errorMessage": "Internal server error occurred: " + err.Error(), + }) + return + } + + var benchmark Benchmark + benchmark.ID = uint(intID) + + benchmarkDatas, err := retrieveBenchmarkData(benchmark.ID) + if err != nil { + c.HTML(http.StatusInternalServerError, "error.tmpl", gin.H{ + "activePage": "error", + "username": session.Get("Username"), + "userID": session.Get("ID"), + "errorMessage": "Error occurred: " + err.Error(), + }) + return + } + + content, err := createZipFromBenchmarkData(benchmarkDatas) + if err != nil { + c.HTML(http.StatusInternalServerError, "error.tmpl", gin.H{ + "activePage": "error", + 
"username": session.Get("Username"), + "userID": session.Get("ID"), + "errorMessage": "Error occurred: " + err.Error(), + }) + return + } + + fileName := "benchmark_" + id + ".zip" + c.Header("Content-Type", "application/zip") + c.Header("Content-Disposition", "attachment; filename="+fileName) + c.Data(http.StatusOK, "application/zip", content.Bytes()) +} diff --git a/config.go b/config.go index 30252fc..6e673cf 100644 --- a/config.go +++ b/config.go @@ -14,6 +14,7 @@ type Config struct { DiscordClientID string DiscordClientSecret string DiscordRedirectURL string + SessionSecret string Version bool } @@ -25,6 +26,7 @@ func NewConfig() (*Config, error) { discordClientID := flag.String("discord-client-id", "", "Discord OAuth2 client ID (see https://discord.com/developers/applications)") discordClientSecret := flag.String("discord-client-secret", "", "Discord OAuth2 client secret (see https://discord.com/developers/applications)") discordRedirectURL := flag.String("discord-redirect-url", "", "Discord OAuth2 redirect URL (:///login/callback)") + sessionSecret := flag.String("session-secret", "", "Session secret") flagVersion := flag.Bool("version", false, "prints version of the application") envflag.Parse(envflag.WithPrefix("FS_")) @@ -36,6 +38,7 @@ func NewConfig() (*Config, error) { DiscordClientID: *discordClientID, DiscordClientSecret: *discordClientSecret, DiscordRedirectURL: *discordRedirectURL, + SessionSecret: *sessionSecret, Version: *flagVersion, } @@ -56,6 +59,9 @@ func NewConfig() (*Config, error) { if config.DiscordRedirectURL == "" { return nil, errors.New("missing discord-redirect-url argument") } + if config.SessionSecret == "" { + return nil, errors.New("missing session-secret argument") + } return config, nil } diff --git a/csv.go b/csv.go deleted file mode 100644 index 33ee177..0000000 --- a/csv.go +++ /dev/null @@ -1,256 +0,0 @@ -package flightlesssomething - -import ( - "bufio" - "bytes" - "encoding/gob" - "errors" - "fmt" - "log" - "math/big" - "mime/multipart" - "os" - "path/filepath" - "strings" - - "github.com/dustin/go-humanize" - "github.com/klauspost/compress/zstd" -) - -type CSVFile struct { - Filename string - - FPSPointsArray string - FrameTimeArray string - CPULoadArray string - GPULoadArray string - CPUTempArray string - GPUTempArray string - GPUCoreClockArray string - GPUMemClockArray string - GPUVRAMUsedArray string - GPUPowerArray string - RAMUsedArray string - SwapUsedArray string -} - -type CSVSpecs struct { - MaxPoints int - - Distro string - Kernel string - GPU string - CPU string - RAM string - Scheduler string -} - -// readCSVFiles reads multiple CSV files and returns a slice of CSVFile pointers and the maximum number of FPS records found in any file -func readCSVFiles(files []*multipart.FileHeader) ([]*CSVFile, *CSVSpecs, error) { - csvFiles := make([]*CSVFile, 0) - csvSpecs := &CSVSpecs{} - - var linesCount int - - for _, fileHeader := range files { - csvFile := CSVFile{} - - file, err := fileHeader.Open() - if err != nil { - return nil, nil, err - } - defer file.Close() - - scanner := bufio.NewScanner(file) - - // Set file name (without extension) - csvFile.Filename = strings.TrimSuffix(fileHeader.Filename, ".csv") - - // First line should contain this: os,cpu,gpu,ram,kernel,driver,cpuscheduler - if !scanner.Scan() { - return nil, nil, errors.New("invalid CSV file (err 1)") - } - record := strings.Split(strings.TrimRight(scanner.Text(), ","), ",") - if len(record) != 7 { - return nil, nil, errors.New("invalid CSV file (err 2)") - } - - // Second 
line should contain values - if !scanner.Scan() { - return nil, nil, errors.New("invalid CSV file (err 3)") - } - record = strings.Split(scanner.Text(), ",") - - for i, v := range record { - switch i { - case 0: - csvSpecs.Distro = truncateString(strings.TrimSpace(v)) - case 1: - csvSpecs.CPU = truncateString(strings.TrimSpace(v)) - case 2: - csvSpecs.GPU = truncateString(strings.TrimSpace(v)) - case 3: - kilobytes := new(big.Int) - _, ok := kilobytes.SetString(strings.TrimSpace(v), 10) - if !ok { - return nil, nil, errors.New("failed to convert RAM to big.Int") - } - bytes := new(big.Int).Mul(kilobytes, big.NewInt(1024)) - csvSpecs.RAM = humanize.Bytes(bytes.Uint64()) - case 4: - csvSpecs.Kernel = truncateString(strings.TrimSpace(v)) - case 6: - csvSpecs.Scheduler = truncateString(strings.TrimSpace(v)) - } - } - - // 3rd line contain headers for benchmark data: fps,frametime,cpu_load,gpu_load,cpu_temp,gpu_temp,gpu_core_clock,gpu_mem_clock,gpu_vram_used,gpu_power,ram_used,swap_used,process_rss,elapsed - if !scanner.Scan() { - return nil, nil, errors.New("invalid CSV file (err 5)") - } - record = strings.Split(strings.TrimRight(scanner.Text(), ","), ",") - if len(record) != 14 { - return nil, nil, errors.New("invalid CSV file (err 6)") - } - - fpsPoints := make([]string, 0, linesCount) - frametimePoints := make([]string, 0, linesCount) - cpuLoadPoints := make([]string, 0, linesCount) - gpuLoadPoints := make([]string, 0, linesCount) - cpuTempPoints := make([]string, 0, linesCount) - gpuTempPoints := make([]string, 0, linesCount) - gpuCoreClockPoints := make([]string, 0, linesCount) - gpuMemClockPoints := make([]string, 0, linesCount) - gpuVRAMUsedPoints := make([]string, 0, linesCount) - gpuPowerPoints := make([]string, 0, linesCount) - RAMUsedPoints := make([]string, 0, linesCount) - SWAPUsedPoints := make([]string, 0, linesCount) - - var counter uint - - for scanner.Scan() { - record = strings.Split(scanner.Text(), ",") - if len(record) != 14 { - return nil, nil, errors.New("invalid CSV file (err 7)") - } - fpsPoints = append(fpsPoints, record[0]) - frametimePoints = append(frametimePoints, record[1]) - cpuLoadPoints = append(cpuLoadPoints, record[2]) - gpuLoadPoints = append(gpuLoadPoints, record[3]) - cpuTempPoints = append(cpuTempPoints, record[4]) - gpuTempPoints = append(gpuTempPoints, record[5]) - gpuCoreClockPoints = append(gpuCoreClockPoints, record[6]) - gpuMemClockPoints = append(gpuMemClockPoints, record[7]) - gpuVRAMUsedPoints = append(gpuVRAMUsedPoints, record[8]) - gpuPowerPoints = append(gpuPowerPoints, record[9]) - RAMUsedPoints = append(RAMUsedPoints, record[10]) - SWAPUsedPoints = append(SWAPUsedPoints, record[11]) - - counter++ - if counter == 100000 { - return nil, nil, errors.New("too large CSV file") - } - } - - // More efficient buffer allocation - linesCount = len(fpsPoints) - - if err := scanner.Err(); err != nil { - log.Println("error (4) parsing CSV:", err) - return nil, nil, err - } - - if len(fpsPoints) == 0 { - return nil, nil, errors.New("invalid CSV file (err 8)") - } - - if len(fpsPoints) > csvSpecs.MaxPoints { - csvSpecs.MaxPoints = len(fpsPoints) - } - - csvFile.FPSPointsArray = strings.Join(fpsPoints, ",") - csvFile.FrameTimeArray = strings.Join(frametimePoints, ",") - csvFile.CPULoadArray = strings.Join(cpuLoadPoints, ",") - csvFile.GPULoadArray = strings.Join(gpuLoadPoints, ",") - csvFile.CPUTempArray = strings.Join(cpuTempPoints, ",") - csvFile.GPUTempArray = strings.Join(gpuTempPoints, ",") - csvFile.GPUCoreClockArray = 
strings.Join(gpuCoreClockPoints, ",") - csvFile.GPUMemClockArray = strings.Join(gpuMemClockPoints, ",") - csvFile.GPUVRAMUsedArray = strings.Join(gpuVRAMUsedPoints, ",") - csvFile.GPUPowerArray = strings.Join(gpuPowerPoints, ",") - csvFile.RAMUsedArray = strings.Join(RAMUsedPoints, ",") - csvFile.SwapUsedArray = strings.Join(SWAPUsedPoints, ",") - - csvFiles = append(csvFiles, &csvFile) - } - - return csvFiles, csvSpecs, nil -} - -// truncateString truncates the input string to a maximum of 100 characters and appends "..." if it exceeds that length. -func truncateString(s string) string { - const maxLength = 100 - if len(s) > maxLength { - return s[:maxLength] + "..." - } - return s -} - -func storeBenchmarkData(csvFiles []*CSVFile, benchmarkID uint) error { - // Store to disk - filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) - file, err := os.Create(filePath) - if err != nil { - return err - } - defer file.Close() - - // Convert to []byte - var buffer bytes.Buffer - gobEncoder := gob.NewEncoder(&buffer) - err = gobEncoder.Encode(csvFiles) - if err != nil { - return err - } - - // Compress and write to file - zstdEncoder, err := zstd.NewWriter(file, zstd.WithEncoderLevel(zstd.SpeedFastest)) - if err != nil { - return err - } - defer zstdEncoder.Close() - _, err = zstdEncoder.Write(buffer.Bytes()) - return err -} - -func retrieveBenchmarkData(benchmarkID uint) (csvFiles []*CSVFile, err error) { - filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) - file, err := os.Open(filePath) - if err != nil { - return nil, err - } - defer file.Close() - - // Decompress and read from file - zstdDecoder, err := zstd.NewReader(file) - if err != nil { - return nil, err - } - defer zstdDecoder.Close() - - var buffer bytes.Buffer - _, err = buffer.ReadFrom(zstdDecoder) - if err != nil { - return nil, err - } - - // Decode - gobDecoder := gob.NewDecoder(&buffer) - err = gobDecoder.Decode(&csvFiles) - return csvFiles, err -} - -func deleteBenchmarkData(benchmarkID uint) error { - filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID)) - return os.Remove(filePath) -} diff --git a/docker-compose.yaml b/docker-compose.yaml index 62fdc46..bc30cc4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -13,4 +13,5 @@ services: - FS_DISCORD_CLIENT_ID=xxxxxxxxxxxxxxxxxx - FS_DISCORD_CLIENT_SECRET=xxxxxxxxxxxxxxxxxx - FS_DISCORD_REDIRECT_URL=:///login/callback + - FS_SESSION_SECRET=xxxxxxxxxxxxxxxxxx restart: unless-stopped diff --git a/embed.go b/embed.go index ff25851..6b2743d 100644 --- a/embed.go +++ b/embed.go @@ -4,3 +4,6 @@ import "embed" //go:embed templates/* var templatesFS embed.FS + +//go:embed static/* +var staticFS embed.FS diff --git a/models.go b/models.go index d40f579..d80454f 100644 --- a/models.go +++ b/models.go @@ -15,15 +15,9 @@ type User struct { type Benchmark struct { gorm.Model - UserID uint - Title string - Description string - SpecDistro string - SpecCPU string - SpecGPU string - SpecRAM string - SpecKernel string - SpecScheduler string + UserID uint + Title string + Description string CreatedAtHumanized string `gorm:"-"` // Human readable "X h/m/s ago" version of CreatedAt (filled automatically) diff --git a/server.go b/server.go index c9b0b5c..1afa10b 100644 --- a/server.go +++ b/server.go @@ -66,7 +66,7 @@ func Start(c *Config) { if err != nil { panic(err) } - store := gormsessions.NewStore(db, true, []byte("secret")) + store := gormsessions.NewStore(db, true, []byte(c.SessionSecret)) 
db.AutoMigrate(&Benchmark{}) // Setup gin // @@ -79,6 +79,13 @@ func Start(c *Config) { tmpl := template.Must(template.ParseFS(templatesFS, "templates/*.tmpl")) r.SetHTMLTemplate(tmpl) + // Serve static files + fileServer := http.FileServer(http.FS(staticFS)) + r.GET("/static/*filepath", func(c *gin.Context) { + c.Header("Cache-Control", "public, max-age=3600") + fileServer.ServeHTTP(c.Writer, c.Request) + }) + r.GET("/", func(c *gin.Context) { c.Redirect(http.StatusTemporaryRedirect, "/benchmarks") }) r.GET("/benchmarks", getBenchmarks) @@ -87,6 +94,7 @@ func Start(c *Config) { r.POST("/benchmark", postBenchmarkCreate) r.GET("/benchmark/:id", getBenchmark) r.DELETE("/benchmark/:id", deleteBenchmark) + r.GET("/benchmark/:id/download", getBenchmarkDownload) r.GET("/user/:id", getUser) diff --git a/static/css/benchmarks.css b/static/css/benchmarks.css new file mode 100644 index 0000000..6c242a2 --- /dev/null +++ b/static/css/benchmarks.css @@ -0,0 +1,15 @@ +.list-group-item { + position: relative; +} +.stretched-link { + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + z-index: 1; +} +.username-link { + position: relative; + z-index: 2; +} diff --git a/static/js/benchmark.js b/static/js/benchmark.js new file mode 100644 index 0000000..c9a004c --- /dev/null +++ b/static/js/benchmark.js @@ -0,0 +1,594 @@ +Highcharts.setOptions({ + chart: { + animation: false + }, + plotOptions: { + series: { + animation: false + } + } +}); + + +var colors = Highcharts.getOptions().colors; + +function getLineChartOptions(title, description, unit, maxY = null) { + return { + chart: { + type: 'line', + backgroundColor: null, // Set background to transparent + style: { + color: '#FFFFFF' + }, + zooming: { + type: 'x' + } + }, + title: { + text: title, + style: { + color: '#FFFFFF', + fontSize: '16px' + } + }, + subtitle: { + text: description, + style: { + color: '#FFFFFF', + fontSize: '12px' + } + }, + xAxis: { + lineColor: '#FFFFFF', + tickColor: '#FFFFFF', + labels: { + enabled: false + } + }, + yAxis: { + title: { + text: null + }, + labels: { + formatter: function() { + return this.value.toFixed(2) + ' ' + unit; + }, + style: { + color: '#FFFFFF' + } + }, + gridLineColor: 'rgba(255, 255, 255, 0.1)', + max: maxY + }, + legend: { + align: 'center', + verticalAlign: 'bottom', + itemStyle: { + color: '#FFFFFF' + } + }, + tooltip: { + shared: false, + pointFormat: '{series.name}: {point.y:.2f} ' + unit + '
', // Include unit in tooltip + backgroundColor: '#1E1E1E', + borderColor: '#FFFFFF', + style: { + color: '#FFFFFF' + } + }, + plotOptions: { + line: { + marker: { + enabled: false, + symbol: 'circle', + lineColor: null, + radius: 1.5, + states: { + hover: { + enabled: true, + } + } + }, + lineWidth: 1, + animation: false + } + }, + credits: { + enabled: false + }, + series: [], + exporting: { + buttons: { + contextButton: { + menuItems: [ + 'viewFullscreen', + 'printChart', + 'separator', + 'downloadPNG', + 'downloadJPEG', + 'downloadPDF', + 'downloadSVG', + 'separator', + 'downloadCSV', + 'downloadXLS' + ] + } + } + } + }; +} + +function getBarChartOptions(title, unit, maxY = null) { + return { + chart: { + type: 'bar', + backgroundColor: null, // Set background to transparent + style: { + color: '#FFFFFF' + } + }, + title: { + text: title, + style: { + color: '#FFFFFF', + fontSize: '16px' + } + }, + xAxis: { + categories: [], + title: { + text: null + }, + labels: { + style: { + color: '#FFFFFF' + } + } + }, + yAxis: { + min: 0, + max: maxY, + title: { + text: unit, + align: 'high', + style: { + color: '#FFFFFF' + } + }, + labels: { + overflow: 'justify', + style: { + color: '#FFFFFF' + }, + formatter: function() { + return this.value.toFixed(2) + ' ' + unit; + } + }, + gridLineColor: 'rgba(255, 255, 255, 0.1)' + }, + tooltip: { + valueSuffix: ' ' + unit, + backgroundColor: '#1E1E1E', + borderColor: '#FFFFFF', + style: { + color: '#FFFFFF' + }, + formatter: function() { + return '' + this.point.category + ': ' + this.y.toFixed(2) + ' ' + unit; + } + }, + plotOptions: { + bar: { + dataLabels: { + enabled: true, + style: { + color: '#FFFFFF' + }, + formatter: function() { + return this.y.toFixed(2) + ' ' + unit; + } + } + } + }, + legend: { + enabled: false, // Disable legend + }, + credits: { + enabled: false + }, + series: [] + }; +} + +function createChart(chartId, title, description, unit, dataArrays, maxY = null) { + var options = getLineChartOptions(title, description, unit, maxY); + options.series = dataArrays.map(function(dataArray, index) { + return {name: dataArray.label, data: dataArray.data, color: colors[index % colors.length]}; + }); + + Highcharts.chart(chartId, options); +} + +function createBarChart(chartId, title, unit, categories, data, colors, maxY = null) { + var options = getBarChartOptions(title, unit, maxY); + options.xAxis.categories = categories; + options.series = [{ + name: title, + data: data, + colorByPoint: true, + colors: colors + }]; + + Highcharts.chart(chartId, options); +} + +function calculateAverage(data) { + const sum = data.reduce((acc, value) => acc + value, 0); + return sum / data.length; +} + +function calculatePercentile(data, percentile) { + data.sort((a, b) => a - b); + const index = Math.ceil(percentile / 100 * data.length) - 1; + return data[index]; +} + +// Create line charts +createChart('fpsChart', 'FPS', 'More is better', 'fps', fpsDataArrays); +createChart('frameTimeChart', 'Frametime', 'Less is better', 'ms', frameTimeDataArrays); +createChart('cpuLoadChart', 'CPU Load', '', '%', cpuLoadDataArrays, 100); +createChart('gpuLoadChart', 'GPU Load', '', '%', gpuLoadDataArrays, 100); +createChart('cpuTempChart', 'CPU Temperature', '', '°C', cpuTempDataArrays); +createChart('gpuTempChart', 'GPU Temperature', '', '°C', gpuTempDataArrays); +createChart('gpuCoreClockChart', 'GPU Core Clock', '', 'MHz', gpuCoreClockDataArrays); +createChart('gpuMemClockChart', 'GPU Memory Clock', '', 'MHz', gpuMemClockDataArrays); +createChart('gpuVRAMUsedChart', 
'GPU VRAM Usage', '', 'GB', gpuVRAMUsedDataArrays); +createChart('gpuPowerChart', 'GPU Power', '', 'W', gpuPowerDataArrays); +createChart('ramUsedChart', 'RAM Usage', '', 'GB', ramUsedDataArrays); +createChart('swapUsedChart', 'SWAP Usage', '', 'GB', swapUsedDataArrays); + +// Calculate average CPU and GPU load +var cpuLoadAverages = cpuLoadDataArrays.map(function(dataArray) { + return calculateAverage(dataArray.data); +}); + +var gpuLoadAverages = gpuLoadDataArrays.map(function(dataArray) { + return calculateAverage(dataArray.data); +}); + +// Create bar charts for average CPU and GPU load +createBarChart('cpuLoadSummaryChart', 'Average CPU Load', '%', cpuLoadDataArrays.map(function(dataArray) { return dataArray.label; }), cpuLoadAverages, colors, 100); +createBarChart('gpuLoadSummaryChart', 'Average GPU Load', '%', gpuLoadDataArrays.map(function(dataArray) { return dataArray.label; }), gpuLoadAverages, colors, 100); + +// Calculate and render min, max, and average FPS +var categories = []; +var minFPSData = []; +var avgFPSData = []; +var maxFPSData = []; + +fpsDataArrays.forEach(function(dataArray) { + var minFPS = calculatePercentile(dataArray.data, 1); + var avgFPS = calculateAverage(dataArray.data); + var maxFPS = calculatePercentile(dataArray.data, 97); + + categories.push(dataArray.label); + minFPSData.push(minFPS); + avgFPSData.push(avgFPS); + maxFPSData.push(maxFPS); +}); + +Highcharts.chart('minMaxAvgChart', { + chart: { + type: 'bar', + backgroundColor: null + }, + title: { + text: 'Min/Avg/Max FPS', + style: { + color: '#FFFFFF', + fontSize: '16px' + } + }, + subtitle: { + text: 'More is better', + style: { + color: '#FFFFFF' + } + }, + xAxis: { + categories: categories, + title: { + text: null + }, + labels: { + style: { + color: '#FFFFFF' + } + } + }, + yAxis: { + min: 0, + title: { + text: 'FPS', + align: 'high', + style: { + color: '#FFFFFF' + } + }, + labels: { + overflow: 'justify', + style: { + color: '#FFFFFF' + } + }, + gridLineColor: 'rgba(255, 255, 255, 0.1)' + }, + tooltip: { + valueSuffix: ' FPS', + backgroundColor: '#1E1E1E', + borderColor: '#FFFFFF', + style: { + color: '#FFFFFF' + }, + formatter: function() { + return '' + this.series.name + ': ' + this.y.toFixed(2) + ' FPS'; + } + }, + plotOptions: { + bar: { + dataLabels: { + enabled: true, + style: { + color: '#FFFFFF' + }, + formatter: function() { + return this.y.toFixed(2) + ' fps'; + } + } + } + }, + legend: { + reversed: true, + itemStyle: { + color: '#FFFFFF' + } + }, + credits: { + enabled: false + }, + series: [{ + name: '97th', + data: maxFPSData, + color: '#00FF00' + }, { + name: 'AVG', + data: avgFPSData, + color: '#0000FF' + }, { + name: '1%', + data: minFPSData, + color: '#FF0000' + }] +}); + +// Calculate average FPS for each filename +var avgFPSData = fpsDataArrays.map(function(dataArray) { + return calculateAverage(dataArray.data); +}); + +// Calculate FPS as a percentage of the first element +var firstFPS = avgFPSData[0]; +var percentageFPSData = avgFPSData.map(function(fps) { + return (fps / firstFPS) * 100; +}); + +// Create bar chart for FPS percentage +Highcharts.chart('avgChart', { + chart: { + type: 'bar', + backgroundColor: null + }, + title: { + text: 'Average FPS in %', + style: { + color: '#FFFFFF', + fontSize: '16px' + } + }, + xAxis: { + categories: fpsDataArrays.map(function(dataArray) { return dataArray.label; }), + title: { + text: null + }, + labels: { + style: { + color: '#FFFFFF' + } + } + }, + yAxis: { + min: 0, + title: { + text: 'Percentage (%)', + align: 'high', + style: 
{ + color: '#FFFFFF' + } + }, + labels: { + overflow: 'justify', + style: { + color: '#FFFFFF' + } + }, + gridLineColor: 'rgba(255, 255, 255, 0.1)' + }, + tooltip: { + valueSuffix: ' %', + backgroundColor: '#1E1E1E', + borderColor: '#FFFFFF', + style: { + color: '#FFFFFF' + }, + formatter: function() { + return '' + this.point.category + ': ' + this.y.toFixed(2) + ' %'; + } + }, + plotOptions: { + bar: { + dataLabels: { + enabled: true, + style: { + color: '#FFFFFF' + }, + formatter: function() { + return this.y.toFixed(2) + ' %'; + } + } + } + }, + legend: { + enabled: false + }, + credits: { + enabled: false + }, + series: [{ + name: 'FPS Percentage', + data: percentageFPSData, + colorByPoint: true, + colors: colors + }] +}); + +function calculateSpikes(data, threshold) { + if (data.length < 6) { + throw new Error("Data length must be greater than or equal to 6."); + } + + let spikeCount = 0; + + // Helper function to calculate the moving average with a minimum of 6 points + function movingAverage(arr, index) { + const windowSize = Math.max(6, Math.ceil(arr.length * 0.05)); // 5 % of the data + const halfWindowSize = Math.floor(windowSize / 2); + const start = Math.max(0, index - halfWindowSize); + const end = Math.min(arr.length - 1, index + halfWindowSize); + const actualWindowSize = end - start + 1; + + let sum = 0; + for (let i = start; i <= end; i++) { + sum += arr[i]; + } + return sum / actualWindowSize; + } + + for (let i = 0; i < data.length; i++) { + const currentPoint = data[i]; + const movingAvg = movingAverage(data, i); + + const change = Math.abs(currentPoint - movingAvg) / movingAvg * 100; + + if (change > threshold) { + spikeCount++; + } + } + + return (spikeCount / data.length) * 100; +} + +function updateSpikesChart(threshold) { + document.getElementById('spikeThresholdValue').innerText = threshold + '%'; + + var spikePercentages = fpsDataArrays.map(function(dataArray) { + return calculateSpikes(dataArray.data, threshold); + }); + + Highcharts.chart('spikesChart', { + chart: { + type: 'bar', + backgroundColor: null + }, + title: { + text: 'FPS Spikes', + style: { + color: '#FFFFFF', + fontSize: '16px' + } + }, + subtitle: { + text: 'Less is better', + style: { + color: '#FFFFFF', + fontSize: '12px' + } + }, + xAxis: { + categories: categories, + title: { + text: null + }, + labels: { + style: { + color: '#FFFFFF' + } + } + }, + yAxis: { + min: 0, + title: { + text: 'Percentage (%)', + align: 'high', + style: { + color: '#FFFFFF' + } + }, + labels: { + overflow: 'justify', + style: { + color: '#FFFFFF' + } + }, + gridLineColor: 'rgba(255, 255, 255, 0.1)' + }, + tooltip: { + valueSuffix: ' %', + backgroundColor: '#1E1E1E', + borderColor: '#FFFFFF', + style: { + color: '#FFFFFF' + }, + formatter: function() { + return '' + this.point.category + ': ' + this.y.toFixed(2) + ' %'; + } + }, + plotOptions: { + bar: { + dataLabels: { + enabled: true, + style: { + color: '#FFFFFF' + }, + formatter: function() { + return this.y.toFixed(2) + ' %'; + } + } + } + }, + legend: { + enabled: false + }, + credits: { + enabled: false + }, + series: [{ + name: 'Spike Percentage', + data: spikePercentages, + colorByPoint: true, + colors: colors + }] + }); +} + +// Initial render of spikes chart +updateSpikesChart(document.getElementById('spikeThreshold').value); diff --git a/templates/benchmark.tmpl b/templates/benchmark.tmpl index d4a00bd..5e2801f 100644 --- a/templates/benchmark.tmpl +++ b/templates/benchmark.tmpl @@ -1,51 +1,51 @@ {{template "header.tmpl" .}} +
+

Benchmark #{{ .benchmark.ID }}

+
+ {{if eq .benchmark.UserID .userID }} + Delete + {{end}} + Download +
+
+ +
+
{{ .benchmark.Title }}
+

{{ .benchmark.Description }}

+

Submitted {{ .benchmark.CreatedAtHumanized }} by {{ .benchmark.User.Username }}

+
+
-
-
-
-
{{ .benchmark.Title }}
-

{{ .benchmark.Description }}

-

Submitted {{ .benchmark.CreatedAtHumanized }} by {{ .benchmark.User.Username }}.

-
-
-
-
-
    -
  • Distro: {{ .benchmark.SpecDistro }}
  • -
  • Kernel: {{ .benchmark.SpecKernel }}
  • -
  • GPU: {{ .benchmark.SpecGPU }}
  • -
  • CPU: {{ .benchmark.SpecCPU }}
  • -
  • RAM: {{ .benchmark.SpecRAM }}
  • -
  • Scheduler: {{ .benchmark.SpecScheduler }}
  • -
+
+
Specifications
+ + + + + + + + + + + + + {{- range .benchmarkData }} + + + + + + + + + {{- end }} + +
LabelOSGPUCPURAMOS specific
{{ .Label }}{{ .SpecOS }}{{ .SpecGPU }}{{ .SpecCPU }}{{ .SpecRAM }}{{ .SpecLinuxKernel }} {{ .SpecLinuxScheduler }}
-{{if eq .benchmark.UserID .userID }} -Delete benchmark - - -{{end}} -
@@ -100,674 +100,102 @@ + + +{{if eq .benchmark.UserID .userID }} + +{{end}} + {{template "footer.tmpl" .}} diff --git a/templates/benchmark_create.tmpl b/templates/benchmark_create.tmpl index e596be7..0e08bb4 100644 --- a/templates/benchmark_create.tmpl +++ b/templates/benchmark_create.tmpl @@ -22,23 +22,140 @@
-

Notes:

+

Instructions:

    -
  • CSV file name (without .csv) is used as a name in charts. +
  • The filename (without its extension) becomes the label shown in the charts.
    • You cannot rename it once the benchmark is submitted.
  • You can upload at most 50 files for a single benchmark.
  • -
  • Only MangoHUD log format is supported. -
      -
    • If you want to upload RivaTuner log format - modify/convert it to MangoHud format by hand.
    • -
    • RivaTuner log format (.htm) is not supported yet.
    • -
    +
  • Only MangoHUD (*.csv) and Afterburner (*.hml) formats are supported (see the detection sketch below).
+ +
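For what it's worth, the parser added in benchmark_data.go does not go by the file extension: it decides which reader to use from the first line of the uploaded log. A minimal sketch of that check (detectFormat and the sample lines are illustrative, not identifiers from this change):

```go
package main

import (
	"fmt"
	"strings"
)

// detectFormat mirrors the switch in readBenchmarkFiles: a MangoHud log starts
// with its fixed spec header, while an Afterburner .hml log carries the
// "Hardware monitoring log" marker on its first line.
func detectFormat(firstLine string) string {
	switch {
	case firstLine == "os,cpu,gpu,ram,kernel,driver,cpuscheduler":
		return "MangoHud"
	case strings.Contains(firstLine, ", Hardware monitoring log v"):
		return "Afterburner"
	default:
		return "unsupported"
	}
}

func main() {
	fmt.Println(detectFormat("os,cpu,gpu,ram,kernel,driver,cpuscheduler")) // MangoHud
	fmt.Println(detectFormat("80, Hardware monitoring log v1.3"))          // Afterburner
}
```

The extension only matters for the label: readBenchmarkFiles trims .csv or .hml (depending on the detected format) from the filename before using it as the chart label.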
+ + +
+
    +
  1. Install the MangoHud overlay package for your Linux distribution. More information can be found here.
  2. +
  3. (Optional) Install the Goverlay application, which lets you configure the MangoHud overlay through a graphical UI.
  4. +
  5. Edit ~/.config/MangoHud/MangoHud.conf with the following contents (read the in-code comments and update accordingly): +
    
    +legacy_layout=false
    +
    +background_alpha=0.6
    +round_corners=0
    +background_alpha=0.6
    +background_color=000000
    +
    +font_size=24
    +text_color=FFFFFF
    +position=top-left
    +toggle_hud=Shift_R+F12
    +pci_dev=0:0b:00.0
    +table_columns=3
    +gpu_text=GPU
    +gpu_stats
    +gpu_temp
    +cpu_text=CPU
    +cpu_stats
    +core_load
    +core_bars
    +cpu_temp
    +io_stats
    +io_read
    +io_write
    +vram
    +vram_color=AD64C1
    +ram
    +ram_color=C26693
    +fps
    +gpu_name
    +frame_timing
    +frametime_color=00FF00
    +fps_limit_method=late
    +toggle_fps_limit=Shift_L+F1
    +fps_limit=0
    +
    +# Update to your preferred logs location here:
    +output_folder=/home/user/mangohud_logs
    +
    +# Set this to the maximum log duration (in seconds). Logging stops automatically after this time, which is useful if
    +# you know how long your benchmark runs; otherwise set it to something large, like 9999...
    +log_duration=90
    +
    +# If your application starts straight into the benchmark, setting this to e.g. '10' gives the game 10 seconds to load.
    +# If you do not want logging to start automatically, leave this set to '0'.
    +autostart_log=0
    +
    +# Set the interval (in milliseconds) at which data points are logged. '100' is what I use; '50' provides
    +# more data and suits short benchmarks, while '200'-'500' suits (very) long benchmarks.
    +# 
    +# NOTE: if you are comparing Linux and Windows, make sure this value is identical on both!
    +# 
    +log_interval=100
    +
    +toggle_logging=Shift_L+F2
    +                
    +
  6. +
+
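A quick sanity check on log_duration and log_interval (an illustrative calculation, not part of this change): the parser in benchmark_data.go rejects logs with more than 100000 data lines, so the two values should be chosen to stay well below that.

```go
package main

import "fmt"

func main() {
	logDurationSec := 90.0 // log_duration from the config above
	logIntervalMs := 100.0 // log_interval from the config above

	rows := logDurationSec * 1000 / logIntervalMs
	fmt.Printf("expected data rows: %.0f (upload limit: 100000)\n", rows) // 900
}
```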

 When you start the game, the overlay should be visible. Pressing SHIFT+F2 starts logging; it either stops automatically after log_duration seconds or can be stopped manually by pressing SHIFT+F2 again. While recording is in progress, the overlay shows a big red dot.

+

 After recording is done, you may or may not end up with a *-summary.csv file; if it is there, it can be deleted. The remaining <game>-<timestamp>.csv file should be renamed to the label you want to see on the website, for example Linux (with or without the .csv extension).

+
+ + +
+
    +
  1. Install Afterburner. It will also install the RivaTuner Statistics Server.
  2. +
  3. Open Afterburner, go to Settings, then open the "Monitoring" tab.
  4. +
  5. Change "Hardware polling period (in milliseconds)" to "100" (ensure ALL your benchmarks have the same interval value, even on Linux!)
  6. +
  7. Modify the graphs: +
      +
    1. Disable everything
    2. +
    3. Enable the following: +
        +
      • GPU temperature
      • +
      • GPU usage
      • +
      • Memory usage
      • +
      • Core clock
      • +
      • Memory clock
      • +
      • Power
      • +
      • CPU temperature
      • +
      • CPU usage
      • +
      • RAM usage
      • +
      • Framerate
      • +
      • Frametime
      • +
      +
    4. +
    5. (Optional) Click each entry and check "Show in On-Screen Display" for it, so you can see the values in the overlay
    6. +
    +
  8. +
  9. Check "Log history to file"
  10. +
  11. Select a location for the log file (e.g. Desktop or Downloads works well).
  12. +
  13. Check "Recreate existing log files"
  14. +
  15. Uncheck "Log history to file" (yes, check to configure and then uncheck to disable auto recording when game starts)
  16. +
  17. Set "Begin logging" and "End logging" shortcuts. Suggestion is SHIFT+F2 and SHIFT+F3 appropriately.
  18. +
  19. Close Afterburner settings.
  20. +
  21. Ensure that Afterburner and RivaTuner are running (open or in the system tray).
  22. +
  23. Start the game; the overlay will show up within 5-30 seconds (keep clicking the mouse while the game is loading)
  24. +
  25. When starting the benchmark, press the shortcut to begin recording, then press the other shortcut to stop. Note that there is no on-screen indication of whether the game is being recorded.
  26. +
+

 You will end up with a *.hml file. Rename it to the label you want to see on the website, for example Windows (with or without the .hml extension).
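Note that the Afterburner reader in benchmark_data.go adjusts a few columns so they line up with MangoHud logs: the memory clock is halved and the memory-usage columns are divided by 1024, each rounded to 5 decimal places. A sketch of that normalization (normalizeAfterburner is an illustrative helper, not a function in this change):

```go
package main

import (
	"fmt"
	"math"
)

// normalizeAfterburner applies the same adjustments the Afterburner reader in
// this change applies per row: the memory clock is halved (so it matches how
// Linux reports it) and memory sizes are divided by 1024 (presumably MB -> GB,
// which matches the GB-based charts), each rounded to 5 decimal places.
func normalizeAfterburner(memClockMHz, vramMB, ramMB float64) (memClock, vramGB, ramGB float64) {
	round5 := func(v float64) float64 { return math.Round(v*100000) / 100000 }
	return round5(memClockMHz / 2), round5(vramMB / 1024), round5(ramMB / 1024)
}

func main() {
	clock, vram, ram := normalizeAfterburner(14000, 8192, 16384)
	fmt.Println(clock, vram, ram) // 7000 8 16
}
```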

+
+ +
+ {{template "footer.tmpl" .}} diff --git a/templates/benchmarks.tmpl b/templates/benchmarks.tmpl index 8c28aeb..d13a4db 100644 --- a/templates/benchmarks.tmpl +++ b/templates/benchmarks.tmpl @@ -1,9 +1,11 @@ {{template "header.tmpl" .}} + +

Benchmarks

{{if .username}} - New benchmark + New benchmark {{end}}
@@ -34,24 +36,6 @@ {{- end -}}
- -
  • diff --git a/templates/footer.tmpl b/templates/footer.tmpl index 6291e2d..acbf45c 100644 --- a/templates/footer.tmpl +++ b/templates/footer.tmpl @@ -1,5 +1,4 @@
- diff --git a/templates/header.tmpl b/templates/header.tmpl index 886e724..a802c5f 100644 --- a/templates/header.tmpl +++ b/templates/header.tmpl @@ -1,6 +1,7 @@ + diff --git a/templates/user.tmpl b/templates/user.tmpl index 5c665f5..4ffdf0a 100644 --- a/templates/user.tmpl +++ b/templates/user.tmpl @@ -19,17 +19,16 @@ {{- end -}} -
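Finally, a minimal round-trip sketch of the gob+zstd storage helpers added in benchmark_data.go, written as an in-package test. It assumes benchmarksDir is an assignable package-level string (it is referenced, but not declared, in this diff), and TestBenchmarkDataRoundTrip is an illustrative name:

```go
package flightlesssomething

import "testing"

func TestBenchmarkDataRoundTrip(t *testing.T) {
	// Assumption: benchmarksDir is a plain package-level string variable.
	benchmarksDir = t.TempDir()

	in := []*BenchmarkData{{
		Label:   "Linux",
		DataFPS: []float64{60.0, 61.5, 59.8},
	}}

	// Encode with gob, compress with zstd, write to <benchmarksDir>/42.bin ...
	if err := storeBenchmarkData(in, 42); err != nil {
		t.Fatalf("store: %v", err)
	}
	// ... then decompress and decode it back.
	out, err := retrieveBenchmarkData(42)
	if err != nil {
		t.Fatalf("retrieve: %v", err)
	}
	if len(out) != 1 || out[0].Label != "Linux" || len(out[0].DataFPS) != 3 {
		t.Fatalf("unexpected round-trip result: %+v", out)
	}
	if err := deleteBenchmarkData(42); err != nil {
		t.Fatalf("delete: %v", err)
	}
}
```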