Erikas 2024-07-11 19:33:15 +03:00
parent e7a34c92df
commit 4c1aa120aa
9 changed files with 398 additions and 330 deletions

2 .gitignore (vendored)

@@ -22,4 +22,4 @@ go.work
go.work.sum
# Custom
/data
/data*

312 benchmark_data.go (new file)

@@ -0,0 +1,312 @@
package flightlesssomething
import (
"bufio"
"bytes"
"encoding/gob"
"errors"
"fmt"
"log"
"math/big"
"mime/multipart"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/dustin/go-humanize"
"github.com/klauspost/compress/zstd"
)
type BenchmarkData struct {
Label string
// Specs
SpecOS string
SpecGPU string
SpecCPU string
SpecRAM string
SpecLinuxKernel string
SpecLinuxScheduler string
// Data
DataFPS []float64
DataFrameTime []float64
DataCPULoad []float64
DataGPULoad []float64
DataCPUTemp []float64
DataGPUTemp []float64
DataGPUCoreClock []float64
DataGPUMemClock []float64
DataGPUVRAMUsed []float64
DataGPUPower []float64
DataRAMUsed []float64
DataSwapUsed []float64
}
// readBenchmarkFiles reads the uploaded benchmark files and returns a slice of BenchmarkData.
func readBenchmarkFiles(files []*multipart.FileHeader) ([]*BenchmarkData, error) {
csvFiles := make([]*BenchmarkData, 0)
linesCount := 0
for _, fileHeader := range files {
csvFile := BenchmarkData{}
file, err := fileHeader.Open()
if err != nil {
return nil, err
}
defer file.Close()
scanner := bufio.NewScanner(file)
// Label is filename without extension
csvFile.Label = strings.TrimSuffix(fileHeader.Filename, ".csv")
csvFile.Label = strings.TrimSuffix(csvFile.Label, ".htm")
// First line should contain this: os,cpu,gpu,ram,kernel,driver,cpuscheduler
if !scanner.Scan() {
return nil, errors.New("invalid CSV file (err 1)")
}
record := strings.Split(strings.TrimRight(scanner.Text(), ","), ",")
if len(record) != 7 {
return nil, errors.New("invalid CSV file (err 2)")
}
// Second line should contain values
if !scanner.Scan() {
return nil, errors.New("invalid CSV file (err 3)")
}
record = strings.Split(scanner.Text(), ",")
for i, v := range record {
switch i {
case 0:
csvFile.SpecOS = truncateString(strings.TrimSpace(v))
case 1:
csvFile.SpecCPU = truncateString(strings.TrimSpace(v))
case 2:
csvFile.SpecGPU = truncateString(strings.TrimSpace(v))
case 3:
kilobytes := new(big.Int)
_, ok := kilobytes.SetString(strings.TrimSpace(v), 10)
if !ok {
return nil, errors.New("failed to convert RAM to big.Int")
}
bytes := new(big.Int).Mul(kilobytes, big.NewInt(1024))
csvFile.SpecRAM = humanize.Bytes(bytes.Uint64())
case 4:
csvFile.SpecLinuxKernel = truncateString(strings.TrimSpace(v))
case 6:
csvFile.SpecLinuxScheduler = truncateString(strings.TrimSpace(v))
}
}
// The 3rd line contains headers for the benchmark data: fps,frametime,cpu_load,gpu_load,cpu_temp,gpu_temp,gpu_core_clock,gpu_mem_clock,gpu_vram_used,gpu_power,ram_used,swap_used,process_rss,elapsed
if !scanner.Scan() {
return nil, errors.New("invalid CSV file (err 5)")
}
record = strings.Split(strings.TrimRight(scanner.Text(), ","), ",")
if len(record) != 14 {
return nil, errors.New("invalid CSV file (err 6)")
}
// Preallocate slices. The first file gains nothing (linesCount is still 0), but subsequent
// files reuse the largest line count seen so far, which makes preallocation more accurate.
csvFile.DataFPS = make([]float64, 0, linesCount)
csvFile.DataFrameTime = make([]float64, 0, linesCount)
csvFile.DataCPULoad = make([]float64, 0, linesCount)
csvFile.DataGPULoad = make([]float64, 0, linesCount)
csvFile.DataCPUTemp = make([]float64, 0, linesCount)
csvFile.DataGPUTemp = make([]float64, 0, linesCount)
csvFile.DataGPUCoreClock = make([]float64, 0, linesCount)
csvFile.DataGPUMemClock = make([]float64, 0, linesCount)
csvFile.DataGPUVRAMUsed = make([]float64, 0, linesCount)
csvFile.DataGPUPower = make([]float64, 0, linesCount)
csvFile.DataRAMUsed = make([]float64, 0, linesCount)
csvFile.DataSwapUsed = make([]float64, 0, linesCount)
var counter uint
for scanner.Scan() {
record = strings.Split(scanner.Text(), ",")
if len(record) != 14 {
return nil, errors.New("invalid CSV file (err 7)")
}
val, err := strconv.ParseFloat(record[0], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse FPS value '%s': %v", record[0], err)
}
csvFile.DataFPS = append(csvFile.DataFPS, val)
val, err = strconv.ParseFloat(record[1], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse frametime value '%s': %v", record[1], err)
}
csvFile.DataFrameTime = append(csvFile.DataFrameTime, val)
val, err = strconv.ParseFloat(record[2], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse CPU load value '%s': %v", record[2], err)
}
csvFile.DataCPULoad = append(csvFile.DataCPULoad, val)
val, err = strconv.ParseFloat(record[3], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU load value '%s': %v", record[3], err)
}
csvFile.DataGPULoad = append(csvFile.DataGPULoad, val)
val, err = strconv.ParseFloat(record[4], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse CPU temp value '%s': %v", record[4], err)
}
csvFile.DataCPUTemp = append(csvFile.DataCPUTemp, val)
val, err = strconv.ParseFloat(record[5], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU temp value '%s': %v", record[5], err)
}
csvFile.DataGPUTemp = append(csvFile.DataGPUTemp, val)
val, err = strconv.ParseFloat(record[6], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU core clock value '%s': %v", record[6], err)
}
csvFile.DataGPUCoreClock = append(csvFile.DataGPUCoreClock, val)
val, err = strconv.ParseFloat(record[7], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU mem clock value '%s': %v", record[7], err)
}
csvFile.DataGPUMemClock = append(csvFile.DataGPUMemClock, val)
val, err = strconv.ParseFloat(record[8], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU VRAM used value '%s': %v", record[8], err)
}
csvFile.DataGPUVRAMUsed = append(csvFile.DataGPUVRAMUsed, val)
val, err = strconv.ParseFloat(record[9], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse GPU power value '%s': %v", record[9], err)
}
csvFile.DataGPUPower = append(csvFile.DataGPUPower, val)
val, err = strconv.ParseFloat(record[10], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse RAM used value '%s': %v", record[10], err)
}
csvFile.DataRAMUsed = append(csvFile.DataRAMUsed, val)
val, err = strconv.ParseFloat(record[11], 64)
if err != nil {
return nil, fmt.Errorf("failed to parse SWAP used value '%s': %v", record[11], err)
}
csvFile.DataSwapUsed = append(csvFile.DataSwapUsed, val)
counter++
if counter == 100000 {
return nil, errors.New("CSV file cannot have more than 100000 data lines")
}
}
// Remember the largest line count seen so far so the next file can preallocate its slices more accurately
if linesCount < len(csvFile.DataFPS) {
linesCount = len(csvFile.DataFPS)
}
if err := scanner.Err(); err != nil {
log.Println("error (4) parsing CSV:", err)
return nil, err
}
if len(csvFile.DataFPS) == 0 &&
len(csvFile.DataFrameTime) == 0 &&
len(csvFile.DataCPULoad) == 0 &&
len(csvFile.DataGPULoad) == 0 &&
len(csvFile.DataCPUTemp) == 0 &&
len(csvFile.DataGPUTemp) == 0 &&
len(csvFile.DataGPUCoreClock) == 0 &&
len(csvFile.DataGPUMemClock) == 0 &&
len(csvFile.DataGPUVRAMUsed) == 0 &&
len(csvFile.DataGPUPower) == 0 &&
len(csvFile.DataRAMUsed) == 0 &&
len(csvFile.DataSwapUsed) == 0 {
return nil, errors.New("empty CSV file (err 8)")
}
csvFiles = append(csvFiles, &csvFile)
}
return csvFiles, nil
}
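For reference, readBenchmarkFiles expects what looks like MangoHud's CSV log layout: one line of spec headers, one line of spec values, one line of benchmark-data headers, then the data rows. The test-style sketch below is not part of this commit; the form field name "files", the file name and every value in it are made up, and only the header layout matches what the parser checks.

package flightlesssomething

import (
	"bytes"
	"mime/multipart"
	"net/http"
	"testing"
)

// TestReadBenchmarkFiles_sketch is a hypothetical test: it uploads one
// MangoHud-style CSV (fabricated values) and checks that readBenchmarkFiles
// returns a single BenchmarkData entry labelled after the file name.
func TestReadBenchmarkFiles_sketch(t *testing.T) {
	csvBody := "os,cpu,gpu,ram,kernel,driver,cpuscheduler\n" +
		"Arch Linux,AMD Ryzen 7 5800X,AMD Radeon RX 6800 XT,32795148,6.9.7-arch1-1,Mesa 24.1,BORE\n" +
		"fps,frametime,cpu_load,gpu_load,cpu_temp,gpu_temp,gpu_core_clock,gpu_mem_clock,gpu_vram_used,gpu_power,ram_used,swap_used,process_rss,elapsed\n" +
		"144.2,6.93,23.5,97.0,55,62,2400,1000,7.2,230,12.1,0,4.5,16674\n"

	// Build an in-memory multipart upload containing the CSV.
	var form bytes.Buffer
	mw := multipart.NewWriter(&form)
	fw, err := mw.CreateFormFile("files", "mygame.csv")
	if err != nil {
		t.Fatal(err)
	}
	fw.Write([]byte(csvBody))
	mw.Close()

	// Parse it back into []*multipart.FileHeader, the way the upload handler would receive it.
	req, _ := http.NewRequest(http.MethodPost, "/", &form)
	req.Header.Set("Content-Type", mw.FormDataContentType())
	if err := req.ParseMultipartForm(1 << 20); err != nil {
		t.Fatal(err)
	}

	data, err := readBenchmarkFiles(req.MultipartForm.File["files"])
	if err != nil {
		t.Fatal(err)
	}
	if len(data) != 1 || data[0].Label != "mygame" || len(data[0].DataFPS) != 1 {
		t.Fatalf("unexpected result: %+v", data)
	}
}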
// truncateString truncates the input string to a maximum of 100 characters and appends "..." if it exceeds that length.
func truncateString(s string) string {
const maxLength = 100
if len(s) > maxLength {
return s[:maxLength] + "..."
}
return s
}
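One caveat the commit does not address: truncateString counts bytes, so a spec string with multi-byte UTF-8 characters around the 100-byte mark can be cut in the middle of a character. A hypothetical rune-aware variant, shown only as a sketch:

// truncateStringRunes is a hypothetical rune-aware alternative to
// truncateString: it truncates after 100 runes rather than 100 bytes, so a
// multi-byte UTF-8 character is never split.
func truncateStringRunes(s string) string {
	const maxLength = 100
	r := []rune(s)
	if len(r) <= maxLength {
		return s
	}
	return string(r[:maxLength]) + "..."
}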
func storeBenchmarkData(csvFiles []*BenchmarkData, benchmarkID uint) error {
// Store to disk
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
file, err := os.Create(filePath)
if err != nil {
return err
}
defer file.Close()
// Convert to []byte
var buffer bytes.Buffer
gobEncoder := gob.NewEncoder(&buffer)
err = gobEncoder.Encode(csvFiles)
if err != nil {
return err
}
// Compress and write to file
zstdEncoder, err := zstd.NewWriter(file, zstd.WithEncoderLevel(zstd.SpeedFastest))
if err != nil {
return err
}
defer zstdEncoder.Close()
_, err = zstdEncoder.Write(buffer.Bytes())
return err
}
func retrieveBenchmarkData(benchmarkID uint) (csvFiles []*BenchmarkData, err error) {
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
file, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer file.Close()
// Decompress and read from file
zstdDecoder, err := zstd.NewReader(file)
if err != nil {
return nil, err
}
defer zstdDecoder.Close()
var buffer bytes.Buffer
_, err = buffer.ReadFrom(zstdDecoder)
if err != nil {
return nil, err
}
// Decode
gobDecoder := gob.NewDecoder(&buffer)
err = gobDecoder.Decode(&csvFiles)
return csvFiles, err
}
func deleteBenchmarkData(benchmarkID uint) error {
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
return os.Remove(filePath)
}
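storeBenchmarkData and retrieveBenchmarkData are symmetric: the parsed runs are gob-encoded, zstd-compressed and written to <benchmarksDir>/<benchmarkID>.bin, then read back the same way. A minimal usage sketch, assuming benchmarksDir is initialised elsewhere in the package and using a made-up ID:

// exampleRoundTrip is a hypothetical illustration of the storage round trip:
// persist parsed benchmark data under an ID, load it back, then remove it.
func exampleRoundTrip(data []*BenchmarkData) error {
	const id uint = 42 // made-up benchmark ID

	if err := storeBenchmarkData(data, id); err != nil {
		return err
	}

	loaded, err := retrieveBenchmarkData(id)
	if err != nil {
		return err
	}
	log.Printf("restored %d run(s) for benchmark %d", len(loaded), id)

	return deleteBenchmarkData(id)
}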

View File

@@ -181,7 +181,7 @@ func postBenchmarkCreate(c *gin.Context) {
// Read CSV files
// Store to disk only when DB record is created successfully
csvFiles, csvSpecs, err := readCSVFiles(files)
csvFiles, err := readBenchmarkFiles(files)
if err != nil {
c.HTML(http.StatusUnauthorized, "error.tmpl", gin.H{
"activePage": "error",
@@ -197,13 +197,6 @@ func postBenchmarkCreate(c *gin.Context) {
UserID: session.Get("ID").(uint),
Title: title,
Description: description,
SpecDistro: csvSpecs.Distro,
SpecCPU: csvSpecs.CPU,
SpecGPU: csvSpecs.GPU,
SpecRAM: csvSpecs.RAM,
SpecKernel: csvSpecs.Kernel,
SpecScheduler: csvSpecs.Scheduler,
}
result := db.Create(&benchmark)
@@ -331,7 +324,7 @@ func getBenchmark(c *gin.Context) {
var benchmark Benchmark
benchmark.ID = uint(intID)
var csvFiles []*CSVFile
var benchmarkDatas []*BenchmarkData
var errCSV, errDB error
errHTTPStatus := http.StatusInternalServerError
@@ -340,7 +333,7 @@ func getBenchmark(c *gin.Context) {
go func() {
defer wg.Done()
csvFiles, errCSV = retrieveBenchmarkData(benchmark.ID)
benchmarkDatas, errCSV = retrieveBenchmarkData(benchmark.ID)
}()
go func() {
@@ -379,6 +372,6 @@ func getBenchmark(c *gin.Context) {
"userID": session.Get("ID"),
"benchmark": benchmark,
"benchmarkData": csvFiles,
"benchmarkData": benchmarkDatas,
})
}

11 cmd/fsmig1/main.go (new file)

@@ -0,0 +1,11 @@
package main
import "fmt"
/*
This app is a migration tool from v0.0.7 to v0.0.8
*/
func main() {
fmt.Println("Hello, World!")
}
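The committed main is still a stub, so the following is only a guess at what the v0.0.7 to v0.0.8 migration could look like, based on the two on-disk formats shown in this commit: the old CSVFile (deleted csv.go) stores each metric as one comma-joined string, while the new BenchmarkData stores []float64 slices. The trimmed structs, helper names and demo values below are illustrative, not the author's plan.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// oldRun and newRun are trimmed, hypothetical copies of the old CSVFile and
// new BenchmarkData shapes, kept local so the sketch is self-contained.
type oldRun struct {
	Filename       string
	FPSPointsArray string
	FrameTimeArray string
}

type newRun struct {
	Label         string
	DataFPS       []float64
	DataFrameTime []float64
}

// splitFloats converts one comma-joined metric string into a []float64 slice.
func splitFloats(joined string) ([]float64, error) {
	parts := strings.Split(joined, ",")
	out := make([]float64, 0, len(parts))
	for _, p := range parts {
		v, err := strconv.ParseFloat(strings.TrimSpace(p), 64)
		if err != nil {
			return nil, err
		}
		out = append(out, v)
	}
	return out, nil
}

// convertRun maps one old-format run onto the new format. The remaining ten
// metric arrays would be handled the same way, and the result re-encoded with
// gob + zstd exactly as storeBenchmarkData does.
func convertRun(o oldRun) (newRun, error) {
	fps, err := splitFloats(o.FPSPointsArray)
	if err != nil {
		return newRun{}, err
	}
	ft, err := splitFloats(o.FrameTimeArray)
	if err != nil {
		return newRun{}, err
	}
	return newRun{Label: o.Filename, DataFPS: fps, DataFrameTime: ft}, nil
}

func main() {
	o := oldRun{Filename: "demo", FPSPointsArray: "60,61.5,59.8", FrameTimeArray: "16.6,16.2,16.7"}
	n, err := convertRun(o)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s: %d FPS samples migrated\n", n.Label, len(n.DataFPS))
}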

256 csv.go (deleted)

@@ -1,256 +0,0 @@
package flightlesssomething
import (
"bufio"
"bytes"
"encoding/gob"
"errors"
"fmt"
"log"
"math/big"
"mime/multipart"
"os"
"path/filepath"
"strings"
"github.com/dustin/go-humanize"
"github.com/klauspost/compress/zstd"
)
type CSVFile struct {
Filename string
FPSPointsArray string
FrameTimeArray string
CPULoadArray string
GPULoadArray string
CPUTempArray string
GPUTempArray string
GPUCoreClockArray string
GPUMemClockArray string
GPUVRAMUsedArray string
GPUPowerArray string
RAMUsedArray string
SwapUsedArray string
}
type CSVSpecs struct {
MaxPoints int
Distro string
Kernel string
GPU string
CPU string
RAM string
Scheduler string
}
// readCSVFiles reads multiple CSV files and returns a slice of CSVFile pointers and the maximum number of FPS records found in any file
func readCSVFiles(files []*multipart.FileHeader) ([]*CSVFile, *CSVSpecs, error) {
csvFiles := make([]*CSVFile, 0)
csvSpecs := &CSVSpecs{}
var linesCount int
for _, fileHeader := range files {
csvFile := CSVFile{}
file, err := fileHeader.Open()
if err != nil {
return nil, nil, err
}
defer file.Close()
scanner := bufio.NewScanner(file)
// Set file name (without extension)
csvFile.Filename = strings.TrimSuffix(fileHeader.Filename, ".csv")
// First line should contain this: os,cpu,gpu,ram,kernel,driver,cpuscheduler
if !scanner.Scan() {
return nil, nil, errors.New("invalid CSV file (err 1)")
}
record := strings.Split(strings.TrimRight(scanner.Text(), ","), ",")
if len(record) != 7 {
return nil, nil, errors.New("invalid CSV file (err 2)")
}
// Second line should contain values
if !scanner.Scan() {
return nil, nil, errors.New("invalid CSV file (err 3)")
}
record = strings.Split(scanner.Text(), ",")
for i, v := range record {
switch i {
case 0:
csvSpecs.Distro = truncateString(strings.TrimSpace(v))
case 1:
csvSpecs.CPU = truncateString(strings.TrimSpace(v))
case 2:
csvSpecs.GPU = truncateString(strings.TrimSpace(v))
case 3:
kilobytes := new(big.Int)
_, ok := kilobytes.SetString(strings.TrimSpace(v), 10)
if !ok {
return nil, nil, errors.New("failed to convert RAM to big.Int")
}
bytes := new(big.Int).Mul(kilobytes, big.NewInt(1024))
csvSpecs.RAM = humanize.Bytes(bytes.Uint64())
case 4:
csvSpecs.Kernel = truncateString(strings.TrimSpace(v))
case 6:
csvSpecs.Scheduler = truncateString(strings.TrimSpace(v))
}
}
// 3rd line contain headers for benchmark data: fps,frametime,cpu_load,gpu_load,cpu_temp,gpu_temp,gpu_core_clock,gpu_mem_clock,gpu_vram_used,gpu_power,ram_used,swap_used,process_rss,elapsed
if !scanner.Scan() {
return nil, nil, errors.New("invalid CSV file (err 5)")
}
record = strings.Split(strings.TrimRight(scanner.Text(), ","), ",")
if len(record) != 14 {
return nil, nil, errors.New("invalid CSV file (err 6)")
}
fpsPoints := make([]string, 0, linesCount)
frametimePoints := make([]string, 0, linesCount)
cpuLoadPoints := make([]string, 0, linesCount)
gpuLoadPoints := make([]string, 0, linesCount)
cpuTempPoints := make([]string, 0, linesCount)
gpuTempPoints := make([]string, 0, linesCount)
gpuCoreClockPoints := make([]string, 0, linesCount)
gpuMemClockPoints := make([]string, 0, linesCount)
gpuVRAMUsedPoints := make([]string, 0, linesCount)
gpuPowerPoints := make([]string, 0, linesCount)
RAMUsedPoints := make([]string, 0, linesCount)
SWAPUsedPoints := make([]string, 0, linesCount)
var counter uint
for scanner.Scan() {
record = strings.Split(scanner.Text(), ",")
if len(record) != 14 {
return nil, nil, errors.New("invalid CSV file (err 7)")
}
fpsPoints = append(fpsPoints, record[0])
frametimePoints = append(frametimePoints, record[1])
cpuLoadPoints = append(cpuLoadPoints, record[2])
gpuLoadPoints = append(gpuLoadPoints, record[3])
cpuTempPoints = append(cpuTempPoints, record[4])
gpuTempPoints = append(gpuTempPoints, record[5])
gpuCoreClockPoints = append(gpuCoreClockPoints, record[6])
gpuMemClockPoints = append(gpuMemClockPoints, record[7])
gpuVRAMUsedPoints = append(gpuVRAMUsedPoints, record[8])
gpuPowerPoints = append(gpuPowerPoints, record[9])
RAMUsedPoints = append(RAMUsedPoints, record[10])
SWAPUsedPoints = append(SWAPUsedPoints, record[11])
counter++
if counter == 100000 {
return nil, nil, errors.New("too large CSV file")
}
}
// More efficient buffer allocation
linesCount = len(fpsPoints)
if err := scanner.Err(); err != nil {
log.Println("error (4) parsing CSV:", err)
return nil, nil, err
}
if len(fpsPoints) == 0 {
return nil, nil, errors.New("invalid CSV file (err 8)")
}
if len(fpsPoints) > csvSpecs.MaxPoints {
csvSpecs.MaxPoints = len(fpsPoints)
}
csvFile.FPSPointsArray = strings.Join(fpsPoints, ",")
csvFile.FrameTimeArray = strings.Join(frametimePoints, ",")
csvFile.CPULoadArray = strings.Join(cpuLoadPoints, ",")
csvFile.GPULoadArray = strings.Join(gpuLoadPoints, ",")
csvFile.CPUTempArray = strings.Join(cpuTempPoints, ",")
csvFile.GPUTempArray = strings.Join(gpuTempPoints, ",")
csvFile.GPUCoreClockArray = strings.Join(gpuCoreClockPoints, ",")
csvFile.GPUMemClockArray = strings.Join(gpuMemClockPoints, ",")
csvFile.GPUVRAMUsedArray = strings.Join(gpuVRAMUsedPoints, ",")
csvFile.GPUPowerArray = strings.Join(gpuPowerPoints, ",")
csvFile.RAMUsedArray = strings.Join(RAMUsedPoints, ",")
csvFile.SwapUsedArray = strings.Join(SWAPUsedPoints, ",")
csvFiles = append(csvFiles, &csvFile)
}
return csvFiles, csvSpecs, nil
}
// truncateString truncates the input string to a maximum of 100 characters and appends "..." if it exceeds that length.
func truncateString(s string) string {
const maxLength = 100
if len(s) > maxLength {
return s[:maxLength] + "..."
}
return s
}
func storeBenchmarkData(csvFiles []*CSVFile, benchmarkID uint) error {
// Store to disk
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
file, err := os.Create(filePath)
if err != nil {
return err
}
defer file.Close()
// Convert to []byte
var buffer bytes.Buffer
gobEncoder := gob.NewEncoder(&buffer)
err = gobEncoder.Encode(csvFiles)
if err != nil {
return err
}
// Compress and write to file
zstdEncoder, err := zstd.NewWriter(file, zstd.WithEncoderLevel(zstd.SpeedFastest))
if err != nil {
return err
}
defer zstdEncoder.Close()
_, err = zstdEncoder.Write(buffer.Bytes())
return err
}
func retrieveBenchmarkData(benchmarkID uint) (csvFiles []*CSVFile, err error) {
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
file, err := os.Open(filePath)
if err != nil {
return nil, err
}
defer file.Close()
// Decompress and read from file
zstdDecoder, err := zstd.NewReader(file)
if err != nil {
return nil, err
}
defer zstdDecoder.Close()
var buffer bytes.Buffer
_, err = buffer.ReadFrom(zstdDecoder)
if err != nil {
return nil, err
}
// Decode
gobDecoder := gob.NewDecoder(&buffer)
err = gobDecoder.Decode(&csvFiles)
return csvFiles, err
}
func deleteBenchmarkData(benchmarkID uint) error {
filePath := filepath.Join(benchmarksDir, fmt.Sprintf("%d.bin", benchmarkID))
return os.Remove(filePath)
}

View File

@@ -18,12 +18,6 @@ type Benchmark struct {
UserID uint
Title string
Description string
SpecDistro string
SpecCPU string
SpecGPU string
SpecRAM string
SpecKernel string
SpecScheduler string
CreatedAtHumanized string `gorm:"-"` // Human readable "X h/m/s ago" version of CreatedAt (filled automatically)

0 static/js/benchmark.js (new, empty file)

View File

@@ -1,29 +1,21 @@
{{template "header.tmpl" .}}
<div class="row">
<div class="col-md-8">
<div class="p-3">
<div class="d-flex justify-content-between align-items-center">
<h2>Benchmark #{{ .benchmark.ID }}</h2>
</div>
<div class="text-center">
<h5><b>{{ .benchmark.Title }}</b></h5>
<p>{{ .benchmark.Description }}</p>
<p><small>Submitted <b>{{ .benchmark.CreatedAtHumanized }}</b> by <b>{{ .benchmark.User.Username }}.</b></small></p>
</div>
</div>
</div>
<div class="col-md-4">
<ul>
<li>Distro: <code>{{ .benchmark.SpecDistro }}</code></li>
<li>Kernel: <code>{{ .benchmark.SpecKernel }}</code></li>
<li>GPU: <code>{{ .benchmark.SpecGPU }}</code></li>
<li>CPU: <code>{{ .benchmark.SpecCPU }}</code></li>
<li>RAM: <code>{{ .benchmark.SpecRAM }}</code></li>
<li>Scheduler: <code>{{ .benchmark.SpecScheduler }}</code></li>
</ul>
</div>
</div>
{{if eq .benchmark.UserID .userID }}
<div class="row">
<div class="col-12">
<a class="btn btn-warning" data-bs-toggle="modal" data-bs-target="#exampleModal">Delete benchmark</a>
</div>
</div>
<div class="modal" id="exampleModal" tabindex="-1">
<div class="modal-dialog">
@@ -46,6 +38,35 @@
</div>
{{end}}
<div class="row">
<div class="col-12">
<table class="table table-sm table-bordered">
<thead>
<tr>
<th scope="col">Label</th>
<th scope="col">OS</th>
<th scope="col">CPU</th>
<th scope="col">GPU</th>
<th scope="col">RAM</th>
<th scope="col">OS specific</th>
</tr>
</thead>
<tbody>
{{- range .benchmarkData }}
<tr>
<th scope="row">{{ .Label }}</th>
<td>{{ .SpecOS }}</td>
<td>{{ .SpecCPU }}</td>
<td>{{ .SpecGPU }}</td>
<td>{{ .SpecRAM }}</td>
<td>{{ .SpecLinuxKernel }} {{ .SpecLinuxScheduler }}</td>
</tr>
{{- end }}
</tbody>
</table>
</div>
</div>
<div id="fpsChart" style="height:250pt;"></div>
<div id="frameTimeChart" style="height:250pt;"></div>
@@ -100,79 +121,82 @@
<script src="https://code.highcharts.com/modules/boost.js"></script>
<script>
// Render data here
var fpsDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .FPSPointsArray }}' },
{ label: '{{ .Label }}', data: {{ .DataFPS }} },
{{- end }}
];
var frameTimeDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .FrameTimeArray }}' },
{ label: '{{ .Label }}', data: {{ .DataFrameTime }} },
{{- end }}
];
var cpuLoadDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .CPULoadArray }}' },
{ label: '{{ .Label }}', data: {{ .DataCPULoad }} },
{{- end }}
];
var gpuLoadDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPULoadArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPULoad }} },
{{- end }}
];
var cpuTempDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .CPUTempArray }}' },
{ label: '{{ .Label }}', data: {{ .DataCPUTemp }} },
{{- end }}
];
var gpuTempDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPUTempArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPUTemp }} },
{{- end }}
];
var gpuCoreClockDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPUCoreClockArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPUCoreClock }} },
{{- end }}
];
var gpuMemClockDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPUMemClockArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPUMemClock }} },
{{- end }}
];
var gpuVRAMUsedDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPUVRAMUsedArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPUVRAMUsed }} },
{{- end }}
];
var gpuPowerDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .GPUPowerArray }}' },
{ label: '{{ .Label }}', data: {{ .DataGPUPower }} },
{{- end }}
];
var ramUsedDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .RAMUsedArray }}' },
{ label: '{{ .Label }}', data: {{ .DataRAMUsed }} },
{{- end }}
];
var swapUsedDataArrays = [
{{- range .benchmarkData }}
{ label: '{{ .Filename }}', data: '{{ .SwapUsedArray }}' },
{ label: '{{ .Label }}', data: {{ .DataSwapUsed }} },
{{- end }}
];
</script>
<script>
// Define a set of colors to be used for the charts
var colors = Highcharts.getOptions().colors;
@@ -364,18 +388,10 @@
};
}
function createDataset(label, data, color) {
return {
name: label,
data: data.split(',').map(Number),
color: color
};
}
function createChart(chartId, title, description, unit, dataArrays, maxY = null) {
var options = getLineChartOptions(title, description, unit, maxY);
options.series = dataArrays.map(function(dataArray, index) {
return createDataset(dataArray.label, dataArray.data, colors[index % colors.length]);
return {name: dataArray.label, data: dataArray.data, color: colors[index % colors.length]};
});
Highcharts.chart(chartId, options);
@@ -421,11 +437,11 @@
// Calculate average CPU and GPU load
var cpuLoadAverages = cpuLoadDataArrays.map(function(dataArray) {
return calculateAverage(dataArray.data.split(',').map(Number));
return calculateAverage(dataArray.data);
});
var gpuLoadAverages = gpuLoadDataArrays.map(function(dataArray) {
return calculateAverage(dataArray.data.split(',').map(Number));
return calculateAverage(dataArray.data);
});
// Create bar charts for average CPU and GPU load
@@ -439,10 +455,9 @@
var maxFPSData = [];
fpsDataArrays.forEach(function(dataArray) {
var data = dataArray.data.split(',').map(Number);
var minFPS = calculatePercentile(data, 1);
var avgFPS = calculateAverage(data);
var maxFPS = calculatePercentile(data, 97);
var minFPS = calculatePercentile(dataArray.data, 1);
var avgFPS = calculateAverage(dataArray.data);
var maxFPS = calculatePercentile(dataArray.data, 97);
categories.push(dataArray.label);
minFPSData.push(minFPS);
@@ -546,8 +561,7 @@
// Calculate average FPS for each filename
var avgFPSData = fpsDataArrays.map(function(dataArray) {
var data = dataArray.data.split(',').map(Number);
return calculateAverage(data);
return calculateAverage(dataArray.data);
});
// Calculate FPS as a percentage of the first element
@@ -675,8 +689,7 @@
document.getElementById('spikeThresholdValue').innerText = threshold + '%';
var spikePercentages = fpsDataArrays.map(function(dataArray) {
var data = dataArray.data.split(',').map(Number);
return calculateSpikes(data, threshold);
return calculateSpikes(dataArray.data, threshold);
});
Highcharts.chart('spikesChart', {

View File

@@ -1,6 +1,7 @@
<!doctype html>
<html data-bs-theme="dark">
<head>
<link rel="icon" href="data:,">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.3/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous">