refactor(main): delegate setup to config, store, and web packages
Replace in-file bootstrap logic with package-level constructors in `run`: - use `config.Load()` instead of local env parsing/AppConfig helpers - use `store.Open()` and `web.New()` for persistence and app wiring - rename local store variable to `benchmarkStore` for clarity This centralizes startup concerns in dedicated modules, reducing `main.go` boilerplate and improving maintainability.
This commit is contained in:
284
lib/store/store.go
Normal file
@@ -0,0 +1,284 @@
|
||||
package store
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"cpu-benchmark-server/lib/model"
|
||||
|
||||
"github.com/dgraph-io/badger/v4"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
|
||||
const submissionPrefix = "submission:"
|
||||
|
||||
type indexedSubmission struct {
|
||||
submission *model.Submission
|
||||
searchText string
|
||||
cpuText string
|
||||
}
|
||||
|
||||
type Store struct {
|
||||
db *badger.DB
|
||||
mu sync.RWMutex
|
||||
orderedIDs []string
|
||||
records map[string]*indexedSubmission
|
||||
}
|
||||
|
||||
func Open(path string) (*Store, error) {
|
||||
opts := badger.DefaultOptions(path).WithLogger(nil)
|
||||
db, err := badger.Open(opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
store := &Store{
|
||||
db: db,
|
||||
records: make(map[string]*indexedSubmission),
|
||||
}
|
||||
|
||||
if err := store.loadIndex(); err != nil {
|
||||
_ = db.Close()
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return store, nil
|
||||
}
|
||||
|
||||
func (s *Store) Close() error {
|
||||
return s.db.Close()
|
||||
}
|
||||
|
||||
func (s *Store) Count() int {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
|
||||
return len(s.orderedIDs)
|
||||
}
|
||||
|
||||
func (s *Store) SaveSubmission(result model.BenchmarkResult, submitter string) (*model.Submission, error) {
|
||||
submission := &model.Submission{
|
||||
SubmissionID: uuid.NewString(),
|
||||
Submitter: model.NormalizeSubmitter(submitter),
|
||||
SubmittedAt: time.Now().UTC(),
|
||||
BenchmarkResult: result,
|
||||
}
|
||||
|
||||
key := submissionKey(submission.SubmittedAt, submission.SubmissionID)
|
||||
payload, err := json.Marshal(submission)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := s.db.Update(func(txn *badger.Txn) error {
|
||||
return txn.Set([]byte(key), payload)
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
indexed := newIndexedSubmission(submission)
|
||||
|
||||
s.mu.Lock()
|
||||
s.records[submission.SubmissionID] = indexed
|
||||
s.orderedIDs = append([]string{submission.SubmissionID}, s.orderedIDs...)
|
||||
s.mu.Unlock()
|
||||
|
||||
return model.CloneSubmission(submission), nil
|
||||
}
|
||||
|
||||
func (s *Store) ListSubmissions(page, pageSize int) ([]model.Submission, int) {
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
|
||||
total := len(s.orderedIDs)
|
||||
start, end, _ := pageBounds(page, pageSize, total)
|
||||
results := make([]model.Submission, 0, max(0, end-start))
|
||||
|
||||
for _, id := range s.orderedIDs[start:end] {
|
||||
record := s.records[id]
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, *model.CloneSubmission(record.submission))
|
||||
}
|
||||
|
||||
return results, total
|
||||
}
|
||||
|
||||
func (s *Store) SearchSubmissions(text, cpu string) []model.Submission {
|
||||
queryText := normalizeSearchText(text)
|
||||
cpuText := normalizeSearchText(cpu)
|
||||
|
||||
s.mu.RLock()
|
||||
defer s.mu.RUnlock()
|
||||
|
||||
results := make([]model.Submission, 0)
|
||||
for _, id := range s.orderedIDs {
|
||||
record := s.records[id]
|
||||
if record == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if !matchesSearch(record.searchText, queryText) {
|
||||
continue
|
||||
}
|
||||
|
||||
if !matchesSearch(record.cpuText, cpuText) {
|
||||
continue
|
||||
}
|
||||
|
||||
results = append(results, *model.CloneSubmission(record.submission))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
func (s *Store) loadIndex() error {
|
||||
return s.db.View(func(txn *badger.Txn) error {
|
||||
opts := badger.DefaultIteratorOptions
|
||||
opts.PrefetchValues = true
|
||||
opts.Prefix = []byte(submissionPrefix)
|
||||
|
||||
it := txn.NewIterator(opts)
|
||||
defer it.Close()
|
||||
|
||||
for it.Rewind(); it.Valid(); it.Next() {
|
||||
item := it.Item()
|
||||
payload, err := item.ValueCopy(nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var submission model.Submission
|
||||
if err := json.Unmarshal(payload, &submission); err != nil {
|
||||
return fmt.Errorf("decode %q: %w", item.Key(), err)
|
||||
}
|
||||
|
||||
s.records[submission.SubmissionID] = newIndexedSubmission(&submission)
|
||||
s.orderedIDs = append(s.orderedIDs, submission.SubmissionID)
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func newIndexedSubmission(submission *model.Submission) *indexedSubmission {
|
||||
return &indexedSubmission{
|
||||
submission: model.CloneSubmission(submission),
|
||||
searchText: buildSearchText(submission),
|
||||
cpuText: normalizeSearchText(submission.CPUInfo.BrandString),
|
||||
}
|
||||
}
|
||||
|
||||
func buildSearchText(submission *model.Submission) string {
|
||||
parts := []string{
|
||||
submission.SubmissionID,
|
||||
submission.Submitter,
|
||||
submission.CPUInfo.BrandString,
|
||||
submission.CPUInfo.VendorID,
|
||||
model.ThreadModeLabel(submission.Config.MultiCore),
|
||||
strconv.Itoa(submission.Config.DurationSecs),
|
||||
strconv.Itoa(submission.CPUInfo.PhysicalCores),
|
||||
strconv.Itoa(submission.CPUInfo.LogicalCores),
|
||||
strconv.FormatInt(submission.Duration, 10),
|
||||
strconv.FormatInt(submission.TotalOps, 10),
|
||||
strconv.FormatInt(submission.Score, 10),
|
||||
fmt.Sprintf("%.4f", submission.MOpsPerSec),
|
||||
}
|
||||
|
||||
for _, feature := range submission.CPUInfo.SupportedFeatures {
|
||||
parts = append(parts, feature)
|
||||
}
|
||||
|
||||
for _, result := range submission.CoreResults {
|
||||
parts = append(parts,
|
||||
strconv.Itoa(result.LogicalID),
|
||||
result.CoreType,
|
||||
strconv.FormatInt(result.TotalOps, 10),
|
||||
fmt.Sprintf("%.4f", result.MOpsPerSec),
|
||||
)
|
||||
}
|
||||
|
||||
return normalizeSearchText(strings.Join(parts, " "))
|
||||
}
|
||||
|
||||
func submissionKey(timestamp time.Time, submissionID string) string {
|
||||
reversed := math.MaxInt64 - timestamp.UTC().UnixNano()
|
||||
return fmt.Sprintf("%s%019d:%s", submissionPrefix, reversed, submissionID)
|
||||
}
|
||||
|
||||
func normalizeSearchText(value string) string {
|
||||
return strings.Join(strings.Fields(strings.ToLower(value)), " ")
|
||||
}
|
||||
|
||||
func matchesSearch(target, query string) bool {
|
||||
if query == "" {
|
||||
return true
|
||||
}
|
||||
|
||||
for _, token := range strings.Fields(query) {
|
||||
if !strings.Contains(target, token) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func pageBounds(page, pageSize, total int) (int, int, int) {
|
||||
if pageSize <= 0 {
|
||||
pageSize = 50
|
||||
}
|
||||
|
||||
totalPages := totalPages(total, pageSize)
|
||||
if totalPages == 0 {
|
||||
return 0, 0, 1
|
||||
}
|
||||
|
||||
if page < 1 {
|
||||
page = 1
|
||||
}
|
||||
|
||||
if page > totalPages {
|
||||
page = totalPages
|
||||
}
|
||||
|
||||
start := (page - 1) * pageSize
|
||||
end := min(total, start+pageSize)
|
||||
return start, end, page
|
||||
}
|
||||
|
||||
func totalPages(total, pageSize int) int {
|
||||
if total == 0 || pageSize <= 0 {
|
||||
return 0
|
||||
}
|
||||
|
||||
pages := total / pageSize
|
||||
if total%pageSize != 0 {
|
||||
pages++
|
||||
}
|
||||
|
||||
return pages
|
||||
}
|
||||
|
||||
func min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
}
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
func max(a, b int) int {
|
||||
if a > b {
|
||||
return a
|
||||
}
|
||||
|
||||
return b
|
||||
}
|
||||
Reference in New Issue
Block a user