package main

import (
	"encoding/json"
	"fmt"
	"math"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/dgraph-io/badger/v4"
	"github.com/google/uuid"
)

// submissionPrefix namespaces every submission record key in BadgerDB.
const submissionPrefix = "submission:"

// indexedSubmission pairs a stored submission with pre-computed,
// normalized search strings so queries avoid re-deriving them per request.
type indexedSubmission struct {
	submission  *Submission
	searchText  string // normalized full-text haystack built from all fields
	cpuText     string // normalized CPU brand string only
	submittedAt time.Time
}

// Store persists submissions in BadgerDB and maintains an in-memory index
// ordered newest-first for listing and searching. All index access is
// guarded by mu; the DB handles its own transactional safety.
type Store struct {
	db *badger.DB

	mu         sync.RWMutex
	orderedIDs []string // submission IDs, newest first
	records    map[string]*indexedSubmission
}

// OpenStore opens (or creates) the BadgerDB at path and rebuilds the
// in-memory index from the persisted records. On index-load failure the
// DB handle is closed before returning the error.
func OpenStore(path string) (*Store, error) {
	opts := badger.DefaultOptions(path).WithLogger(nil)
	db, err := badger.Open(opts)
	if err != nil {
		return nil, err
	}
	store := &Store{
		db:      db,
		records: make(map[string]*indexedSubmission),
	}
	if err := store.loadIndex(); err != nil {
		_ = db.Close() // best effort; the loadIndex error is the one that matters
		return nil, err
	}
	return store, nil
}

// Close releases the underlying BadgerDB handle.
func (s *Store) Close() error {
	return s.db.Close()
}

// Count returns the number of indexed submissions.
func (s *Store) Count() int {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return len(s.orderedIDs)
}

// SaveSubmission assigns a fresh ID and timestamp to result, persists it,
// and inserts it at the front of the in-memory index (newest first).
// It returns a defensive copy of the stored submission.
func (s *Store) SaveSubmission(result BenchmarkResult, submitter string) (*Submission, error) {
	submission := &Submission{
		SubmissionID:    uuid.NewString(),
		Submitter:       normalizeSubmitter(submitter),
		SubmittedAt:     time.Now().UTC(),
		BenchmarkResult: result,
	}
	key := submissionKey(submission.SubmittedAt, submission.SubmissionID)
	payload, err := json.Marshal(submission)
	if err != nil {
		return nil, err
	}
	// Persist first; only mutate the in-memory index once the write succeeded.
	if err := s.db.Update(func(txn *badger.Txn) error {
		return txn.Set([]byte(key), payload)
	}); err != nil {
		return nil, err
	}
	indexed := newIndexedSubmission(submission)
	s.mu.Lock()
	s.records[submission.SubmissionID] = indexed
	// Prepend: orderedIDs is maintained newest-first, matching the
	// reverse-timestamp key order used by loadIndex.
	s.orderedIDs = append([]string{submission.SubmissionID}, s.orderedIDs...)
	s.mu.Unlock()
	return cloneSubmission(submission), nil
}

// ListSubmissions returns one page of submissions (newest first) plus the
// total count. Page bounds are computed by pageBounds; out-of-range pages
// yield an empty slice.
func (s *Store) ListSubmissions(page, pageSize int) ([]Submission, int) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	total := len(s.orderedIDs)
	start, end, _ := pageBounds(page, pageSize, total)
	results := make([]Submission, 0, max(0, end-start))
	for _, id := range s.orderedIDs[start:end] {
		record := s.records[id]
		if record == nil {
			continue
		}
		// Copy so callers cannot mutate the indexed record.
		results = append(results, *cloneSubmission(record.submission))
	}
	return results, total
}

// SearchSubmissions returns all submissions (newest first) whose indexed
// search text contains every token of text AND whose CPU brand string
// contains every token of cpu. Empty queries match everything.
func (s *Store) SearchSubmissions(text, cpu string) []Submission {
	queryText := normalizeSearchText(text)
	cpuText := normalizeSearchText(cpu)
	s.mu.RLock()
	defer s.mu.RUnlock()
	results := make([]Submission, 0)
	for _, id := range s.orderedIDs {
		record := s.records[id]
		if record == nil {
			continue
		}
		if !matchesSearch(record.searchText, queryText) {
			continue
		}
		if !matchesSearch(record.cpuText, cpuText) {
			continue
		}
		results = append(results, *cloneSubmission(record.submission))
	}
	return results
}

// loadIndex scans every submission record and rebuilds records/orderedIDs.
// Keys embed a reversed timestamp, so Badger's ascending key iteration
// yields submissions newest-first and appends preserve that order.
func (s *Store) loadIndex() error {
	return s.db.View(func(txn *badger.Txn) error {
		opts := badger.DefaultIteratorOptions
		opts.PrefetchValues = true
		opts.Prefix = []byte(submissionPrefix)
		it := txn.NewIterator(opts)
		defer it.Close()
		for it.Rewind(); it.Valid(); it.Next() {
			item := it.Item()
			payload, err := item.ValueCopy(nil)
			if err != nil {
				return err
			}
			var submission Submission
			if err := json.Unmarshal(payload, &submission); err != nil {
				return fmt.Errorf("decode %q: %w", item.Key(), err)
			}
			indexed := newIndexedSubmission(&submission)
			s.records[submission.SubmissionID] = indexed
			s.orderedIDs = append(s.orderedIDs, submission.SubmissionID)
		}
		return nil
	})
}

// newIndexedSubmission builds the cached index entry for a submission,
// cloning the submission so the index owns an isolated copy.
func newIndexedSubmission(submission *Submission) *indexedSubmission {
	return &indexedSubmission{
		submission:  cloneSubmission(submission),
		searchText:  buildSearchText(submission),
		cpuText:     normalizeSearchText(submission.CPUInfo.BrandString),
		submittedAt: submission.SubmittedAt,
	}
}

// buildSearchText flattens every searchable field of a submission into a
// single normalized (lowercased, whitespace-collapsed) haystack string.
func buildSearchText(submission *Submission) string {
	parts := []string{
		submission.SubmissionID,
		submission.Submitter,
		submission.CPUInfo.BrandString,
		submission.CPUInfo.VendorID,
		threadModeLabel(submission.Config.MultiCore),
		strconv.Itoa(submission.Config.DurationSecs),
		strconv.Itoa(submission.CPUInfo.PhysicalCores),
		strconv.Itoa(submission.CPUInfo.LogicalCores),
		strconv.FormatInt(submission.Duration, 10),
		strconv.FormatInt(submission.TotalOps, 10),
		strconv.FormatInt(submission.Score, 10),
		fmt.Sprintf("%.4f", submission.MOpsPerSec),
	}
	parts = append(parts, submission.CPUInfo.SupportedFeatures...)
	for _, result := range submission.CoreResults {
		parts = append(parts,
			strconv.Itoa(result.LogicalID),
			result.CoreType,
			strconv.FormatInt(result.TotalOps, 10),
			fmt.Sprintf("%.4f", result.MOpsPerSec),
		)
	}
	return normalizeSearchText(strings.Join(parts, " "))
}

// submissionKey builds a DB key whose embedded reversed nanosecond
// timestamp makes ascending key order equal newest-first submission order.
// %019d zero-pads to the full width of MaxInt64 so lexical order matches
// numeric order.
func submissionKey(timestamp time.Time, submissionID string) string {
	reversed := math.MaxInt64 - timestamp.UTC().UnixNano()
	return fmt.Sprintf("%s%019d:%s", submissionPrefix, reversed, submissionID)
}

// normalizeSearchText lowercases value and collapses all runs of
// whitespace to single spaces (trimming the ends).
func normalizeSearchText(value string) string {
	return strings.Join(strings.Fields(strings.ToLower(value)), " ")
}

// matchesSearch reports whether target contains every whitespace-separated
// token of query. An empty query matches anything.
func matchesSearch(target, query string) bool {
	if query == "" {
		return true
	}
	for _, token := range strings.Fields(query) {
		if !strings.Contains(target, token) {
			return false
		}
	}
	return true
}

// cloneSubmission returns a deep-enough copy of submission: the struct is
// copied by value and each contained slice gets its own backing array so
// caller mutations cannot leak into the index (and vice versa).
func cloneSubmission(submission *Submission) *Submission {
	if submission == nil {
		return nil
	}
	copySubmission := *submission
	if len(submission.CoreResults) > 0 {
		copySubmission.CoreResults = append([]CoreResult(nil), submission.CoreResults...)
	}
	if len(submission.CPUInfo.Cores) > 0 {
		copySubmission.CPUInfo.Cores = append([]CPUCoreDescriptor(nil), submission.CPUInfo.Cores...)
	}
	if len(submission.CPUInfo.SupportedFeatures) > 0 {
		copySubmission.CPUInfo.SupportedFeatures = append([]string(nil), submission.CPUInfo.SupportedFeatures...)
	}
	// Was corrupted to "©Submission" (HTML-entity mangling of &copySubmission).
	return &copySubmission
}