Skip to content

Commit

Permalink
chore: Remove store log writers
Browse files Browse the repository at this point in the history
We no longer need the log writers for visibility with the database
implementation in place.
  • Loading branch information
bgins committed Dec 10, 2024
1 parent 68567de commit 52fb242
Show file tree
Hide file tree
Showing 5 changed files with 3 additions and 129 deletions.
49 changes: 0 additions & 49 deletions pkg/jsonl/reader.go

This file was deleted.

48 changes: 0 additions & 48 deletions pkg/jsonl/writer.go

This file was deleted.

2 changes: 0 additions & 2 deletions pkg/solver/store/db/db.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@ import (

// SolverStoreDatabase is a solver store backed by a gorm database
// connection.
type SolverStoreDatabase struct {
	db *gorm.DB
	// TODO Log writers?
	// logWriters map[string]jsonl.Writer
}

func NewSolverStoreDatabase(connStr string, silenceLogs bool) (*SolverStoreDatabase, error) {
Expand Down
17 changes: 3 additions & 14 deletions pkg/solver/store/memory/store.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ import (
"sync"

"github.com/lilypad-tech/lilypad/pkg/data"
"github.com/lilypad-tech/lilypad/pkg/jsonl"
"github.com/lilypad-tech/lilypad/pkg/solver/store"
)

Expand All @@ -17,23 +16,15 @@ type SolverStoreMemory struct {
resultMap map[string]*data.Result
matchDecisionMap map[string]*data.MatchDecision
mutex sync.RWMutex
logWriters map[string]jsonl.Writer
}

// NewSolverStoreMemory constructs an in-memory solver store with empty
// maps for job offers, resource offers, deals, results, and match
// decisions, plus one JSONL log writer per record kind.
// It returns an error if any of the log writers cannot be created.
func NewSolverStoreMemory() (*SolverStoreMemory, error) {
	// One append-only log per record kind; see store.GetLogWriters for
	// where these files are opened.
	kinds := []string{"job_offers", "resource_offers", "deals", "decisions", "results"}
	logWriters, err := store.GetLogWriters(kinds)
	if err != nil {
		return nil, err
	}

	return &SolverStoreMemory{
		jobOfferMap:      map[string]*data.JobOfferContainer{},
		resourceOfferMap: map[string]*data.ResourceOfferContainer{},
		dealMap:          map[string]*data.DealContainer{},
		resultMap:        map[string]*data.Result{},
		matchDecisionMap: map[string]*data.MatchDecision{},
		logWriters:       logWriters,
	}, nil
}

Expand All @@ -42,7 +33,6 @@ func (s *SolverStoreMemory) AddJobOffer(jobOffer data.JobOfferContainer) (*data.
defer s.mutex.Unlock()
s.jobOfferMap[jobOffer.ID] = &jobOffer

s.logWriters["job_offers"].Write(jobOffer)
return &jobOffer, nil
}

Expand All @@ -51,23 +41,22 @@ func (s *SolverStoreMemory) AddResourceOffer(resourceOffer data.ResourceOfferCon
defer s.mutex.Unlock()
s.resourceOfferMap[resourceOffer.ID] = &resourceOffer

s.logWriters["resource_offers"].Write(resourceOffer)
return &resourceOffer, nil
}

// AddDeal stores the deal container in the in-memory deal map, keyed by
// the deal ID, and appends it to the "deals" JSONL log. The write lock
// guards the map for the duration of the update.
func (s *SolverStoreMemory) AddDeal(deal data.DealContainer) (*data.DealContainer, error) {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	s.dealMap[deal.ID] = &deal
	// Best-effort audit log: any result of Write is discarded here —
	// NOTE(review): confirm this is intentional before relying on the log.
	s.logWriters["deals"].Write(deal)

	return &deal, nil
}

// AddResult stores the result in the in-memory result map, keyed by the
// result's deal ID, and appends it to the "results" JSONL log. The write
// lock guards the map for the duration of the update.
func (s *SolverStoreMemory) AddResult(result data.Result) (*data.Result, error) {
	s.mutex.Lock()
	defer s.mutex.Unlock()
	s.resultMap[result.DealID] = &result
	// Best-effort audit log: any result of Write is discarded here.
	s.logWriters["results"].Write(result)

	return &result, nil
}

Expand All @@ -86,7 +75,7 @@ func (s *SolverStoreMemory) AddMatchDecision(resourceOffer string, jobOffer stri
Result: result,
}
s.matchDecisionMap[id] = decision
s.logWriters["decisions"].Write(decision)

return decision, nil
}

Expand Down
16 changes: 0 additions & 16 deletions pkg/solver/store/store.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,8 @@ package store

import (
"fmt"
"os"

"github.com/lilypad-tech/lilypad/pkg/data"
"github.com/lilypad-tech/lilypad/pkg/jsonl"
)

type GetJobOffersQuery struct {
Expand Down Expand Up @@ -86,17 +84,3 @@ type SolverStore interface {
// GetMatchID returns the identifier for a match between a resource offer
// and a job offer, formed as "<resourceOffer>-<jobOffer>".
func GetMatchID(resourceOffer string, jobOffer string) string {
	// Plain concatenation is the idiomatic form for a fixed two-part
	// join; fmt.Sprintf would box both arguments and go through
	// reflection for no benefit.
	return resourceOffer + "-" + jobOffer
}

// GetLogWriters opens one append-only JSONL log file per kind under
// /var/tmp (lilypad_<kind>.jsonl, created with mode 0644 if absent) and
// returns a map from kind to a jsonl.Writer over that file.
// If opening any file fails, all files opened so far are closed and the
// error is returned, so no descriptors are leaked on the error path.
func GetLogWriters(kinds []string) (map[string]jsonl.Writer, error) {
	logWriters := make(map[string]jsonl.Writer, len(kinds))
	// Track the underlying files so we can release them if a later
	// OpenFile fails (the original version leaked them on error).
	opened := make([]*os.File, 0, len(kinds))

	for _, kind := range kinds {
		logfile, err := os.OpenFile(fmt.Sprintf("/var/tmp/lilypad_%s.jsonl", kind), os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
		if err != nil {
			for _, f := range opened {
				f.Close()
			}
			return nil, err
		}
		opened = append(opened, logfile)
		logWriters[kind] = jsonl.NewWriter(logfile)
	}

	return logWriters, nil
}

0 comments on commit 52fb242

Please sign in to comment.