koji: Reduce excessive logging by retryablehttp
Use LeveledLogger. Fixes COMPOSER-1394.
This commit is contained in:
parent
4c5de045ff
commit
3ab2725042
2 changed files with 35 additions and 2 deletions
|
|
@ -330,14 +330,13 @@ func (k *Koji) uploadChunk(chunk []byte, filepath, filename string, offset uint6
|
|||
|
||||
return shouldRetry, retErr
|
||||
}
|
||||
logger := rh.Logger(logrus.StandardLogger())
|
||||
|
||||
client := rh.Client{
|
||||
HTTPClient: &http.Client{
|
||||
Transport: k.transport,
|
||||
},
|
||||
CheckRetry: countingCheckRetry,
|
||||
Logger: logger,
|
||||
Logger: rh.LeveledLogger(&LeveledLogrus{logrus.StandardLogger()}),
|
||||
}
|
||||
|
||||
respData, err := client.Post(u.String(), "application/octet-stream", bytes.NewBuffer(chunk))
|
||||
|
|
|
|||
34
internal/upload/koji/rh-logrus-adapter.go
Normal file
34
internal/upload/koji/rh-logrus-adapter.go
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
package koji
|
||||
|
||||
import (
	"fmt"

	"github.com/sirupsen/logrus"
)
|
||||
|
||||
// LeveledLogrus adapts a *logrus.Logger to retryablehttp's LeveledLogger
// interface (Error/Info/Debug/Warn with alternating key/value pairs), so
// retryablehttp's output respects logrus log levels instead of being
// printed unconditionally.
type LeveledLogrus struct {
	*logrus.Logger
}
|
||||
|
||||
func (l *LeveledLogrus) fields(keysAndValues ...interface{}) map[string]interface{} {
|
||||
fields := make(map[string]interface{})
|
||||
|
||||
for i := 0; i < len(keysAndValues)-1; i += 2 {
|
||||
fields[keysAndValues[i].(string)] = keysAndValues[i+1]
|
||||
}
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
func (l *LeveledLogrus) Error(msg string, keysAndValues ...interface{}) {
|
||||
l.WithFields(l.fields(keysAndValues...)).Error(msg)
|
||||
}
|
||||
|
||||
func (l *LeveledLogrus) Info(msg string, keysAndValues ...interface{}) {
|
||||
l.WithFields(l.fields(keysAndValues...)).Info(msg)
|
||||
}
|
||||
func (l *LeveledLogrus) Debug(msg string, keysAndValues ...interface{}) {
|
||||
l.WithFields(l.fields(keysAndValues...)).Debug(msg)
|
||||
}
|
||||
|
||||
func (l *LeveledLogrus) Warn(msg string, keysAndValues ...interface{}) {
|
||||
l.WithFields(l.fields(keysAndValues...)).Warn(msg)
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue