Merge pull request #14466 from influxdata/revert_revert
fix(task): push task backend logging messages back to debug (pull/14475/head)
commit a96194797d
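This commit demotes the task backend's per-run lifecycle messages from Info to Debug, so they only appear when the process runs with debug logging enabled. The sketch below is not part of the commit; it is a minimal illustration of how zap's level gating makes that demotion effective, assuming a production-style config whose level defaults to Info.

```go
package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// Production-style config; at InfoLevel, Debug entries are dropped entirely.
	cfg := zap.NewProductionConfig()
	cfg.Level = zap.NewAtomicLevelAt(zapcore.InfoLevel)

	logger, err := cfg.Build()
	if err != nil {
		panic(err)
	}
	defer logger.Sync()

	logger.Info("Created run; beginning execution")  // emitted
	logger.Debug("Created run; beginning execution") // silently dropped at InfoLevel

	// The AtomicLevel can be raised at runtime to surface the demoted messages again.
	cfg.Level.SetLevel(zapcore.DebugLevel)
	logger.Debug("Execution succeeded") // now emitted
}
```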
@@ -142,11 +142,11 @@ func (p *syncRunPromise) finish(res *runResult, err error) {
 		close(p.ready)

 		if err != nil {
-			p.logger.Info("Execution failed to get result", zap.Error(err))
+			p.logger.Debug("Execution failed to get result", zap.Error(err))
 		} else if res.err != nil {
-			p.logger.Info("Got result with error", zap.Error(res.err))
+			p.logger.Debug("Got result with error", zap.Error(res.err))
 		} else {
-			p.logger.Info("Completed successfully")
+			p.logger.Debug("Completed successfully")
 		}
 	})
 }
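The context lines in this hunk (close(p.ready) inside a closure that ends with `})`) suggest a once-guarded promise that records a result, unblocks waiters, and logs the outcome. The following is a simplified, hypothetical sketch of that pattern, not the PR's actual type; only the log messages are taken from the diff.

```go
package main

import (
	"errors"
	"sync"

	"go.uber.org/zap"
)

type runResult struct{ err error }

type runPromise struct {
	once   sync.Once
	ready  chan struct{}
	res    *runResult
	err    error
	logger *zap.Logger
}

// finish records the result exactly once, closes the ready channel so
// waiters unblock, and logs the outcome at Debug level.
func (p *runPromise) finish(res *runResult, err error) {
	p.once.Do(func() {
		p.res, p.err = res, err
		close(p.ready)

		if err != nil {
			p.logger.Debug("Execution failed to get result", zap.Error(err))
		} else if res.err != nil {
			p.logger.Debug("Got result with error", zap.Error(res.err))
		} else {
			p.logger.Debug("Completed successfully")
		}
	})
}

func main() {
	logger, _ := zap.NewDevelopment() // development config logs at Debug level
	defer logger.Sync()

	p := &runPromise{ready: make(chan struct{}), logger: logger}
	p.finish(&runResult{err: errors.New("query failed")}, nil)
	<-p.ready // waiters unblock once finish has run
}
```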
@@ -378,7 +378,7 @@ func (p *asyncRunPromise) finish(res *runResult, err error) {
 		} else if res.err != nil {
 			p.logger.Info("Got result with error", zap.Error(res.err))
 		} else {
-			p.logger.Info("Completed successfully")
+			p.logger.Debug("Completed successfully")
 		}
 	})
 }
@@ -679,7 +679,7 @@ func (r *runner) startFromWorking(now int64) {
 	// and we'll quickly end up with many run_ids associated with the log.
 	runLogger := r.logger.With(logger.TraceID(ctx), zap.String("run_id", qr.RunID.String()), zap.Int64("now", qr.Now))

-	runLogger.Info("Created run; beginning execution")
+	runLogger.Debug("Created run; beginning execution")
 	r.wg.Add(1)
 	go r.executeAndWait(ctx, qr, runLogger)
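This hunk builds a child logger with logger.With, pre-binding the run ID and the scheduled "now" timestamp so every later message from that run carries them without repeating the fields at each call site. The sketch below shows only that zap technique; it omits influxdb's own logger.TraceID(ctx) field and uses placeholder values.

```go
package main

import "go.uber.org/zap"

func main() {
	base, _ := zap.NewDevelopment()
	defer base.Sync()

	// Child logger: the fields below are attached to every entry it writes.
	runLogger := base.With(
		zap.String("run_id", "000000000000000a"), // placeholder, not a real run ID
		zap.Int64("now", 1563480000),             // placeholder scheduled timestamp
	)

	runLogger.Debug("Created run; beginning execution")
	runLogger.Debug("Execution succeeded")
}
```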
@@ -784,7 +784,7 @@ func (r *runner) executeAndWait(ctx context.Context, qr QueuedRun, runLogger *za
 		r.taskControlService.AddRunLog(authCtx, r.task.ID, qr.RunID, time.Now(), string(b))
 	}
 	r.updateRunState(qr, RunSuccess, runLogger)
-	runLogger.Info("Execution succeeded")
+	runLogger.Debug("Execution succeeded")

 	// Check again if there is a new run available, without returning to idle state.
 	r.startFromWorking(atomic.LoadInt64(r.ts.now))
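The last context line re-checks for runnable work using a timestamp read with atomic.LoadInt64. Below is a minimal sketch of that lock-free shared-clock technique, under the assumption that one goroutine publishes the scheduler's current time and workers read it without a mutex; the names are illustrative, not the PR's code.

```go
package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

func main() {
	now := new(int64) // shared scheduler clock, updated by one goroutine

	// Publisher: store the current Unix time.
	atomic.StoreInt64(now, time.Now().Unix())

	// Worker: read it lock-free before checking for runs that are due.
	latest := atomic.LoadInt64(now)
	fmt.Println("checking for runs due at or before", latest)
}
```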