From 1c5fa40a7892efa2876f8741446e9afeb92bb705 Mon Sep 17 00:00:00 2001 From: Martin Holst Swende Date: Tue, 14 Feb 2023 03:08:06 -0500 Subject: [PATCH] cmd/devp2p: reduce output of node crawler (#26674) Our discovery crawler spits out a huge amount of logs, most of which is pretty non-interesting. This change moves the very verbose output to Debug, and adds an 8-second status log message giving the general idea about what's going on. --- cmd/devp2p/crawl.go | 53 ++++++++++++++++++++++++++++++++++++++------- 1 file changed, 45 insertions(+), 8 deletions(-) diff --git a/cmd/devp2p/crawl.go b/cmd/devp2p/crawl.go index 9259b4894c..0d8127684e 100644 --- a/cmd/devp2p/crawl.go +++ b/cmd/devp2p/crawl.go @@ -36,6 +36,14 @@ type crawler struct { revalidateInterval time.Duration } +const ( + nodeRemoved = iota + nodeSkipRecent + nodeSkipIncompat + nodeAdded + nodeUpdated +) + type resolver interface { RequestENR(*enode.Node) (*enode.Node, error) } @@ -63,19 +71,39 @@ func (c *crawler) run(timeout time.Duration) nodeSet { var ( timeoutTimer = time.NewTimer(timeout) timeoutCh <-chan time.Time + statusTicker = time.NewTicker(time.Second * 8) doneCh = make(chan enode.Iterator, len(c.iters)) liveIters = len(c.iters) ) defer timeoutTimer.Stop() + defer statusTicker.Stop() for _, it := range c.iters { go c.runIterator(doneCh, it) } + var ( + added int + updated int + skipped int + recent int + removed int + ) loop: for { select { case n := <-c.ch: - c.updateNode(n) + switch c.updateNode(n) { + case nodeSkipIncompat: + skipped++ + case nodeSkipRecent: + recent++ + case nodeRemoved: + removed++ + case nodeAdded: + added++ + default: + updated++ + } case it := <-doneCh: if it == c.inputIter { // Enable timeout when we're done revalidating the input nodes. 
@@ -89,6 +117,10 @@ loop: } case <-timeoutCh: break loop + case <-statusTicker.C: + log.Info("Crawling in progress", + "added", added, "updated", updated, "removed", removed, + "ignored(recent)", recent, "ignored(incompatible)", skipped) } } @@ -113,22 +145,25 @@ func (c *crawler) runIterator(done chan<- enode.Iterator, it enode.Iterator) { } } -func (c *crawler) updateNode(n *enode.Node) { +// updateNode updates the info about the given node, and returns a status +// about what changed +func (c *crawler) updateNode(n *enode.Node) int { node, ok := c.output[n.ID()] // Skip validation of recently-seen nodes. if ok && time.Since(node.LastCheck) < c.revalidateInterval { - return + return nodeSkipRecent } // Request the node record. nn, err := c.disc.RequestENR(n) node.LastCheck = truncNow() + status := nodeUpdated if err != nil { if node.Score == 0 { // Node doesn't implement EIP-868. log.Debug("Skipping node", "id", n.ID()) - return + return nodeSkipIncompat } node.Score /= 2 } else { @@ -137,18 +172,20 @@ func (c *crawler) updateNode(n *enode.Node) { node.Score++ if node.FirstResponse.IsZero() { node.FirstResponse = node.LastCheck + status = nodeAdded } node.LastResponse = node.LastCheck } // Store/update node in output set. if node.Score <= 0 { - log.Info("Removing node", "id", n.ID()) + log.Debug("Removing node", "id", n.ID()) delete(c.output, n.ID()) - } else { - log.Info("Updating node", "id", n.ID(), "seq", n.Seq(), "score", node.Score) - c.output[n.ID()] = node + return nodeRemoved } + log.Debug("Updating node", "id", n.ID(), "seq", n.Seq(), "score", node.Score) + c.output[n.ID()] = node + return status } func truncNow() time.Time {