func reconcile has a cyclomatic complexity of 28 with "very-high" risk

159     return nil, nil
160 }
161
162 func (e *etcdSnapshotHandler) reconcile() error {
163     // Get a list of all etcd nodes currently in the cluster.
164     // We will use this list to prune local entries for any node that does not exist.
165     nodes := e.etcd.config.Runtime.Core.Core().V1().Node()
func sync has a cyclomatic complexity of 16 with "high" risk

57      go wait.JitterUntil(func() { snapshots.Enqueue(reconcileKey) }, reconcileInterval, 0.04, false, ctx.Done())
58  }
59
60  func (e *etcdSnapshotHandler) sync(key string, esf *apisv1.ETCDSnapshotFile) (*apisv1.ETCDSnapshotFile, error) {
61      if key == reconcileKey {
62          err := e.reconcile()
63          if err == errNotReconciled {
func manageLearners has a cyclomatic complexity of 18 with "high" risk

1022 // manageLearners monitors the etcd cluster to ensure that learners are making progress towards
1023 // being promoted to full voting member. The checks only run on the cluster member that is
1024 // the etcd leader.
1025 func (e *ETCD) manageLearners(ctx context.Context) {
1026     <-e.config.Runtime.AgentReady
1027     t := time.NewTicker(manageTickerTime)
1028     defer t.Stop()
func join has a cyclomatic complexity of 17 with "high" risk

494 }
495
496 // join attempts to add a member to an existing cluster
497 func (e *ETCD) join(ctx context.Context, clientAccessInfo *clientaccess.Info) error {
498     clientCtx, cancel := context.WithTimeout(ctx, 20*time.Second)
499     defer cancel()
500
func Start has a cyclomatic complexity of 18 with "high" risk

394 }
395
396 // Start starts the datastore
397 func (e *ETCD) Start(ctx context.Context, clientAccessInfo *clientaccess.Info) error {
398     isInitialized, err := e.IsInitialized()
399     if err != nil {
400         return errors.Wrapf(err, "failed to check for initialized etcd datastore")
A function with high cyclomatic complexity can be hard to understand and maintain. Cyclomatic complexity is a software metric that measures the number of independent paths through a function. A higher cyclomatic complexity indicates that the function has more decision points and is more complex.
Functions with high cyclomatic complexity are more likely to have bugs and be harder to test. They may lead to reduced code maintainability and increased development time.
To reduce the cyclomatic complexity of a function, you can: extract independent pieces of logic into smaller helper functions, use early returns (guard clauses) to flatten nested conditionals, merge duplicated branches, and replace long if/else or switch chains with simpler equivalent logic or table-driven lookups. For example, the following function has a high cyclomatic complexity:
package main
import "log"
func fizzbuzzfuzz(x int) { // cc = 1
if x == 0 || x < 0 { // cc = 3 (if, ||)
return
}
for i := 1; i <= x; i++ { // cc = 4 (for)
switch i % 15 * 2 {
case 0: // cc = 5 (case)
countDiv3 += 1
countDiv5 += 1
log.Println("fizzbuzz")
break
case 3:
case 6:
case 9:
case 12: // cc = 9 (case)
countDiv3 += 1
log.Println("fizz")
break
case 5:
case 10: // cc = 11 (case)
countDiv5 += 1
log.Println("buzz")
break
default:
log.Printf("%d\n", x)
}
}
} // CC == 11; raises issues
package main
import "log"
func fizzbuzz(x int) { // cc = 1
for i := 1; i <= x; i++ { // cc = 2 (for)
y := i%3 == 0
z := i%5 == 0
if y == z { // 3
if y == false { // 4
log.Printf("%d\n", i)
} else {
log.Println("fizzbuzz")
}
} else {
if y { // 5
log.Println("fizz")
} else {
log.Println("buzz")
}
}
}
} // CC == 5
Cyclomatic complexity threshold can be configured using the
cyclomatic_complexity_threshold
(docs) in the
.deepsource.toml
config file.
Configuring this is optional. If you don't provide a value, the Analyzer will
raise issues for functions with a cyclomatic complexity above the default threshold,
which is medium for the Go Analyzer — that is, issues are raised only for
functions whose score is greater than 15.
Here's the mapping of the risk category to the cyclomatic complexity score to help you configure this better:
Risk category | Cyclomatic complexity range | Recommended action |
---|---|---|
low | 1-5 | No action needed. |
medium | 6-15 | Review and monitor. |
high | 16-25 | Review and refactor. Recommended to add comments if the function is absolutely needed to be kept as it is. |
very-high | 26-50 | Refactor to reduce the complexity. |
critical | >50 | Must refactor this. This can make the code untestable and very difficult to understand. |