W-17464266: code coverage (#1019)
* W-17464266: Enable jacoco code coverage and bump up coverage above 80%
peterzxu-crm authored Jan 9, 2025
1 parent 2c00a5e commit 0df8df8
Showing 95 changed files with 3,581 additions and 140 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/gradle.yml
@@ -20,7 +20,7 @@ jobs:
if: ${{ !contains(github.ref, 'dependabot') }}
run: echo "${{ secrets.SALESFORCE_DOCKER_HUB_SECRET }}" | docker login --username sholavanalli508 --password-stdin
- name: Build with Gradle
run: ./gradlew build --info --stacktrace
run: ./gradlew build printCoverageReport --info --stacktrace
- name: Publish Failure Test Report
if: ${{ failure() }} # doesn't run unless it specifically grabs the failure condition of the previous step..
uses: scacap/action-surefire-report@v1
4 changes: 2 additions & 2 deletions carbonj.service/Dockerfile
@@ -18,8 +18,8 @@ RUN yum update -y && \
yum install -y gcc-c++ gcc make libtool automake autoconf make python3-devel && \
rpm --import http://repos.azulsystems.com/RPM-GPG-KEY-azulsystems && \
yum install -y https://cdn.azul.com/zulu/bin/zulu-repo-1.0.0-1.noarch.rpm && \
yum install -y https://mirror.stream.centos.org/9-stream/AppStream/$(uname -m)/os/Packages/pcp-conf-6.2.1-1.el9.$(uname -m).rpm && \
yum install -y https://mirror.stream.centos.org/9-stream/AppStream/$(uname -m)/os/Packages/pcp-libs-6.2.1-1.el9.$(uname -m).rpm && \
yum install -y https://mirror.stream.centos.org/9-stream/AppStream/$(uname -m)/os/Packages/pcp-conf-6.2.2-6.el9.$(uname -m).rpm && \
yum install -y https://mirror.stream.centos.org/9-stream/AppStream/$(uname -m)/os/Packages/pcp-libs-6.2.2-6.el9.$(uname -m).rpm && \
#
# If sysstat version is updated, confirm iolog.sh execution and update associated version check in entrypoint.sh
#
26 changes: 25 additions & 1 deletion carbonj.service/build.gradle
@@ -40,6 +40,7 @@ task licenseCheckDockerFiles(type: com.hierynomus.gradle.license.tasks.LicenseFo
licenseMain.dependsOn licenseCheckDockerFiles

apply plugin: 'java'
apply plugin: 'jacoco'
apply plugin: 'eclipse'
apply plugin: 'idea'
apply plugin: 'io.spring.dependency-management'
@@ -225,17 +226,40 @@ dependencies {
}
implementation group:"org.springframework.boot", name:"spring-boot-starter-actuator", version: "${springbootVersion}"
testImplementation group:"org.springframework.boot", name:"spring-boot-starter-test", version: "${springbootVersion}"
testImplementation "org.testcontainers:junit-jupiter:${testContainers}"
testImplementation "org.testcontainers:localstack:${testContainers}"
testImplementation "software.amazon.kinesis:amazon-kinesis-client:${kinesisClient}"
}

test {
useJUnitPlatform()
maxHeapSize = "2g"
maxHeapSize = "3g"
jvmArgs = [
'--add-opens', 'java.base/java.util=ALL-UNNAMED',
]
environment "AWS_REGION", "us-east-1"
}

jacocoTestReport {
reports {
html.required = true
xml.required = true
csv.required = true
}
}

task printCoverageReport {
dependsOn jacocoTestReport
doLast {
def reportFile = file("${projectDir}/build/reports/jacoco/test/jacocoTestReport.csv")
if (reportFile.exists()) {
println reportFile.text
} else {
println "Coverage report not found!"
}
}
}
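
The new printCoverageReport task only prints the raw CSV report; nothing shown in this diff enforces the 80% floor mentioned in the commit title. A minimal sketch of how such a gate could be added with the JaCoCo plugin's built-in verification task (the 0.80 threshold and the wiring into check are assumptions for illustration, not part of this change):

// Hypothetical addition, not in this commit: fail the build when
// overall line coverage drops below 80%.
jacocoTestCoverageVerification {
    violationRules {
        rule {
            limit {
                counter = 'LINE'
                value = 'COVEREDRATIO'
                minimum = 0.80
            }
        }
    }
}
check.dependsOn jacocoTestCoverageVerification

With a rule like that in place, ./gradlew check (or the ./gradlew build printCoverageReport invocation the workflow now uses) would fail when coverage regresses instead of merely printing the report.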

tasks.withType(Test) {
testLogging {
// set options for log level LIFECYCLE
@@ -6,24 +6,17 @@
*/
package com.demandware.carbonj.service.db.model;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import com.demandware.carbonj.service.accumulator.MetricAggregationPolicy;
import com.google.common.base.Preconditions;

import com.demandware.carbonj.service.strings.StringsCache;

public class MsgPackMetric
{
final public String path;
final public boolean isLeaf;
public String path;
public boolean isLeaf;

public MsgPackMetric() {
}

public MsgPackMetric( Metric metric )
{
this.path = metric.name;
this.isLeaf = metric.isLeaf();
}
}
}
@@ -7,7 +7,6 @@
package com.demandware.carbonj.service.db.model;

import java.util.List;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;

@@ -32,12 +31,7 @@ public class MsgPackSeries

public MsgPackSeries( Series series)
{
this.start = series.start;
this.end = series.end;
this.step = series.step;
this.name = series.name;
this.pathExpression = series.name;
this.values = series.values;
this(series.start, series.end, series.step, series.name, series.name, series.values);
}

@JsonCreator
@@ -66,4 +60,4 @@ public String toString()
", values=" + values +
'}';
}
}
}
@@ -8,7 +8,6 @@

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -26,9 +25,9 @@ public class StorageAggregationPolicySource
private static final Logger log = LoggerFactory.getLogger( StorageAggregationPolicySource.class );

// reuse same instance across multiple metrics.
private CopyOnWriteArrayList<AggregationPolicy> policies = new CopyOnWriteArrayList<>( );
private final CopyOnWriteArrayList<AggregationPolicy> policies = new CopyOnWriteArrayList<>( );

private StorageAggregationRulesLoader rulesLoader;
private final StorageAggregationRulesLoader rulesLoader;

public StorageAggregationPolicySource( StorageAggregationRulesLoader rulesLoader)
{
@@ -75,15 +74,11 @@ public synchronized void cleanup()
{
log.info("checking for obsolete aggregation policies to remove from cache");
List<AggregationPolicy> obsolete = policies.stream()
.filter( p -> p.configChanged() )
.collect( Collectors.toList());
.filter(AggregationPolicy::configChanged)
.toList();
// no need to keep policies that represent obsolete config.
policies.removeAll( obsolete );
log.info("purged obsolete aggregation policies from cache. Number of obsolete policies found: "
+ obsolete.size() + ", total number of policies after purge: " + policies.size());
}

public StorageAggregationRulesLoader getRulesLoader() {
return rulesLoader;
}
}

This file was deleted.

@@ -132,7 +132,7 @@ private void reconfigureConsumers(Set<String> newRules, Set<String> currentRules
kinesisApplicationName = kinesisApplicationNamePropValue;
}
} catch (FileNotFoundException e) {
log.warn(" config/" + consumerCfgFile + "not found in the classpath ");
log.warn(consumerCfgFile + " not found in the classpath ");
log.info(" Falling back to default values ");
} catch (Throwable e) {
log.error(e.getMessage(), e);
@@ -142,6 +142,10 @@ String getDest()
return dest;
}

LineProtocolDestination[] getDestinations() {
return destinations;
}

void close()
{
close( Arrays.asList(destinations) );
@@ -15,12 +15,14 @@
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
import com.amazonaws.services.kinesis.metrics.interfaces.MetricsLevel;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.demandware.carbonj.service.engine.kinesis.GzipDataPointCodec;
import com.demandware.carbonj.service.engine.kinesis.kcl.MemLeaseManager;
import com.demandware.carbonj.service.engine.recovery.*;
import com.google.common.base.Preconditions;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -45,16 +47,27 @@ public class KinesisConsumer extends Thread {

private Worker worker;

private PointProcessor recoveryPointProcessor;
private final PointProcessor recoveryPointProcessor;

private volatile boolean closed;

private String kinesisConsumerRegion;
private final String kinesisConsumerRegion;

private final String overrideKinesisEndpoint;

public KinesisConsumer(MetricRegistry metricRegistry, PointProcessor pointProcessor, PointProcessor recoveryPointProcessor,
String kinesisStreamName, String kinesisApplicationName,
KinesisConfig kinesisConfig, CheckPointMgr<Date> checkPointMgr,
Counter noOfRestarts, String kinesisConsumerRegion) {
this(metricRegistry, pointProcessor, recoveryPointProcessor, kinesisStreamName, kinesisApplicationName, kinesisConfig,
checkPointMgr, noOfRestarts, kinesisConsumerRegion, null);
}

public KinesisConsumer(MetricRegistry metricRegistry, PointProcessor pointProcessor, PointProcessor recoveryPointProcessor,
String kinesisStreamName, String kinesisApplicationName,
KinesisConfig kinesisConfig, CheckPointMgr<Date> checkPointMgr,
Counter noOfRestarts, String kinesisConsumerRegion,
String overrideKinesisEndpoint) {
this.metricRegistry = metricRegistry;
this.pointProcessor = Preconditions.checkNotNull(pointProcessor);
this.recoveryPointProcessor = recoveryPointProcessor;
@@ -64,6 +77,7 @@ public KinesisConsumer(MetricRegistry metricRegistry, PointProcessor pointProces
this.checkPointMgr = checkPointMgr;
this.noOfRestarts = noOfRestarts;
this.kinesisConsumerRegion = kinesisConsumerRegion;
this.overrideKinesisEndpoint = overrideKinesisEndpoint;
log.info("Kinesis consumer started");
this.start();
}
@@ -91,6 +105,12 @@ public void run () {
kinesisClientLibConfiguration.withMaxRecords(maxRecords);
}

// For testing only
if (!StringUtils.isEmpty(overrideKinesisEndpoint)) {
kinesisClientLibConfiguration.withKinesisEndpoint(overrideKinesisEndpoint);
kinesisClientLibConfiguration.withMetricsLevel(MetricsLevel.NONE);
}

log.info(" Kinesis Client Library started with application name " + kinesisApplicationName + " with stream "
+ kinesisStreamName + " and worker id is " + workerId);

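
The new overrideKinesisEndpoint parameter is marked for testing only, and the dependency block above now adds org.testcontainers:localstack, which suggests integration tests run against a local Kinesis stand-in. A minimal, assumed sketch of how build.gradle could hand such an endpoint to the test JVM (the property name and the LocalStack edge port are illustrative, not part of this change):

test {
    // Hypothetical wiring, not in this commit: expose a local Kinesis
    // substitute (e.g. LocalStack on its default edge port 4566) to tests.
    systemProperty 'test.kinesis.endpoint',
            System.getenv('KINESIS_ENDPOINT') ?: 'http://localhost:4566'
}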
@@ -18,7 +18,6 @@
import com.codahale.metrics.MetricRegistry;
import com.demandware.carbonj.service.engine.DataPoint;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.io.Closeables;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
@@ -28,7 +27,7 @@ public class LineProtocolDestinationSocket
extends Destination
implements LineProtocolDestination
{
private static Logger log = LoggerFactory.getLogger( LineProtocolDestinationSocket.class );
private static final Logger log = LoggerFactory.getLogger( LineProtocolDestinationSocket.class );

private final MetricRegistry metricRegistry;
final String ip;
@@ -126,7 +125,7 @@ void close()
}
catch ( InterruptedException e )
{
throw Throwables.propagate( e );
throw new RuntimeException(e);
}
}

@@ -199,25 +198,24 @@ public Number getValue()
{
Closeables.close( pw, true );
}
catch ( IOException e2 )
{
catch ( IOException ignored) {
}
pw = null;
try
{
Closeables.close( sock, true );
}
catch ( IOException e2 )
{
catch ( IOException ignored) {
}
sock = null;
try
{
//noinspection BusyWait
Thread.sleep( 1000 ); // try reconnect in a sec
}
catch ( InterruptedException e1 )
{
throw Throwables.propagate( e );
throw new RuntimeException(e1);
}
}
finally
@@ -236,8 +234,7 @@
{
Closeables.close( sock, true );
}
catch ( IOException e )
{
catch ( IOException ignored) {
}
metricRegistry.remove(name);
log.info("Exited " + name);