Introduce instance start date in api and billing #3204

Merged — 6 commits, merged Apr 18, 2023
Changes from all commits
@@ -33,6 +33,7 @@
import com.epam.pipeline.entity.pipeline.run.parameter.RunSid;
import com.epam.pipeline.entity.region.CloudProvider;
import com.epam.pipeline.entity.user.PipelineUser;
import com.epam.pipeline.entity.utils.DateUtils;
import com.fasterxml.jackson.core.type.TypeReference;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections4.ListUtils;
@@ -109,6 +110,7 @@ public class PipelineRunDao extends NamedParameterJdbcDaoSupport {
private String countFilteredPipelineRunsBaseQuery;
private String loadPipelineRunsWithPipelineByIdsQuery;
private String updateRunInstanceQuery;
private String updateRunInstanceStartDateQuery;
private String updatePodIPQuery;
private String loadRunsGroupingQuery;
private String loadRunsCountGroupingQuery;
@@ -267,6 +269,14 @@ public void updateRunInstance(PipelineRun run) {
.getParameters(run, getConnection()));
}

@Transactional(propagation = Propagation.MANDATORY)
public void updateRunInstanceStartDate(final Long id, final LocalDateTime date) {
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue(PipelineRunParameters.RUN_ID.name(), id);
params.addValue(PipelineRunParameters.NODE_START_DATE.name(), DateUtils.convertLocalDateTimeToDate(date));
getNamedParameterJdbcTemplate().update(updateRunInstanceStartDateQuery, params);
}

@Transactional(propagation = Propagation.REQUIRED)
public void updatePodIP(PipelineRun run) {
getNamedParameterJdbcTemplate().update(updatePodIPQuery, PipelineRunParameters
@@ -835,7 +845,8 @@ public enum PipelineRunParameters {
SENSITIVE,
KUBE_SERVICE_ENABLED,
CLUSTER_PRICE,
NODE_POOL_ID;
NODE_POOL_ID,
NODE_START_DATE;

public static final RunAccessType DEFAULT_ACCESS_TYPE = RunAccessType.ENDPOINT;

@@ -846,6 +857,7 @@ static MapSqlParameterSource getParameters(PipelineRun run, Connection connection) {
params.addValue(PIPELINE_ID.name(), run.getPipelineId());
params.addValue(VERSION.name(), run.getVersion());
params.addValue(START_DATE.name(), run.getStartDate());
params.addValue(NODE_START_DATE.name(), run.getInstanceStartDate());
params.addValue(END_DATE.name(), run.getEndDate());
params.addValue(PARAMETERS.name(), run.getParams());
params.addValue(STATUS.name(), run.getStatus().getId());
@@ -948,6 +960,10 @@ public static PipelineRun parsePipelineRun(ResultSet rs) throws SQLException {
run.setPipelineName(rs.getString(PIPELINE_NAME.name()));
run.setVersion(rs.getString(VERSION.name()));
run.setStartDate(new Date(rs.getTimestamp(START_DATE.name()).getTime()));
Timestamp instanceStartDate = rs.getTimestamp(NODE_START_DATE.name());
if (!rs.wasNull()) {
run.setInstanceStartDate(new Date(instanceStartDate.getTime()));
}
run.setParams(rs.getString(PARAMETERS.name()));
run.setStatus(TaskStatus.getById(rs.getLong(STATUS.name())));
run.setCommitStatus(CommitStatus.getById(rs.getLong(COMMIT_STATUS.name())));
@@ -1242,6 +1258,11 @@ public void setUpdateRunInstanceQuery(String updateRunInstanceQuery) {
this.updateRunInstanceQuery = updateRunInstanceQuery;
}

@Required
public void setUpdateRunInstanceStartDateQuery(String updateRunInstanceStartDateQuery) {
this.updateRunInstanceStartDateQuery = updateRunInstanceStartDateQuery;
}

@Required
public void setUpdatePodIPQuery(String updatePodIPQuery) {
this.updatePodIPQuery = updatePodIPQuery;
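The new updateRunInstanceStartDate method above binds the RUN_ID and NODE_START_DATE parameters and executes the named updateRunInstanceStartDateQuery, which is injected through the @Required setter further down in the same file. The query itself lives in the DAO XML configuration and is not part of this excerpt; a minimal sketch of what it presumably maps to, with the table name taken from the filter-dao.xml change below and the key column name assumed:

    UPDATE pipeline.pipeline_run
    SET node_start_date = :NODE_START_DATE
    WHERE run_id = :RUN_ID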
@@ -39,6 +39,7 @@
import com.epam.pipeline.manager.preference.PreferenceManager;
import com.epam.pipeline.manager.preference.SystemPreferences;
import com.epam.pipeline.manager.region.CloudRegionManager;
import com.epam.pipeline.utils.RunDurationUtils;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.apache.commons.collections4.ListUtils;
@@ -143,7 +144,7 @@ public InstancePrice getInstanceEstimatedPrice(Long id, String version, String c
long maximumDuration = -1;
long totalDurations = 0;
for (PipelineRun run : runs) {
long duration = run.getEndDate().getTime() - run.getStartDate().getTime();
long duration = RunDurationUtils.getBillableDuration(run).toMillis();
if (minimumDuration == -1 || minimumDuration > duration) {
minimumDuration = duration;
}
@@ -191,8 +192,7 @@ public PipelineRunPrice getPipelineRunEstimatedPrice(Long runId, Long regionId)
price.setPricePerHour(pricePerHour);

if (pipelineRun.getStatus().isFinal()) {
long duration = pipelineRun.getEndDate().getTime() - pipelineRun.getStartDate().getTime();
price.setTotalPrice(duration / ONE_HOUR * pricePerHour);
price.setTotalPrice(RunDurationUtils.getBillableDuration(pipelineRun).toMillis() / ONE_HOUR * pricePerHour);
} else {
price.setTotalPrice(0);
}
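With this change the estimated total price of a finished run is based on the billable duration (measured from the instance start date) instead of the full start-to-end lifetime. As a rough worked example, assuming ONE_HOUR is the hour length in milliseconds: a run whose node was billable for 90 minutes at 0.10 per hour yields 5,400,000 / 3,600,000 × 0.10 = 0.15, whereas the previous start-to-end calculation would also have charged for the time spent waiting for the node.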
@@ -41,6 +41,7 @@
import com.epam.pipeline.manager.preference.PreferenceManager;
import com.epam.pipeline.manager.preference.SystemPreferences;
import com.epam.pipeline.manager.scheduling.AbstractSchedulingManager;
import com.epam.pipeline.utils.RunDurationUtils;
import io.fabric8.kubernetes.api.model.ContainerStatus;
import io.fabric8.kubernetes.api.model.Node;
import io.fabric8.kubernetes.api.model.Pod;
@@ -292,7 +293,7 @@ private long runningDurationOf(final PipelineRun run) {
}

private long overallDurationOf(final PipelineRun run) {
return Duration.between(run.getStartDate().toInstant(), DateUtils.now().toInstant()).abs().getSeconds();
return RunDurationUtils.getOverallDuration(run).getSeconds();
}

private List<RunStatus> toSortedStatuses(final List<RunStatus> statuses) {
@@ -25,6 +25,7 @@
import com.epam.pipeline.entity.pipeline.run.parameter.PipelineRunParameter;
import com.epam.pipeline.entity.pipeline.run.parameter.RuntimeParameter;
import com.epam.pipeline.entity.pipeline.run.parameter.RuntimeParameterType;
import com.epam.pipeline.entity.utils.DateUtils;
import com.epam.pipeline.exception.CmdExecutionException;
import com.epam.pipeline.exception.git.GitClientException;
import com.epam.pipeline.manager.cloud.CloudFacade;
@@ -492,6 +493,7 @@ private void createNodeForRun(List<CompletableFuture<Void>> tasks, String runId,
.scaleUpNode(longId, requiredInstance.getInstance(), requiredInstance.getRuntimeParameters());
//save instance ID and IP
pipelineRunManager.updateRunInstance(longId, instance);
pipelineRunManager.updateRunInstanceStartDate(longId, DateUtils.nowUTC());
autoscalerService.registerDisks(longId, instance);
Instant end = Instant.now();
removeNodeUpTask(longId);
@@ -23,7 +23,6 @@
import com.epam.pipeline.entity.configuration.PipelineConfiguration;
import com.epam.pipeline.entity.pipeline.PipelineRun;
import com.epam.pipeline.entity.pipeline.RunInstance;
import com.epam.pipeline.entity.utils.DateUtils;
import com.epam.pipeline.manager.cloud.CloudFacade;
import com.epam.pipeline.manager.cluster.KubernetesConstants;
import com.epam.pipeline.manager.cluster.NodeDiskManager;
@@ -164,7 +163,7 @@ public void registerDisks(final Long runId, final RunInstance instance) {
private void registerNodeDisks(long runId, List<InstanceDisk> disks) {
PipelineRun run = runCRUDService.loadRunById(runId);
String nodeId = run.getInstance().getNodeId();
LocalDateTime creationDate = DateUtils.convertDateToLocalDateTime(run.getStartDate());
LocalDateTime creationDate = run.getInstanceStartDateTime();
List<DiskRegistrationRequest> requests = DiskRegistrationRequest.from(disks);
nodeDiskManager.register(nodeId, creationDate, requests);
}
@@ -24,6 +24,7 @@
import com.epam.pipeline.entity.cluster.pool.filter.instancefilter.PoolInstanceFilterType;
import com.epam.pipeline.entity.pipeline.PipelineRun;
import com.epam.pipeline.entity.pipeline.RunInstance;
import com.epam.pipeline.entity.utils.DateUtils;
import com.epam.pipeline.manager.cloud.CloudFacade;
import com.epam.pipeline.manager.cluster.KubernetesConstants;
import com.epam.pipeline.manager.cluster.autoscale.filter.PoolFilterHandler;
@@ -184,6 +185,7 @@ private boolean reassignInstance(final String newNodeId,
cloudFacade.describeInstance(runId, instance) : instance;
reassignedInstance.setPoolId(instance.getPoolId());
pipelineRunManager.updateRunInstance(runId, reassignedInstance);
pipelineRunManager.updateRunInstanceStartDate(runId, DateUtils.nowUTC());
final List<InstanceDisk> disks = cloudFacade.loadDisks(reassignedInstance.getCloudRegionId(),
runId);
autoscalerService.adjustRunPrices(runId, disks);
@@ -17,9 +17,9 @@
package com.epam.pipeline.manager.cluster.costs;

import com.epam.pipeline.entity.pipeline.PipelineRun;
import com.epam.pipeline.entity.utils.DateUtils;
import com.epam.pipeline.manager.pipeline.PipelineRunCRUDService;
import com.epam.pipeline.manager.pipeline.PipelineRunManager;
import com.epam.pipeline.utils.RunDurationUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import net.javacrumbs.shedlock.core.SchedulerLock;
@@ -28,11 +28,9 @@

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.stream.Collectors;

@@ -79,17 +77,7 @@ private BigDecimal estimatePriceForRun(final PipelineRun run) {
return BigDecimal.ZERO;
}
return run.getPricePerHour()
.multiply(durationInMinutes(run))
.multiply(BigDecimal.valueOf(RunDurationUtils.getBillableDuration(run).toMinutes()))
.divide(BigDecimal.valueOf(MINUTES_IN_HOUR), DIVIDE_SCALE, RoundingMode.HALF_UP);
}

private BigDecimal durationInMinutes(final PipelineRun run) {
if (Objects.isNull(run.getStartDate())) {
return BigDecimal.ZERO;
}

final Date pipelineEnd = Objects.isNull(run.getEndDate()) ? DateUtils.now() : run.getEndDate();
final long runDurationMs = pipelineEnd.getTime() - run.getStartDate().getTime();
return new BigDecimal(TimeUnit.MINUTES.convert(runDurationMs, TimeUnit.MILLISECONDS));
}
}
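The inlined expression keeps the same formula as the removed durationInMinutes helper, price per hour × minutes / 60, but now measures minutes from the instance start date via RunDurationUtils. For illustration, with assumed values pricePerHour = 0.12 and a billable duration of 95 minutes, the estimate is 0.12 × 95 / 60 = 0.19, rounded HALF_UP at DIVIDE_SCALE.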
@@ -627,6 +627,11 @@ public PipelineRun updateRunInstance(Long id, RunInstance instance) {
return pipelineRun;
}

@Transactional(propagation = Propagation.REQUIRED)
public void updateRunInstanceStartDate(Long id, LocalDateTime date) {
pipelineRunDao.updateRunInstanceStartDate(id, date);
}

@Transactional(propagation = Propagation.REQUIRED)
public PipelineRun updatePipelineStatusIfNotFinalExternal(Long runId, TaskStatus status) {
return updatePipelineStatusIfNotFinal(runId, status);
@@ -16,12 +16,11 @@

package com.epam.pipeline.mapper;

import java.time.Duration;
import java.util.Map;

import com.epam.pipeline.config.JsonMapper;
import com.epam.pipeline.entity.pipeline.PipelineRun;
import com.epam.pipeline.entity.utils.DateUtils;
import com.epam.pipeline.utils.RunDurationUtils;
import org.apache.commons.lang3.StringUtils;

public final class PipelineRunMapper {
@@ -64,6 +63,6 @@ public static Map<String, Object> map(PipelineRun run) {
}

private static long overallDurationOf(PipelineRun run) {
return Duration.between(run.getStartDate().toInstant(), DateUtils.now().toInstant()).abs().getSeconds();
return RunDurationUtils.getOverallDuration(run).getSeconds();
}
}
28 changes: 28 additions & 0 deletions api/src/main/java/com/epam/pipeline/utils/RunDurationUtils.java
@@ -0,0 +1,28 @@
package com.epam.pipeline.utils;

import com.epam.pipeline.entity.pipeline.PipelineRun;
import com.epam.pipeline.entity.utils.DateUtils;

import java.time.Duration;
import java.util.Date;
import java.util.Optional;

public final class RunDurationUtils {

private RunDurationUtils() {
}

public static Duration getOverallDuration(final PipelineRun run) {
return durationBetween(run.getStartDate(), run.getEndDate());
}

public static Duration getBillableDuration(final PipelineRun run) {
return durationBetween(run.getInstanceStartDate(), run.getEndDate());
}

private static Duration durationBetween(final Date from, final Date to) {
final Date end = Optional.ofNullable(to).orElseGet(DateUtils::now);
final Date start = Optional.ofNullable(from).orElse(end);
return Duration.ofMillis(end.getTime() - start.getTime()).abs();
}
}
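A short usage sketch to make the two durations concrete. The start-date and instance-start-date setters appear elsewhere in this diff; the no-arg constructor, the end-date setter, and the literal dates are assumptions for illustration only:

    import com.epam.pipeline.entity.pipeline.PipelineRun;
    import com.epam.pipeline.utils.RunDurationUtils;

    import java.time.Instant;
    import java.util.Date;

    final PipelineRun run = new PipelineRun();
    run.setStartDate(Date.from(Instant.parse("2023-04-01T10:00:00Z")));          // run submitted
    run.setInstanceStartDate(Date.from(Instant.parse("2023-04-01T10:07:00Z")));  // node became available
    run.setEndDate(Date.from(Instant.parse("2023-04-01T11:07:00Z")));            // run finished

    RunDurationUtils.getOverallDuration(run);   // PT1H7M  - full run lifetime
    RunDurationUtils.getBillableDuration(run);  // PT1H    - excludes the node start-up wait

    // A missing end date falls back to "now"; a missing instance start date
    // collapses the billable duration to zero.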
3 changes: 2 additions & 1 deletion api/src/main/resources/dao/filter-dao.xml
@@ -74,7 +74,8 @@
r.kube_service_enabled,
r.pipeline_name,
r.cluster_price,
r.node_pool_id
r.node_pool_id,
r.node_start_date
FROM
pipeline.pipeline_run r
WHERE @WHERE@
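The new r.node_start_date column referenced here implies a matching schema change on pipeline.pipeline_run. The migration script is not included in this excerpt; a minimal sketch of what it would need, with the column type assumed to mirror the existing start_date column:

    ALTER TABLE pipeline.pipeline_run
        ADD COLUMN node_start_date TIMESTAMP NULL;  -- nullable: pre-existing runs have no value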