Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.apache.cloudstack.context.CallContext;

import com.cloud.event.Event;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.user.Account;

@APICommand(name = "archiveEvents", description = "Archive one or more events.", responseObject = SuccessResponse.class, entityType = {Event.class},
Expand Down Expand Up @@ -82,6 +81,22 @@ public String getType() {
return type;
}

/**
 * Sets the IDs of the events to archive.
 */
public void setIds(final List<Long> ids) {
    this.ids = ids;
}

/**
 * Sets the end of the creation-date interval used to select events to archive.
 */
public void setEndDate(final Date endDate) {
    this.endDate = endDate;
}

/**
 * Sets the start of the creation-date interval used to select events to archive.
 */
public void setStartDate(final Date startDate) {
    this.startDate = startDate;
}

/**
 * Sets the event type used to select events to archive.
 */
public void setType(final String type) {
    this.type = type;
}

/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////
Expand All @@ -97,17 +112,12 @@ public long getEntityOwnerId() {

@Override
public void execute() {
if (ids == null && type == null && endDate == null) {
throw new InvalidParameterValueException("either ids, type or enddate must be specified");
} else if (startDate != null && endDate == null) {
throw new InvalidParameterValueException("enddate must be specified with startdate parameter");
}
boolean result = _mgr.archiveEvents(this);
if (result) {
SuccessResponse response = new SuccessResponse(getCommandName());
setResponseObject(response);
} else {
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Unable to archive Events, one or more parameters has invalid values");
return;
}
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Unable to archive Events. One or more parameters have invalid values.");
}
Comment thread
winterhazel marked this conversation as resolved.
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@
import org.apache.cloudstack.context.CallContext;

import com.cloud.event.Event;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.user.Account;

@APICommand(name = "deleteEvents", description = "Delete one or more events.", responseObject = SuccessResponse.class, entityType = {Event.class},
Expand Down Expand Up @@ -82,6 +81,22 @@ public String getType() {
return type;
}

/**
 * Sets the IDs of the events to delete.
 */
public void setIds(final List<Long> ids) {
    this.ids = ids;
}

/**
 * Sets the end of the creation-date interval used to select events to delete.
 */
public void setEndDate(final Date endDate) {
    this.endDate = endDate;
}

/**
 * Sets the start of the creation-date interval used to select events to delete.
 */
public void setStartDate(final Date startDate) {
    this.startDate = startDate;
}

/**
 * Sets the event type used to select events to delete.
 */
public void setType(final String type) {
    this.type = type;
}

// ///////////////////////////////////////////////////
// ///////////// API Implementation///////////////////
// ///////////////////////////////////////////////////
Expand All @@ -97,17 +112,12 @@ public long getEntityOwnerId() {

@Override
public void execute() {
if (ids == null && type == null && endDate == null) {
throw new InvalidParameterValueException("either ids, type or enddate must be specified");
} else if (startDate != null && endDate == null) {
throw new InvalidParameterValueException("enddate must be specified with startdate parameter");
}
boolean result = _mgr.deleteEvents(this);
if (result) {
SuccessResponse response = new SuccessResponse(getCommandName());
setResponseObject(response);
} else {
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Unable to delete Events, one or more parameters has invalid values");
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Unable to delete any events. One or more parameters have invalid values.");
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,6 @@ public String getState() {

@Override
public void execute() {

ListResponse<EventResponse> response = _queryService.searchForEvents(this);
response.setResponseName(getCommandName());
setResponseObject(response);
Expand Down
15 changes: 4 additions & 11 deletions engine/schema/src/main/java/com/cloud/event/dao/EventDao.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,19 +20,12 @@
import java.util.List;

import com.cloud.event.EventVO;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDao;
import com.cloud.utils.db.SearchCriteria;

public interface EventDao extends GenericDao<EventVO, Long> {
public List<EventVO> searchAllEvents(SearchCriteria<EventVO> sc, Filter filter);

public List<EventVO> listOlderEvents(Date oldTime);

EventVO findCompletedEvent(long startId);

public List<EventVO> listToArchiveOrDeleteEvents(List<Long> ids, String type, Date startDate, Date endDate, List<Long> accountIds);

public void archiveEvents(List<EventVO> events);
long archiveEvents(List<Long> ids, String type, Date startDate, Date endDate, Long accountId, List<Long> domainIds,
long limitPerQuery);

long purgeAll(List<Long> ids, Date startDate, Date endDate, Date limitDate, String type, Long accountId, List<Long> domainIds,
long limitPerQuery);
}
120 changes: 66 additions & 54 deletions engine/schema/src/main/java/com/cloud/event/dao/EventDaoImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,20 @@
// under the License.
package com.cloud.event.dao;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;


import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.exception.CloudRuntimeException;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.stereotype.Component;

import com.cloud.event.Event.State;
import com.cloud.event.EventVO;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
Expand All @@ -33,83 +38,90 @@

@Component
public class EventDaoImpl extends GenericDaoBase<EventVO, Long> implements EventDao {
protected final SearchBuilder<EventVO> CompletedEventSearch;

protected final SearchBuilder<EventVO> ToArchiveOrDeleteEventSearch;

public EventDaoImpl() {
CompletedEventSearch = createSearchBuilder();
CompletedEventSearch.and("state", CompletedEventSearch.entity().getState(), SearchCriteria.Op.EQ);
CompletedEventSearch.and("startId", CompletedEventSearch.entity().getStartId(), SearchCriteria.Op.EQ);
CompletedEventSearch.and("archived", CompletedEventSearch.entity().getArchived(), Op.EQ);
CompletedEventSearch.done();

ToArchiveOrDeleteEventSearch = createSearchBuilder();
ToArchiveOrDeleteEventSearch.select("id", SearchCriteria.Func.NATIVE, ToArchiveOrDeleteEventSearch.entity().getId());
ToArchiveOrDeleteEventSearch.and("id", ToArchiveOrDeleteEventSearch.entity().getId(), Op.IN);
ToArchiveOrDeleteEventSearch.and("type", ToArchiveOrDeleteEventSearch.entity().getType(), Op.EQ);
ToArchiveOrDeleteEventSearch.and("accountIds", ToArchiveOrDeleteEventSearch.entity().getAccountId(), Op.IN);
ToArchiveOrDeleteEventSearch.and("accountId", ToArchiveOrDeleteEventSearch.entity().getAccountId(), Op.EQ);
ToArchiveOrDeleteEventSearch.and("domainIds", ToArchiveOrDeleteEventSearch.entity().getDomainId(), Op.IN);
ToArchiveOrDeleteEventSearch.and("createdDateB", ToArchiveOrDeleteEventSearch.entity().getCreateDate(), Op.BETWEEN);
ToArchiveOrDeleteEventSearch.and("createdDateL", ToArchiveOrDeleteEventSearch.entity().getCreateDate(), Op.LTEQ);
ToArchiveOrDeleteEventSearch.and("createdDateLT", ToArchiveOrDeleteEventSearch.entity().getCreateDate(), Op.LT);
ToArchiveOrDeleteEventSearch.and("archived", ToArchiveOrDeleteEventSearch.entity().getArchived(), Op.EQ);
ToArchiveOrDeleteEventSearch.done();
}

@Override
public List<EventVO> searchAllEvents(SearchCriteria<EventVO> sc, Filter filter) {
    // Thin delegate: runs the given criteria through the generic DAO search
    // that also includes soft-removed rows.
    return listIncludingRemovedBy(sc, filter);
}

/**
 * Lists all non-archived events (including soft-removed ones) created strictly
 * before the given timestamp.
 *
 * @param oldTime cutoff creation date; only events created before it are returned
 * @return the matching events, or {@code null} when {@code oldTime} is {@code null}
 */
@Override
public List<EventVO> listOlderEvents(Date oldTime) {
    if (oldTime == null) {
        return null;
    }
    final SearchCriteria<EventVO> criteria = createSearchCriteria();
    criteria.addAnd("createDate", SearchCriteria.Op.LT, oldTime);
    criteria.addAnd("archived", SearchCriteria.Op.EQ, false);
    return listIncludingRemovedBy(criteria, null);
}

/**
 * Finds the non-archived event in the {@code Completed} state whose start event
 * has the given id (searches soft-removed rows as well).
 *
 * @param startId id of the start event the completed event refers to
 * @return the matching event, or {@code null} when none exists
 */
@Override
public EventVO findCompletedEvent(long startId) {
    final SearchCriteria<EventVO> criteria = CompletedEventSearch.create();
    criteria.setParameters("startId", startId);
    criteria.setParameters("state", State.Completed);
    criteria.setParameters("archived", false);
    return findOneIncludingRemovedBy(criteria);
}

@Override
public List<EventVO> listToArchiveOrDeleteEvents(List<Long> ids, String type, Date startDate, Date endDate, List<Long> accountIds) {
private SearchCriteria<EventVO> createEventSearchCriteria(List<Long> ids, String type, Date startDate, Date endDate,
Date limitDate, Long accountId, List<Long> domainIds) {
SearchCriteria<EventVO> sc = ToArchiveOrDeleteEventSearch.create();
if (ids != null) {
sc.setParameters("id", ids.toArray(new Object[ids.size()]));

if (CollectionUtils.isNotEmpty(ids)) {
sc.setParameters("id", ids.toArray(new Object[0]));
}
if (type != null) {
sc.setParameters("type", type);
if (CollectionUtils.isNotEmpty(domainIds)) {
sc.setParameters("domainIds", domainIds.toArray(new Object[0]));
}
if (startDate != null && endDate != null) {
sc.setParameters("createdDateB", startDate, endDate);
} else if (endDate != null) {
sc.setParameters("createdDateL", endDate);
}
if (accountIds != null && !accountIds.isEmpty()) {
sc.setParameters("accountIds", accountIds.toArray(new Object[accountIds.size()]));
}
sc.setParametersIfNotNull("accountId", accountId);
sc.setParametersIfNotNull("createdDateLT", limitDate);
sc.setParametersIfNotNull("type", type);
sc.setParameters("archived", false);
return search(sc, null);

return sc;
}

@Override
public void archiveEvents(List<EventVO> events) {
if (events != null && !events.isEmpty()) {
TransactionLegacy txn = TransactionLegacy.currentTxn();
txn.start();
for (EventVO event : events) {
event = lockRow(event.getId(), true);
event.setArchived(true);
update(event.getId(), event);
txn.commit();
public long archiveEvents(List<Long> ids, String type, Date startDate, Date endDate, Long accountId, List<Long> domainIds,
long limitPerQuery) {
SearchCriteria<EventVO> sc = createEventSearchCriteria(ids, type, startDate, endDate, null, accountId, domainIds);
Filter filter = null;
if (limitPerQuery > 0) {
filter = new Filter(limitPerQuery);
}

long archived;
long totalArchived = 0L;

do {
List<EventVO> events = search(sc, filter);
if (events.isEmpty()) {
break;
}
txn.close();

archived = archiveEventsInternal(events);
totalArchived += archived;
} while (limitPerQuery > 0 && archived >= limitPerQuery);
Comment on lines 81 to +101
Copy link

Copilot AI Apr 28, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

archiveEvents can load all matching events into memory when limitPerQuery <= 0 (the default for delete.query.batch.size is 0), then builds a single UPDATE ... WHERE id IN (...) statement containing every ID. On large event tables this risks OOMs, oversized SQL statements, and very long-running queries. Consider always archiving in batches (even when the config is 0), or implement a batched UPDATE ... LIMIT <batch> loop similar to batchExpunge so the operation remains bounded.

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ignoring the configuration and always processing in batches anyway would make the configuration setting pointless. I think we can just enable batch operations by default instead.

Comment thread
winterhazel marked this conversation as resolved.

return totalArchived;
}

@DB
private long archiveEventsInternal(List<EventVO> events) {
final String idsAsString = events.stream()
.map(e -> Long.toString(e.getId()))
.collect(Collectors.joining(","));
final String query = String.format("UPDATE event SET archived=true WHERE id IN (%s)", idsAsString);

try (TransactionLegacy txn = TransactionLegacy.currentTxn();
PreparedStatement pstmt = txn.prepareStatement(query)) {
Comment on lines +108 to +114
Copy link

Copilot AI Apr 28, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

archiveEventsInternal builds SQL by string-concatenating the IDs into an IN (...) clause. Besides the query-size risk, it bypasses parameter binding and can stress SQL parsing/plan caching. Consider using a parameterized statement (placeholders) or a different batching strategy (e.g., update by criteria with LIMIT) to keep statements small and reusable.

Suggested change
final String idsAsString = events.stream()
.map(e -> Long.toString(e.getId()))
.collect(Collectors.joining(","));
final String query = String.format("UPDATE event SET archived=true WHERE id IN (%s)", idsAsString);
try (TransactionLegacy txn = TransactionLegacy.currentTxn();
PreparedStatement pstmt = txn.prepareStatement(query)) {
if (CollectionUtils.isEmpty(events)) {
return 0L;
}
final String placeholders = events.stream()
.map(event -> "?")
.collect(Collectors.joining(","));
final String query = String.format("UPDATE event SET archived=true WHERE id IN (%s)", placeholders);
try (TransactionLegacy txn = TransactionLegacy.currentTxn();
PreparedStatement pstmt = txn.prepareStatement(query)) {
for (int i = 0; i < events.size(); i++) {
pstmt.setLong(i + 1, events.get(i).getId());
}

Copilot uses AI. Check for mistakes.
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This suggestion does not make sense to me

return pstmt.executeUpdate();
} catch (SQLException e) {
throw new CloudRuntimeException(e);
}
}

/**
 * Permanently expunges the events matching the given filters.
 *
 * @param ids           explicit event ids to match, may be null/empty
 * @param startDate     lower bound (inclusive) of the creation date, used together with endDate
 * @param endDate       upper bound of the creation date
 * @param limitDate     only events created strictly before this date are purged
 * @param type          event type to match, may be null
 * @param accountId     owning account to match, may be null
 * @param domainIds     owning domains to match, may be null/empty
 * @param limitPerQuery batch size per delete statement; values <= 0 disable batching
 * @return the number of events removed
 */
@Override
public long purgeAll(List<Long> ids, Date startDate, Date endDate, Date limitDate, String type, Long accountId,
        List<Long> domainIds, long limitPerQuery) {
    final SearchCriteria<EventVO> criteria =
            createEventSearchCriteria(ids, type, startDate, endDate, limitDate, accountId, domainIds);
    return batchExpunge(criteria, limitPerQuery);
}
Comment on lines 81 to +126
Copy link

Copilot AI Apr 28, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

New batching semantics in archiveEvents(...)/purgeAll(...) are non-trivial (limit handling, iteration/termination, criteria composition) but there are currently no unit tests covering these paths (unlike other DAO batch-expunge usages in engine/schema/src/test). Consider adding a focused DAO unit test validating that multiple batches are processed correctly and that limitPerQuery <= 0 behaves as intended.

Copilot uses AI. Check for mistakes.
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,10 @@ public class DbUpgradeUtils {

/**
 * Creates an index on the given columns if one with the auto-generated name
 * does not already exist. The name is derived from the table and column names.
 */
public static void addIndexIfNeeded(Connection conn, String tableName, String... columnNames) {
    addIndexWithNameIfNeeded(conn, tableName, dao.generateIndexName(tableName, columnNames), columnNames);
}

public static void addIndexWithNameIfNeeded(Connection conn, String tableName, String indexName, String... columnNames) {
if (!dao.indexExists(conn, tableName, indexName)) {
dao.createIndex(conn, tableName, indexName, columnNames);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ public InputStream[] getPrepareScripts() {
@Override
public void performDataMigration(Connection conn) {
unhideJsInterpretationEnabled(conn);
addIndexes(conn);
}

protected void unhideJsInterpretationEnabled(Connection conn) {
Expand Down Expand Up @@ -89,4 +90,10 @@ protected void updateJsInterpretationEnabledFields(Connection conn, String encry
logger.warn("Error while decrypting configuration 'js.interpretation.enabled'. The configuration may already be decrypted.");
}
}

/**
 * Adds the composite index backing generic event searches, if it is not
 * already present.
 */
private void addIndexes(Connection conn) {
    final String tableName = "event";
    final String indexName = "i_event__multiple_columns_for_generic_search";
    DbUpgradeUtils.addIndexWithNameIfNeeded(conn, tableName, indexName,
            "account_id", "domain_id", "archived", "display", "resource_type", "resource_id", "start_id", "type", "level",
            "created", "id");
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -950,7 +950,7 @@ private Pair<List<Long>, Integer> searchForEventIdsAndCount(ListEventsCmd cmd) {
searchFilter.addOrderBy(EventVO.class, "id", false);

SearchBuilder<EventVO> eventSearchBuilder = eventDao.createSearchBuilder();
eventSearchBuilder.select(null, Func.DISTINCT, eventSearchBuilder.entity().getId());
eventSearchBuilder.select(null, Func.NATIVE, eventSearchBuilder.entity().getId());
accountMgr.buildACLSearchBuilder(eventSearchBuilder, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);

eventSearchBuilder.and("id", eventSearchBuilder.entity().getId(), SearchCriteria.Op.EQ);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -553,7 +553,7 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
public static final ConfigKey<Long> DELETE_QUERY_BATCH_SIZE = new ConfigKey<>("Advanced", Long.class, "delete.query.batch.size", "0",
"Indicates the limit applied while deleting entries in bulk. With this, the delete query will apply the limit as many times as necessary," +
" to delete all the entries. This is advised when retaining several days of records, which can lead to slowness. <= 0 means that no limit will " +
"be applied. Default value is 0. For now, this is used for deletion of VM stats, volume stats, and usage records.", true);
"be applied. Default value is 0. For now, this is used for deletion of VM stats, volume stats, events, and usage records.", true);

private static final String IOPS_READ_RATE = "IOPS Read";
private static final String IOPS_WRITE_RATE = "IOPS Write";
Expand Down
Loading
Loading