Skip to content

Commit

Permalink
Removed the few autocommit-related changes previously borrowed from 10547, to keep things separate and clear, for testing etc. #10554
Browse files Browse the repository at this point in the history
  • Loading branch information
landreev committed May 24, 2024
1 parent 1a097cc commit 1f66420
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 23 deletions.
29 changes: 13 additions & 16 deletions src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -312,7 +312,7 @@ public Future<String> indexDataverse(Dataverse dataverse, boolean processPaths)
String status;
try {
if (dataverse.getId() != null) {
solrClientService.getSolrClient().add(docs, COMMIT_WITHIN);
solrClientService.getSolrClient().add(docs);
} else {
logger.info("WARNING: indexing of a dataverse with no id attempted");
}
Expand All @@ -321,13 +321,13 @@ public Future<String> indexDataverse(Dataverse dataverse, boolean processPaths)
logger.info(status);
return new AsyncResult<>(status);
}
/*try {
try {
solrClientService.getSolrClient().commit();
} catch (SolrServerException | IOException ex) {
status = ex.toString();
logger.info(status);
return new AsyncResult<>(status);
}*/
}

dvObjectService.updateContentIndexTime(dataverse);
IndexResponse indexResponse = solrIndexService.indexPermissionsForOneDvObject(dataverse);
Expand All @@ -353,7 +353,6 @@ public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) {
private static final Map<Long, Boolean> INDEXING_NOW = new ConcurrentHashMap<>();
// semaphore for async indexing
private static final Semaphore ASYNC_INDEX_SEMAPHORE = new Semaphore(JvmSettings.MAX_ASYNC_INDEXES.lookupOptional(Integer.class).orElse(4), true);
static final int COMMIT_WITHIN = 5000;

@Inject
@Metric(name = "index_permit_wait_time", absolute = true, unit = MetricUnits.NANOSECONDS,
Expand Down Expand Up @@ -1536,8 +1535,8 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set<Long> d
final SolrInputDocuments docs = toSolrDocs(indexableDataset, datafilesInDraftVersion);

try {
solrClientService.getSolrClient().add(docs.getDocuments(), COMMIT_WITHIN);
//solrClientService.getSolrClient().commit();
solrClientService.getSolrClient().add(docs.getDocuments());
solrClientService.getSolrClient().commit();
} catch (SolrServerException | IOException ex) {
if (ex.getCause() instanceof SolrServerException) {
throw new SolrServerException(ex);
Expand Down Expand Up @@ -1789,8 +1788,8 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc

sid.removeField(SearchFields.SUBTREE);
sid.addField(SearchFields.SUBTREE, paths);
UpdateResponse addResponse = solrClientService.getSolrClient().add(sid, COMMIT_WITHIN);
//UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
UpdateResponse addResponse = solrClientService.getSolrClient().add(sid);
UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
if (object.isInstanceofDataset()) {
for (DataFile df : dataset.getFiles()) {
solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString()));
Expand All @@ -1803,8 +1802,8 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc
}
sid.removeField(SearchFields.SUBTREE);
sid.addField(SearchFields.SUBTREE, paths);
addResponse = solrClientService.getSolrClient().add(sid, COMMIT_WITHIN);
//commitResponse = solrClientService.getSolrClient().commit();
addResponse = solrClientService.getSolrClient().add(sid);
commitResponse = solrClientService.getSolrClient().commit();
}
}
}
Expand Down Expand Up @@ -1846,16 +1845,15 @@ public String delete(Dataverse doomed) {
logger.fine("deleting Solr document for dataverse " + doomed.getId());
UpdateResponse updateResponse;
try {
updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId(), COMMIT_WITHIN);
updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId());
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
/*try {
try {
solrClientService.getSolrClient().commit();
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
*/
String response = "Successfully deleted dataverse " + doomed.getId() + " from Solr index. updateReponse was: " + updateResponse.toString();
logger.fine(response);
return response;
Expand All @@ -1872,16 +1870,15 @@ public String removeSolrDocFromIndex(String doomed) {
logger.fine("deleting Solr document: " + doomed);
UpdateResponse updateResponse;
try {
updateResponse = solrClientService.getSolrClient().deleteById(doomed, COMMIT_WITHIN);
updateResponse = solrClientService.getSolrClient().deleteById(doomed);
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
/*try {
try {
solrClientService.getSolrClient().commit();
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
*/
String response = "Attempted to delete " + doomed + " from Solr index. updateReponse was: " + updateResponse.toString();
logger.fine(response);
return response;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -356,8 +356,8 @@ private void persistToSolr(Collection<SolrInputDocument> docs) throws SolrServer
/**
* @todo Do something with these responses from Solr.
*/
UpdateResponse addResponse = solrClientService.getSolrClient().add(docs, IndexServiceBean.COMMIT_WITHIN);
//UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
UpdateResponse addResponse = solrClientService.getSolrClient().add(docs);
UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
}

public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) {
Expand Down Expand Up @@ -497,26 +497,26 @@ public IndexResponse deleteMultipleSolrIds(List<String> solrIdsToDelete) {
return new IndexResponse("nothing to delete");
}
try {
solrClientService.getSolrClient().deleteById(solrIdsToDelete, IndexServiceBean.COMMIT_WITHIN);
solrClientService.getSolrClient().deleteById(solrIdsToDelete);
} catch (SolrServerException | IOException ex) {
/**
* @todo mark these for re-deletion
*/
return new IndexResponse("problem deleting the following documents from Solr: " + solrIdsToDelete);
}
/*try {
try {
solrClientService.getSolrClient().commit();
} catch (SolrServerException | IOException ex) {
return new IndexResponse("problem committing deletion of the following documents from Solr: " + solrIdsToDelete);
}*/
}
return new IndexResponse("no known problem deleting the following documents from Solr:" + solrIdsToDelete);
}

public JsonObjectBuilder deleteAllFromSolrAndResetIndexTimes() throws SolrServerException, IOException {
JsonObjectBuilder response = Json.createObjectBuilder();
logger.info("attempting to delete all Solr documents before a complete re-index");
solrClientService.getSolrClient().deleteByQuery("*:*", IndexServiceBean.COMMIT_WITHIN);
//solrClientService.getSolrClient().commit();
solrClientService.getSolrClient().deleteByQuery("*:*");
solrClientService.getSolrClient().commit();
int numRowsAffected = dvObjectService.clearAllIndexTimes();
response.add(numRowsClearedByClearAllIndexTimes, numRowsAffected);
response.add(messageString, "Solr index and database index timestamps cleared.");
Expand Down

0 comments on commit 1f66420

Please sign in to comment.