Skip to content

Commit

Permalink
Merge pull request #171 from Olog/CSSTUDIO-1818
Browse files Browse the repository at this point in the history
Csstudio 1818
  • Loading branch information
georgweiss authored Sep 4, 2023
2 parents 7877616 + 29d38b2 commit 12cb46c
Show file tree
Hide file tree
Showing 7 changed files with 337 additions and 79 deletions.
16 changes: 15 additions & 1 deletion src/main/java/org/phoebus/olog/ElasticConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,9 @@ public class ElasticConfig {
private String ES_LOG_INDEX;
@Value("${elasticsearch.sequence.index:olog_sequence}")
private String ES_SEQ_INDEX;
@Value("${elasticsearch.log.archive.index:olog_archived_logs}")
private String ES_LOG_ARCHIVE_INDEX;


@Value("${elasticsearch.cluster.name:elasticsearch}")
private String clusterName;
Expand Down Expand Up @@ -175,7 +178,18 @@ void elasticIndexValidation(ElasticsearchClient client) {
} catch (IOException e) {
logger.log(Level.WARNING, "Failed to create index " + ES_LOG_INDEX, e);
}

// Olog Archived Log Template
try (InputStream is = ElasticConfig.class.getResourceAsStream("/log_entry_mapping.json")) {
BooleanResponse exits = client.indices().exists(ExistsRequest.of(e -> e.index(ES_LOG_ARCHIVE_INDEX)));
if (!exits.value()) {
CreateIndexResponse result = client.indices().create(
CreateIndexRequest.of(
c -> c.index(ES_LOG_ARCHIVE_INDEX).withJson(is)));
logger.info("Created index: " + "archived_" + ES_LOG_ARCHIVE_INDEX + " : acknowledged " + result.acknowledged());
}
} catch (IOException e) {
logger.log(Level.WARNING, "Failed to create index " + ES_LOG_ARCHIVE_INDEX, e);
}
}

private static final ObjectMapper mapper = new ObjectMapper();
Expand Down
78 changes: 68 additions & 10 deletions src/main/java/org/phoebus/olog/LogRepository.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,13 @@
package org.phoebus.olog;

import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.elasticsearch._types.FieldSort;
import co.elastic.clients.elasticsearch._types.Refresh;
import co.elastic.clients.elasticsearch._types.Result;
import co.elastic.clients.elasticsearch._types.SortOptions;
import co.elastic.clients.elasticsearch._types.SortOrder;
import co.elastic.clients.elasticsearch._types.query_dsl.WildcardQuery;
import co.elastic.clients.elasticsearch.core.ExistsRequest;
import co.elastic.clients.elasticsearch.core.GetRequest;
import co.elastic.clients.elasticsearch.core.GetResponse;
import co.elastic.clients.elasticsearch.core.IndexRequest;
Expand All @@ -18,6 +23,7 @@
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.mget.MultiGetResponseItem;
import co.elastic.clients.elasticsearch.core.search.Hit;
import org.apache.logging.log4j.util.Strings;
import org.phoebus.olog.entity.Attachment;
import org.phoebus.olog.entity.Log;
import org.phoebus.olog.entity.Log.LogBuilder;
Expand Down Expand Up @@ -51,6 +57,9 @@ public class LogRepository implements CrudRepository<Log, String> {
@Value("${elasticsearch.log.index:olog_logs}")
private String ES_LOG_INDEX;

@Value("${elasticsearch.log.archive.index:olog_archived_logs}")
private String ES_LOG_ARCHIVE_INDEX;

@SuppressWarnings("unused")
@Autowired
@Qualifier("client")
Expand Down Expand Up @@ -112,10 +121,9 @@ public <S extends Log> Iterable<S> saveAll(Iterable<S> logs) {
public Log update(Log log) {
try {
Log document = LogBuilder.createLog(log).build();

IndexRequest<Log> indexRequest =
IndexRequest.of(i ->
i.index(ES_LOG_INDEX)
i.index(ES_LOG_INDEX)
.id(String.valueOf(document.getId()))
.document(document));

Expand All @@ -131,16 +139,70 @@ public Log update(Log log) {
}
} catch (Exception e) {
logger.log(Level.SEVERE, "Failed to save log entry: " + log, e);
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to save log entry: " + log);
throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to update log entry: " + log);
}
return null;
}

/**
 * Archives the current persisted version of a log entry by copying it to the
 * archive index before an update overwrites it. The archived document id is
 * composed of the original id and the Elasticsearch document version, e.g.
 * {@code "123_v2"}, so multiple versions of the same entry can coexist.
 *
 * @param log the log entry about to be updated; only its id is used to look
 *            up the currently persisted document
 * @return the archived {@link Log} document as read back from the archive
 *         index, or {@code null} if the entry could not be found or archived
 */
public Log archive(Log log) {
    try {
        // Retrieve the currently persisted document (and its version) from elastic
        GetResponse<Log> resp = client.get(GetRequest.of(g ->
                g.index(ES_LOG_INDEX).id(String.valueOf(log.getId()))), Log.class);
        if (!resp.found()) {
            logger.log(Level.SEVERE, "Failed to archive log with id: " + log.getId());
        } else {
            Log originalDocument = resp.source();
            // Archive document id encodes the source id and its version, e.g. "123_v2"
            String updatedVersion = originalDocument.getId() + "_v" + resp.version();
            IndexRequest<Log> indexRequest =
                    IndexRequest.of(i ->
                            i.index(ES_LOG_ARCHIVE_INDEX)
                                    .id(updatedVersion)
                                    .document(originalDocument)
                                    // Refresh so the archived document is searchable immediately
                                    .refresh(Refresh.True));
            IndexResponse response = client.index(indexRequest);
            // Result is an enum; reference comparison is the idiomatic check
            if (response.result() == Result.Created) {
                GetRequest getRequest =
                        GetRequest.of(g ->
                                g.index(ES_LOG_ARCHIVE_INDEX).id(response.id()));
                return client.get(getRequest, Log.class).source();
            } else {
                logger.log(Level.SEVERE, "Failed to archive log with id: " + updatedVersion);
            }
        }
    } catch (IOException e) {
        logger.log(Level.SEVERE, "Failed to archive log with id: " + log.getId(), e);
    }
    return null;
}

/**
 * Finds all archived versions of a log entry.
 * <p>
 * Archived documents use ids of the form {@code "<logId>_v<version>"}, so a
 * wildcard query on {@code "<logId>*"} matches every archived version of the
 * entry. Results are sorted newest-first on the modification date.
 *
 * @param id the id of the (non-archived) log entry
 * @return a {@link SearchResult} with the archived versions and the hit count
 * @throws ResponseStatusException with status 500 if the search fails
 */
public SearchResult findArchivedById(String id) {
    FieldSort.Builder fb = new FieldSort.Builder();
    fb.field("modifyDate");
    fb.order(SortOrder.Desc);

    SearchRequest searchRequest = SearchRequest.of(s -> s.index(ES_LOG_ARCHIVE_INDEX)
            .query(WildcardQuery.of(q -> q.field("id").caseInsensitive(true).value(id + "*"))._toQuery())
            .timeout("60s")
            .sort(SortOptions.of(so -> so.field(fb.build()))));
    try {
        final SearchResponse<Log> searchResponse = client.search(searchRequest, Log.class);
        List<Log> result = searchResponse.hits().hits().stream().map(Hit::source).collect(Collectors.toList());
        SearchResult searchResult = new SearchResult();
        // total() may be null when total-hits tracking is disabled; fall back to the hit list size
        searchResult.setHitCount(searchResponse.hits().total() != null
                ? searchResponse.hits().total().value() : result.size());
        searchResult.setLogs(result);
        return searchResult;
    } catch (IOException | IllegalArgumentException e) {
        logger.log(Level.SEVERE, "Failed to complete search for archived logs", e);
        // Preserve the cause so the failure is diagnosable from the response/logs
        throw new ResponseStatusException(HttpStatus.INTERNAL_SERVER_ERROR, "Failed to complete search for archived logs", e);
    }
}

@Override
public Optional<Log> findById(String id) {
try {
GetRequest getRequest =
co.elastic.clients.elasticsearch.core.GetRequest.of(g ->
GetRequest.of(g ->
g.index(ES_LOG_INDEX).id(id));
GetResponse<Log> resp =
client.get(getRequest, Log.class);
Expand All @@ -159,12 +221,8 @@ public Optional<Log> findById(String id) {
@Override
public boolean existsById(String logId) {
try {
GetRequest getRequest =
GetRequest.of(g ->
g.index(ES_LOG_INDEX).id(logId));
GetResponse<Log> resp =
client.get(getRequest, Log.class);
return resp.found();
ExistsRequest existsRequest = ExistsRequest.of(e -> e.index(ES_LOG_INDEX).id(logId));
return client.exists(existsRequest).value();
} catch (IOException e) {
logger.log(Level.SEVERE, "Failed to check existence of log with id: " + logId, e);
throw new ResponseStatusException(HttpStatus.NOT_FOUND, "Failed to check existence of log with id: " + logId);
Expand Down
34 changes: 28 additions & 6 deletions src/main/java/org/phoebus/olog/LogResource.java
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import org.phoebus.olog.notification.LogEntryNotifier;
import org.phoebus.util.time.TimeParser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.core.task.TaskExecutor;
Expand Down Expand Up @@ -100,6 +101,13 @@ public Log getLog(@PathVariable String logId) {
}
}

/**
 * Retrieves all archived versions of the log entry identified by the given id.
 *
 * @param logId log entry id
 * @return a {@link SearchResult} holding the archived versions of the entry
 */
@GetMapping("archived/{logId}")
@SuppressWarnings("unused")
public SearchResult getArchivedLog(@PathVariable String logId) {
    return logRepository.findArchivedById(logId);
}

@GetMapping("/attachments/{logId}/{attachmentName}")
public ResponseEntity<Resource> findResources(@PathVariable String logId, @PathVariable String attachmentName) {
Optional<Log> log = logRepository.findById(logId);
Expand Down Expand Up @@ -301,6 +309,15 @@ public Log createLog(@RequestHeader(value = OLOG_CLIENT_INFO_HEADER, required =
}


/**
* Add an attachment to log entry identified by logId
* @param logId log entry ID
* @param file the file to be attached
* @param filename name of file
* @param id UUID for file in mongo
* @param fileMetadataDescription file metadata
* @return
*/
@PostMapping("/attachments/{logId}")
public Log uploadAttachment(@PathVariable String logId,
@RequestPart("file") MultipartFile file,
Expand Down Expand Up @@ -332,7 +349,6 @@ public Log uploadAttachment(@PathVariable String logId,
* of logbooks or tags, the updated log record will reflect that. However, the following data is NOT updated:
* <ul>
* <li>Attachments</li>
* <li>Owner (author)</li>
* <li>Created date</li>
* <li>Events</li>
* </ul>
Expand All @@ -341,22 +357,28 @@ public Log uploadAttachment(@PathVariable String logId,
* @param logId The log id of the entry subject to update. It must exist, i.e. it is not created of not found.
* @param markup Markup strategy, if any.
* @param log The log record data as sent by client.
* @param principal The authenticated {@link Principal} of the request.
* @return The updated log record, or HTTP status 404 if the log record does not exist. If the path
* variable does not match the id in the log record, HTTP status 400 (bad request) is returned.
*/
@SuppressWarnings("unused")
@PostMapping("/{logId}")
public Log updateLog(@PathVariable String logId,
@RequestParam(value = "markup", required = false) String markup,
@RequestBody Log log) {
@RequestBody Log log,
@AuthenticationPrincipal Principal principal) {

// In case a client sends a log record where the id does not match the path variable, return HTTP 400 (bad request)
if (!logId.equals(Long.toString(log.getId()))) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Log entry id does not match path variable");
}

Optional<Log> foundLog = logRepository.findById(logId);
if (foundLog.isPresent()) {
Log persistedLog = foundLog.get();
// In case a client sends a log record where the id does not match the path variable, return HTTP 400 (bad request)
if (!logId.equals(Long.toString(log.getId()))) {
throw new ResponseStatusException(HttpStatus.BAD_REQUEST, "Log entry id does not match path variable");
}
logRepository.archive(persistedLog);

persistedLog.setOwner(principal.getName());
persistedLog.setLevel(log.getLevel());
persistedLog.setProperties(log.getProperties());
persistedLog.setModifyDate(Instant.now());
Expand Down
3 changes: 3 additions & 0 deletions src/main/resources/application.properties
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,9 @@ elasticsearch.log.index: olog_logs

elasticsearch.sequence.index: olog_sequence

# Archive modified log entries
elasticsearch.log.archive.index: olog_archived_logs

############################## Mongo gridfs client ###############################

mongo.database:ologAttachments
Expand Down
23 changes: 23 additions & 0 deletions src/site/sphinx/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -304,6 +304,29 @@ Find entries with at least one attachment of type 'image'

**GET** https://localhost:8181/Olog/logs/search?attachments=image

Updating a Log Entry
********************

**POST** https://localhost:8181/Olog/logs/{logId}

Update a log entry. The original log entry is archived in a separate Elasticsearch index before any of the changes are applied.

Note: the create date, attachments, and events cannot be modified.

.. code-block:: json

    {
      "owner":"log",
      "description":"Beam Dump due to Major power dip Current Alarms Booster transmitter switched back to lower state. New important info appended",
      "level":"Info",
      "title":"A new title",
      "logbooks":[
        {
          "name":"Operations"
        }
      ]
    }

Managing Logbooks & Tags
************************
Expand Down
Loading

0 comments on commit 12cb46c

Please sign in to comment.