Skip to content

Commit

Permalink
Merge branch 'integration' into task/replace-nonpublic-iterationinterruptedexception
Browse files Browse the repository at this point in the history
  • Loading branch information
SethSmucker authored Dec 12, 2024
2 parents 7fc7d10 + 8fb39f3 commit ad6680c
Show file tree
Hide file tree
Showing 247 changed files with 12,665 additions and 2,007 deletions.
7 changes: 4 additions & 3 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,11 +46,12 @@ jobs:
- name: Commit Changes
run: |
if [ "$diffs_found" = true ]; then
git checkout -b ${{ steps.extract_branch.outputs.branch }}
git config --global user.name "GitHub Actions"
git config --global user.email "datawave@github.com"
git commit -am "Formatting job fix"
git push
git pull origin ${{ steps.extract_branch.outputs.branch }} --rebase --autostash
git checkout -b ${{ steps.extract_branch.outputs.branch }}
git commit -am "GitHub Actions: Fix Formatting"
git push origin ${{ steps.extract_branch.outputs.branch }}
else
echo "Nothing to do"
fi
Expand Down
4 changes: 2 additions & 2 deletions BUILDME.md
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@ mvn -Pdev,assemble,rpm -Ddeploy -Dtar -Ddist -DskipTests clean install

Datawave web services utilize several microservices at runtime (currently authorization and auditing, although that
list will expand soon). Datawave depends on api modules for some of these services, and the dependencies are set in
the parent pom (see `version.microservice.*` properties) to released versions. If you wish to build the microservices
the parent pom (see `version.datawave.*` properties) to released versions. If you wish to build the microservices
for some reason, you can simply add `-Dservices` to your maven build command.

### Releasing Microservices
Expand Down Expand Up @@ -95,7 +95,7 @@ the authorization service API version 1.0 is tagged with `svc_authorization-api_

Note that simply building a new API or service release won't ensure that it is used anywhere. You will need to update
build properties in either the datawave parent pom or within other service poms (for cross-service dependencies) to
ensure that the new version is used. Look for properties starting with `version.microservice.` to see what to update.
ensure that the new version is used. Look for properties starting with `version.datawave.` to see what to update.
If you are updating an API module, you should be careful. In general, the associated service will need to be updated as
well to support the API changes. The service should _add_ a new version of the API and continue to support the old
version until it can be ensured that there are no more consumers of the old API.
Expand Down
2 changes: 1 addition & 1 deletion common-test/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-common-test</artifactId>
<name>${project.artifactId}</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ function datawaveIngestWikipedia() {
[ ! -f "${wikipediaRawFile}" ] && error "File not found: ${wikipediaRawFile}" && return 1

local wikipediaHdfsFile="${DW_DATAWAVE_INGEST_HDFS_BASEDIR}/$( basename ${wikipediaRawFile} )"
local putFileCommand="hdfs dfs -copyFromLocal ${wikipediaRawFile} ${wikipediaHdfsFile}"
local putFileCommand="hdfs dfs -copyFromLocal -f ${wikipediaRawFile} ${wikipediaHdfsFile}"

local inputFormat="datawave.ingest.wikipedia.WikipediaEventInputFormat"
local jobCommand="${DW_DATAWAVE_INGEST_HOME}/bin/ingest/live-ingest.sh ${wikipediaHdfsFile} ${DW_DATAWAVE_INGEST_NUM_SHARDS} -inputFormat ${inputFormat} -data.name.override=wikipedia ${extraOpts}"
Expand All @@ -211,7 +211,7 @@ function datawaveIngestCsv() {
[ ! -f "${csvRawFile}" ] && error "File not found: ${csvRawFile}" && return 1

local csvHdfsFile="${DW_DATAWAVE_INGEST_HDFS_BASEDIR}/$( basename ${csvRawFile} )"
local putFileCommand="hdfs dfs -copyFromLocal ${csvRawFile} ${csvHdfsFile}"
local putFileCommand="hdfs dfs -copyFromLocal -f ${csvRawFile} ${csvHdfsFile}"

local inputFormat="datawave.ingest.csv.mr.input.CSVFileInputFormat"
local jobCommand="${DW_DATAWAVE_INGEST_HOME}/bin/ingest/live-ingest.sh ${csvHdfsFile} ${DW_DATAWAVE_INGEST_NUM_SHARDS} -inputFormat ${inputFormat} -data.name.override=mycsv ${extraOpts}"
Expand All @@ -232,7 +232,7 @@ function datawaveIngestJson() {
[ ! -f "${jsonRawFile}" ] && error "File not found: ${jsonRawFile}" && return 1

local jsonHdfsFile="${DW_DATAWAVE_INGEST_HDFS_BASEDIR}/$( basename ${jsonRawFile} )"
local putFileCommand="hdfs dfs -copyFromLocal ${jsonRawFile} ${jsonHdfsFile}"
local putFileCommand="hdfs dfs -copyFromLocal -f ${jsonRawFile} ${jsonHdfsFile}"

local inputFormat="datawave.ingest.json.mr.input.JsonInputFormat"
local jobCommand="${DW_DATAWAVE_INGEST_HOME}/bin/ingest/live-ingest.sh ${jsonHdfsFile} ${DW_DATAWAVE_INGEST_NUM_SHARDS} -inputFormat ${inputFormat} -data.name.override=myjson ${extraOpts}"
Expand Down Expand Up @@ -347,3 +347,10 @@ function datawaveIngestTarballName() {
local dwVersion="$(getDataWaveVersion)"
echo "$( basename "${DW_DATAWAVE_INGEST_TARBALL/-\*-/-$dwVersion-}" )"
}

function datawaveIngestExamples() {
    # Ingest the canned example datasets (Wikipedia XML, JSON, CSV).
    # Quote the file-path variables so that paths containing spaces or
    # glob characters are passed through intact — the removed
    # ingestExampleData function quoted these same arguments.
    datawaveIngestWikipedia "${DW_DATAWAVE_INGEST_TEST_FILE_WIKI}"
    datawaveIngestJson "${DW_DATAWAVE_INGEST_TEST_FILE_JSON}"
    datawaveIngestCsv "${DW_DATAWAVE_INGEST_TEST_FILE_CSV}"
}

Original file line number Diff line number Diff line change
Expand Up @@ -159,13 +159,6 @@ function initializeDatawaveTables() {
fi
}

function ingestExampleData() {
# Ingest some canned, example data files
datawaveIngestWikipedia "${DW_DATAWAVE_INGEST_TEST_FILE_WIKI}"
datawaveIngestJson "${DW_DATAWAVE_INGEST_TEST_FILE_JSON}"
datawaveIngestCsv "${DW_DATAWAVE_INGEST_TEST_FILE_CSV}"
}


initializeDatawaveTables

Expand All @@ -186,4 +179,4 @@ info "See \$DW_CLOUD_HOME/bin/services/datawave/bootstrap-ingest.sh to view/edit

# Ingest raw data examples, if appropriate...

[ "${DW_REDEPLOY_IN_PROGRESS}" != true ] && [ "${DW_DATAWAVE_INGEST_TEST_SKIP}" == false ] && ingestExampleData
[ "${DW_REDEPLOY_IN_PROGRESS}" != true ] && [ "${DW_DATAWAVE_INGEST_TEST_SKIP}" == false ] && datawaveIngestExamples
6 changes: 3 additions & 3 deletions contrib/datawave-quickstart/bin/services/hadoop/bootstrap.sh
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ dfs.replication 1"

DW_HADOOP_MR_HEAPDUMP_DIR="${DW_CLOUD_DATA}/heapdumps"
# mapred-site.xml (Format: <property-name><space><property-value>{<newline>})
DW_HADOOP_MAPRED_SITE_CONF="mapreduce.jobhistory.address http://${DW_BIND_HOST}:8020
mapreduce.jobhistory.webapp.address http://${DW_BIND_HOST}:8021
DW_HADOOP_MAPRED_SITE_CONF="mapreduce.jobhistory.address ${DW_BIND_HOST}:8020
mapreduce.jobhistory.webapp.address ${DW_BIND_HOST}:8021
mapreduce.jobhistory.intermediate-done-dir ${DW_HADOOP_MR_INTER_DIR}
mapreduce.jobhistory.done-dir ${DW_HADOOP_MR_DONE_DIR}
mapreduce.map.memory.mb 2048
Expand All @@ -72,7 +72,7 @@ yarn.nodemanager.pmem-check-enabled false
yarn.nodemanager.vmem-check-enabled false
yarn.nodemanager.resource.memory-mb 6144
yarn.app.mapreduce.am.resource.mb 1024
yarn.log.server.url http://localhost:8070/jobhistory/logs"
yarn.log.server.url http://localhost:8021/jobhistory/logs"

# capacity-scheduler.xml (Format: <property-name><space><property-value>{<newline>})
DW_HADOOP_CAPACITY_SCHEDULER_CONF="yarn.scheduler.capacity.maximum-applications 10000
Expand Down
4 changes: 2 additions & 2 deletions contrib/datawave-quickstart/docker/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<artifactId>quickstart</artifactId>
Expand Down Expand Up @@ -500,4 +500,4 @@
</build>
</profile>
</profiles>
</project>
</project>
2 changes: 1 addition & 1 deletion core/base-rest-responses
Submodule base-rest-responses updated 1 files
+4 −4 pom.xml
2 changes: 1 addition & 1 deletion core/cached-results/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-cached-results</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/common-util/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-common-util</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-common</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/connection-pool/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-connection-pool</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/map-reduce/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-map-reduce</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/modification/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-modification</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
Expand Down
2 changes: 1 addition & 1 deletion core/query/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-query</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/utils/common-utils
Submodule common-utils updated 1 files
+2 −2 pom.xml
2 changes: 1 addition & 1 deletion core/utils/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.10.0-SNAPSHOT</version>
<version>7.12.0-SNAPSHOT</version>
</parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-utils-parent</artifactId>
Expand Down
4 changes: 2 additions & 2 deletions docker/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -424,8 +424,8 @@ services:
- --spring.cloud.consul.host=consul
- --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value}
ports:
- "8280:8080"
- "8643:8443"
- "8580:8080"
- "8943:8443"
volumes:
- ${PKI_DIR:-./pki}:/etc/pki:ro
- ./logs:/logs
Expand Down
32 changes: 6 additions & 26 deletions docker/scripts/cleanup.sh
Original file line number Diff line number Diff line change
@@ -1,26 +1,6 @@
#!/bin/sh
rm -r -f query_*
rm -r -f errorQuery_*
rm -r -f edge_*
rm -r -f plan_*
rm -r -f prediction_*
rm -r -f lookup_*
rm -r -f batchLookup_*
rm -r -f lookupContent_*
rm -r -f batchLookupContent_*
rm -r -f streamingQuery_*
rm -r -f discovery_*
rm -r -f errorDiscovery_*
rm -r -f count_*
rm -r -f errorCount_*
rm -r -f fieldIndexCount_*
rm -r -f errorFieldIndexCount_*
rm -r -f poundit_*
rm -r -f executor_*
rm -r -f termFrequency_*
rm -r -f edgeEvent_*
rm -r -f hitHighlights_*
rm -r -f metrics_*
rm -r -f modification_*
rm -r -f mapReduceQuery_*
rm -r -f oozieQuery_*
#!/bin/bash
#
# Removes the per-query output directories produced by the test scripts.
# $1 is a whitespace-separated list of test script names
# (e.g. "query.sh count.sh"); for each name, every matching
# "<name-without-.sh>_*" directory is deleted.

# Split $1 on whitespace with read -ra instead of an unquoted array
# assignment, so the individual words are NOT glob-expanded against the
# current directory before the loop runs.
read -r -a passed_tests <<< "${1:-}"
for t in "${passed_tests[@]}"; do
    # '--' stops option parsing in case a test name starts with '-'.
    rm -rf -- "${t%.sh}"_*
done
31 changes: 31 additions & 0 deletions docker/scripts/common/common.sh
Original file line number Diff line number Diff line change
Expand Up @@ -85,4 +85,35 @@ logMetrics () {
echo "$(date): Metrics available at: ${METRICS_ENDPOINT}/id/${QUERY_ID}"
echo "$(date): Metrics available at: ${METRICS_ENDPOINT}/id/${QUERY_ID}" >> ${QUERY_TYPE}_${QUERY_ID}/querySummary.txt
fi
}

# Print a full-width green separator line (with trailing newline).
printLine() {
    printGreen "********************************************************************************************************"
    echo
}

# Emit $1 wrapped in the red color codes; no trailing newline,
# backslash escapes in the color variables are interpreted.
printRed() {
    printf '%b' "${DW_COLOR_RED}${1}${DW_COLOR_RESET}"
}

# Emit $1 wrapped in the green color codes; no trailing newline,
# backslash escapes in the color variables are interpreted.
printGreen() {
    printf '%b' "${DW_COLOR_GREEN}${1}${DW_COLOR_RESET}"
}

# Define the ANSI color variables consumed by printRed/printGreen.
# The values are stored as literal "\033[..." strings; the escape is
# interpreted later at print time (echo -e / printf %b).
setPrintColors() {
    local csi='\033['
    DW_COLOR_RED="${csi}31m"
    DW_COLOR_GREEN="${csi}32m"
    DW_COLOR_RESET="${csi}m"
}

# Initialize the colored PASSED/FAILED labels used in test summaries.
setTestLabels() {
    LABEL_PASS=$(printGreen 'PASSED')
    LABEL_FAIL=$(printRed 'FAILED')
}

printTestStatus() {
elapsed_time=$(echo "scale=3; ($2 - $1) / 1000000000" | bc)
echo
echo "Test Total Time: $elapsed_time seconds"
echo "Test Status: $3"
echo
}
Loading

0 comments on commit ad6680c

Please sign in to comment.