Skip to content

Commit

Permalink
Merge branch 'integration' into task/federatedQueryPlanner
Browse files Browse the repository at this point in the history
  • Loading branch information
lbschanno committed Aug 22, 2024
2 parents 9aca68a + ed6a3da commit 08ac120
Show file tree
Hide file tree
Showing 295 changed files with 198,615 additions and 5,632 deletions.
9 changes: 5 additions & 4 deletions .github/CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@

# In order to ensure the query microservices are consistent with the wildfly webservice, we need
# to ensure that changes made to QueryExecutorBean match QueryManagementService, and changes made
# to LookupUUIDUtil match LookupService in the Query Service.
QueryExecutorBean.java @jwomeara
LookupUUIDUtil.java @jwomeara
RunningQuery.java @jwomeara
QueryExecutorBean.java @jwomeara @ivakegg
LookupUUIDUtil.java @jwomeara @ivakegg
RunningQuery.java @jwomeara @ivakegg
/core/ @jwomeara @ivakegg
/warehouse/query-core/ @jwomeara @ivakegg
138 changes: 138 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,10 @@ jobs:
java-version: ${{env.JAVA_VERSION}}
maven-version: 3.9.5
cache: 'maven'
- name: Extract branch name
shell: bash
run: echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT
id: extract_branch
- name: Format code
env:
USER_NAME: ${{ secrets.USER_NAME }}
Expand All @@ -42,6 +46,7 @@ jobs:
- name: Commit Changes
run: |
if [ "$diffs_found" = true ]; then
git checkout -b ${{ steps.extract_branch.outputs.branch }}
git config --global user.name "GitHub Actions"
git config --global user.email "[email protected]"
git commit -am "Formatting job fix"
Expand Down Expand Up @@ -150,6 +155,139 @@ jobs:
TAG=$(mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -q -N -Dmaven.build.cache.enabled=false -Dexec.executable='echo' -Dexec.args='${project.version}' exec:exec)
contrib/datawave-quickstart/docker/docker-build.sh ${TAG} --docker-opts "${DOCKER_BUILD_OPTS}"
compose-build-and-test-latest-snapshots:
runs-on: ubuntu-latest
steps:
- name: Free up some space
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo rm -rf $AGENT_TOOLSDIRECTORY
- name: Checkout Code
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Set up JDK ${{env.JAVA_VERSION}}
uses: actions/setup-java@v3
with:
distribution: ${{env.JAVA_DISTRIBUTION}}
java-version: ${{env.JAVA_VERSION}}
maven-version: 3.9.5
cache: 'maven'
# Builds the quickstart and microservice docker images and runs a query test
- name: Docker Compose Query Tests
env:
USER_NAME: ${{ secrets.USER_NAME }}
ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
# update datawave dependencies to use the latest snapshots
mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -B -V -e versions:update-properties versions:update-parent -DallowSnapshots=true -Dincludes=gov.nsa.*
mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -B -V -e -Pcompose -Dmicroservice-docker -Dquickstart-docker -Ddeploy -Dtar -DskipTests clean install
# free up some space so that we don't run out
docker system prune -f
mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -B -V -e -Pcompose -Dmicroservice-docker -Dquickstart-docker -Ddeploy -Dtar -DskipTests clean
cd docker
./bootstrap.sh
attempt=0
max_attempts=20
while [ $attempt -lt $max_attempts ]; do
attempt=$((attempt+1))
echo "Starting docker compose (Attempt ${attempt}/${max_attempts})"
nohup docker compose up -d --no-recreate < /dev/null > compose.out 2>&1 &
sleep 60s
cat compose.out
# check to see if the query service is running
QUERY="$(docker compose ps --status running --services | grep query || true)"
if [ "$QUERY" == "query" ] ; then
echo "Docker compose started successfully"
break
elif [ $attempt -eq $max_attempts ] ; then
echo "Failed to start docker compose"
exit 1
fi
done
cd scripts
./testAll.sh
- name: Dump Logs
if: failure()
run: |
cd docker
docker compose logs
compose-build-and-test:
runs-on: ubuntu-latest
steps:
- name: Free up some space
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo rm -rf $AGENT_TOOLSDIRECTORY
- name: Checkout Code
uses: actions/checkout@v3
with:
submodules: 'recursive'
- name: Set up JDK ${{env.JAVA_VERSION}}
uses: actions/setup-java@v3
with:
distribution: ${{env.JAVA_DISTRIBUTION}}
java-version: ${{env.JAVA_VERSION}}
- uses: actions/cache@v3
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-build-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-build-
${{ runner.os }}-maven-format-
${{ runner.os }}-maven-
# Builds the quickstart and microservice docker images and runs a query test
- name: Docker Compose Query Tests
env:
USER_NAME: ${{ secrets.USER_NAME }}
ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -B -V -e -Pcompose -Dmicroservice-docker -Dquickstart-docker -Ddeploy -Dtar -DskipTests clean install
# free up some space so that we don't run out
docker system prune -f
mvn -s $GITHUB_WORKSPACE/.github/workflows/settings.xml -B -V -e -Pcompose -Dmicroservice-docker -Dquickstart-docker -Ddeploy -Dtar -DskipTests clean
cd docker
./bootstrap.sh
attempt=0
max_attempts=20
while [ $attempt -lt $max_attempts ]; do
attempt=$((attempt+1))
echo "Starting docker compose (Attempt ${attempt}/${max_attempts})"
nohup docker compose up -d --no-recreate < /dev/null > compose.out 2>&1 &
sleep 60s
cat compose.out
# check to see if the query service is running
QUERY="$(docker compose ps --status running --services | grep query || true)"
if [ "$QUERY" == "query" ] ; then
echo "Docker compose started successfully"
break
elif [ $attempt -eq $max_attempts ] ; then
echo "Failed to start docker compose"
exit 1
fi
done
cd scripts
./testAll.sh
- name: Dump Logs
if: failure()
run: |
cd docker
docker compose logs
# Here's an example of how you'd deploy the image to the github package registry.
# We don't want to do this by default since packages on github cannot be deleted
# or overwritten. So this could only be done for tags; however, it seems the quickstart
Expand Down
2 changes: 1 addition & 1 deletion common-test/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-common-test</artifactId>
<name>${project.artifactId}</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,9 @@ admin.enableServer=false"

# You may override DW_ACCUMULO_DIST_URI in your env ahead of time, and set as file:///path/to/file.tar.gz for local tarball, if needed
# DW_ACCUMULO_DIST_URI should, if possible, use https. There are potential security risks in using http.
DW_ACCUMULO_DIST_URI="${DW_ACCUMULO_DIST_URI:-https://dlcdn.apache.org/accumulo/2.1.2/accumulo-2.1.2-bin.tar.gz}"
DW_ACCUMULO_DIST_URI="${DW_ACCUMULO_DIST_URI:-https://dlcdn.apache.org/accumulo/2.1.3/accumulo-2.1.3-bin.tar.gz}"
# The sha512 checksum for the tarball. Value should be the hash value only and does not include the file name. Cannot be left blank.
DW_ACCUMULO_DIST_SHA512_CHECKSUM="${DW_ACCUMULO_DIST_SHA512_CHECKSUM:-27778c1c3f1d88ab128649fd0671d3be97ba052216ab43f1169395960e8c7d16375a51f940c2262437b836ea31f83f73f08f7a3d8cadda443e5e8bb31d9b23c5}"
DW_ACCUMULO_DIST_SHA512_CHECKSUM="${DW_ACCUMULO_DIST_SHA512_CHECKSUM:-1a27a144dc31f55ccc8e081b6c1bc6cc0362a8391838c53c166cb45291ff8f35867fd8e4729aa7b2c540f8b721f8c6953281bf589fc7fe320e4dc4d20b87abc4}"
# shellcheck disable=SC2034
DW_ACCUMULO_DIST="$( downloadTarball "${DW_ACCUMULO_DIST_URI}" "${DW_ACCUMULO_SERVICE_DIR}" && echo "${tarball}" )"
DW_ACCUMULO_BASEDIR="accumulo-install"
Expand Down
6 changes: 3 additions & 3 deletions contrib/datawave-quickstart/docker/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,21 +4,21 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<artifactId>quickstart</artifactId>
<packaging>pom</packaging>
<name>${project.artifactId}</name>
<properties>
<!-- These distributions and URLs should match what is listed in each service's bootstrap.sh script -->
<dist.accumulo>accumulo-2.1.2-bin.tar.gz</dist.accumulo>
<dist.accumulo>accumulo-2.1.3-bin.tar.gz</dist.accumulo>
<dist.hadoop>hadoop-3.3.6.tar.gz</dist.hadoop>
<dist.maven>apache-maven-3.8.8-bin.tar.gz</dist.maven>
<dist.wildfly>wildfly-17.0.1.Final.tar.gz</dist.wildfly>
<dist.zookeeper>apache-zookeeper-3.7.2-bin.tar.gz</dist.zookeeper>
<skipIngest>false</skipIngest>
<url.accumulo>https://dlcdn.apache.org/accumulo/2.1.2/${dist.accumulo}</url.accumulo>
<url.accumulo>https://dlcdn.apache.org/accumulo/2.1.3/${dist.accumulo}</url.accumulo>
<url.hadoop>https://dlcdn.apache.org/hadoop/common/hadoop-3.3.6/${dist.hadoop}</url.hadoop>
<url.maven>https://dlcdn.apache.org/maven/maven-3/3.8.8/binaries/${dist.maven}</url.maven>
<url.wildfly>https://download.jboss.org/wildfly/17.0.1.Final/${dist.wildfly}</url.wildfly>
Expand Down
2 changes: 1 addition & 1 deletion core/cached-results/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-cached-results</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/common-util/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-common-util</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-common</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/connection-pool/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-connection-pool</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/map-reduce/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-map-reduce</artifactId>
<name>${project.artifactId}</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,6 @@ protected void setup(org.apache.hadoop.mapreduce.Mapper<Key,Value,Key,Value>.Con

QueryLogic<?> logic = (QueryLogic<?>) super.applicationContext.getBean(logicName);
t = logic.getEnrichedTransformer(query);

Assert.notNull(logic.getMarkingFunctions());
Assert.notNull(logic.getResponseObjectFactory());
this.format = SerializationFormat.valueOf(context.getConfiguration().get(RESULT_SERIALIZATION_FORMAT));
Expand Down
2 changes: 1 addition & 1 deletion core/modification/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-modification</artifactId>
<name>${project.artifactId}</name>
Expand Down
2 changes: 1 addition & 1 deletion core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave</groupId>
<artifactId>datawave-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
Expand Down
2 changes: 1 addition & 1 deletion core/query/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<artifactId>datawave-core-query</artifactId>
<name>${project.artifactId}</name>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,8 @@ public class GenericQueryConfiguration implements Serializable {
// use a value like 'env:PASS' to pull from the environment
private String accumuloPassword = "";

private String connPoolName;

    // Whether this query emits every result or performs some kind of result reduction
protected boolean reduceResults = false;

Expand Down Expand Up @@ -111,6 +113,7 @@ public void copyFrom(GenericQueryConfiguration other) {
this.setBaseIteratorPriority(other.getBaseIteratorPriority());
this.setBypassAccumulo(other.getBypassAccumulo());
this.setAccumuloPassword(other.getAccumuloPassword());
this.setConnPoolName(other.getConnPoolName());
this.setAuthorizations(other.getAuthorizations());
this.setBeginDate(other.getBeginDate());
this.setClient(other.getClient());
Expand Down Expand Up @@ -289,6 +292,14 @@ public void setAccumuloPassword(String password) {
this.accumuloPassword = EnvProvider.resolve(password);
}

public String getConnPoolName() {
return connPoolName;
}

public void setConnPoolName(String connPoolName) {
this.connPoolName = connPoolName;
}

public Map<String,ScannerBase.ConsistencyLevel> getTableConsistencyLevels() {
return tableConsistencyLevels;
}
Expand Down Expand Up @@ -347,12 +358,13 @@ && getBypassAccumulo() == that.getBypassAccumulo() && Objects.equals(getAuthoriz
&& Objects.equals(getBeginDate(), that.getBeginDate()) && Objects.equals(getEndDate(), that.getEndDate())
&& Objects.equals(getMaxWork(), that.getMaxWork()) && Objects.equals(getTableName(), that.getTableName())
&& Objects.equals(getQueries(), that.getQueries()) && Objects.equals(getAccumuloPassword(), that.getAccumuloPassword())
&& Objects.equals(isReduceResults(), that.isReduceResults());
&& Objects.equals(getConnPoolName(), that.getConnPoolName()) && Objects.equals(isReduceResults(), that.isReduceResults());
}

@Override
public int hashCode() {
return Objects.hash(isCheckpointable(), getAuthorizations(), getQuery(), getQueryString(), getBeginDate(), getEndDate(), getMaxWork(),
getBaseIteratorPriority(), getTableName(), getQueries(), getBypassAccumulo(), getAccumuloPassword(), isReduceResults());
getBaseIteratorPriority(), getTableName(), getQueries(), getBypassAccumulo(), getConnPoolName(), getAccumuloPassword(),
isReduceResults());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ public abstract class BaseQueryLogic<T> implements QueryLogic<T> {
private int maxPageSize = 0;
private long pageByteTrigger = 0;
private boolean collectQueryMetrics = true;
private String _connPoolName;
private Set<String> authorizedDNs;

protected ProxiedUserDetails currentUser;
Expand Down Expand Up @@ -79,7 +78,6 @@ public BaseQueryLogic(BaseQueryLogic<T> other) {
setPageByteTrigger(other.getPageByteTrigger());
setCollectQueryMetrics(other.getCollectQueryMetrics());
this.authorizedDNs = other.authorizedDNs;
setConnPoolName(other.getConnPoolName());
setRequiredRoles(other.getRequiredRoles());
setSelectorExtractor(other.getSelectorExtractor());
setCurrentUser(other.getCurrentUser());
Expand Down Expand Up @@ -330,13 +328,13 @@ public void setCollectQueryMetrics(boolean collectQueryMetrics) {
/** {@inheritDoc} */
@Override
public String getConnPoolName() {
return _connPoolName;
return getConfig().getConnPoolName();
}

/** {@inheritDoc} */
@Override
public void setConnPoolName(final String connPoolName) {
_connPoolName = connPoolName;
getConfig().setConnPoolName(connPoolName);
}

/** {@inheritDoc} */
Expand Down
2 changes: 1 addition & 1 deletion core/utils/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
<parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-core-parent</artifactId>
<version>7.2.0-SNAPSHOT</version>
<version>7.4.0-SNAPSHOT</version>
</parent>
<groupId>gov.nsa.datawave.core</groupId>
<artifactId>datawave-utils-parent</artifactId>
Expand Down
Empty file modified docker/config/application-federation.yml
100644 → 100755
Empty file.
Loading

0 comments on commit 08ac120

Please sign in to comment.