-
Notifications
You must be signed in to change notification settings - Fork 27
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Setup Initial Jenkins Git Structure (#744)
This change introduces a shared library structure for our Jenkins pipelines. As can be seen, this allows creating a pipeline template that our actual pipeline files can utilize and configure as needed. Additionally, this change adds some common functions needed for testing to the console library; a future change is expected to migrate our Python testing files to use this console library as well. --------- Signed-off-by: Tanner Lewis <[email protected]>
- Loading branch information
Showing
9 changed files
with
405 additions
and
11 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
49 changes: 47 additions & 2 deletions
49
...Solution/src/main/docker/migrationConsole/lib/console_link/console_link/logic/clusters.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,8 +1,53 @@ | ||
import logging
from dataclasses import dataclass
from typing import Optional

from console_link.models.cluster import Cluster, HttpMethod
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
@dataclass
class ConnectionResult:
    """Outcome of a connectivity probe against a cluster.

    Produced by connection_check(); on failure the version is unknown,
    so cluster_version is Optional (the original annotation of plain
    ``str`` was wrong — None is passed on the failure path).
    """
    # Human-readable description of the connection attempt's outcome.
    connection_message: str
    # True when the cluster responded successfully to the probe.
    connection_established: bool
    # Version number reported by the cluster, or None if unreachable.
    cluster_version: Optional[str]
|
||
|
||
def cat_indices(cluster: Cluster, as_json=False):
    """Return the cluster's index listing via the ``_cat/indices`` API.

    With ``as_json=True`` the parsed JSON body is returned; otherwise the
    raw verbose (``?v``) response content is returned.
    """
    if as_json:
        query_suffix = "?format=json"
    else:
        query_suffix = "?v"
    response = cluster.call_api(f"/_cat/indices{query_suffix}")
    if as_json:
        return response.json()
    return response.content
|
||
|
||
def connection_check(cluster: Cluster) -> ConnectionResult:
    """Probe the cluster root endpoint and report reachability.

    Returns a ConnectionResult whose ``cluster_version`` is read from the
    response body on success and is None when the cluster is unreachable.
    Never raises: any failure to reach the cluster is captured in the result.
    """
    cluster_details_path = "/"
    try:
        # Short timeout: this is a liveness probe, not a data call.
        r = cluster.call_api(cluster_details_path, timeout=3)
    except Exception as e:
        # Fix: log through the module-level logger (the original called the
        # root ``logging`` module directly, bypassing this module's logger).
        logger.debug(f"Unable to access cluster: {cluster} with exception: {e}")
        return ConnectionResult(connection_message=f"Unable to connect to cluster with error: {e}",
                                connection_established=False,
                                cluster_version=None)
    response_json = r.json()
    return ConnectionResult(connection_message="Successfully connected!",
                            connection_established=True,
                            cluster_version=response_json['version']['number'])
|
||
|
||
def run_test_benchmarks(cluster: Cluster):
    """Kick off the standard set of benchmark test workloads on the cluster."""
    test_workloads = ("geonames", "http_logs", "nested", "nyc_taxis")
    for workload_name in test_workloads:
        cluster.execute_benchmark_workload(workload=workload_name)
|
||
|
||
# As a default we exclude system indices and searchguard indices
def clear_indices(cluster: Cluster):
    """Delete all indices on the cluster except system (``.*``),
    ``searchguard*``, and ``sg7*`` indices, returning the raw response body.
    """
    delete_path = "/*,-.*,-searchguard*,-sg7*"
    response = cluster.call_api(delete_path, method=HttpMethod.DELETE)
    return response.content
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
70 changes: 70 additions & 0 deletions
70
jenkins/migrationIntegPipelines/ec2SourceE2EPipeline.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
// Jenkins pipeline: end-to-end integration test against a single-node EC2 source cluster.
// Note:
// 1. We are using an existing common VPC that we provide through a 'vpcId' parameter on the pipeline for now until we move
// to a proper Jenkins accounts and can create a setup without public subnets as well as request an extension to allow more than 5 VPCs per region
// 2. There is still a manual step needed on the EC2 source load balancer to replace its security group rule which allows all traffic (0.0.0.0/0) to
// allow traffic for the relevant service security group. This needs a better story around accepting user security groups in our Migration CDK.

// Identifiers selecting the CDK context blocks defined below, plus source repo/branch and deployment stage.
def sourceContextId = 'source-single-node-ec2'
def migrationContextId = 'migration-default'
def gitUrl = 'https://github.com/opensearch-project/opensearch-migrations.git'
def gitBranch = 'main'
def stageId = 'aws-integ'
// CDK context for deploying the source (Elasticsearch OSS 7.10.2, capture proxy enabled) cluster.
// "$vpcId" is Groovy-interpolated from the pipeline's 'vpcId' parameter; <STAGE> placeholders are
// presumably substituted by the shared pipeline library — confirm against defaultIntegPipeline.
def source_cdk_context = """
    {
      "source-single-node-ec2": {
        "suffix": "ec2-source-<STAGE>",
        "networkStackSuffix": "ec2-source-<STAGE>",
        "vpcId": "$vpcId",
        "distVersion": "7.10.2",
        "cidr": "12.0.0.0/16",
        "distributionUrl": "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-7.10.2-linux-x86_64.tar.gz",
        "captureProxyEnabled": true,
        "securityDisabled": true,
        "minDistribution": false,
        "cpuArch": "x64",
        "isInternal": true,
        "singleNodeCluster": true,
        "networkAvailabilityZones": 2,
        "dataNodeCount": 1,
        "managerNodeCount": 0,
        "serverAccessType": "ipv4",
        "restrictServerAccessTo": "0.0.0.0/0"
      }
    }
"""
// CDK context for deploying the target OpenSearch 2.11 domain and migration services
// (traffic replayer, fetch migration, reindex-from-snapshot, migration console/API).
def migration_cdk_context = """
    {
      "migration-default": {
        "stage": "<STAGE>",
        "vpcId": "$vpcId",
        "engineVersion": "OS_2.11",
        "domainName": "os-cluster-<STAGE>",
        "dataNodeCount": 2,
        "openAccessPolicyEnabled": true,
        "domainRemovalPolicy": "DESTROY",
        "artifactBucketRemovalPolicy": "DESTROY",
        "trafficReplayerExtraArgs": "--speedup-factor 10.0",
        "fetchMigrationEnabled": true,
        "reindexFromSnapshotServiceEnabled": true,
        "sourceClusterEndpoint": "<SOURCE_CLUSTER_ENDPOINT>",
        "dpPipelineTemplatePath": "../../../test/dp_pipeline_aws_integ.yaml",
        "migrationConsoleEnableOSI": true,
        "migrationAPIEnabled": true
      }
    }
"""

// Load the shared Jenkins library (the trailing '_' makes the annotation apply to the script).
@Library("migrations-shared-lib@main")_

// Invoke the shared pipeline template with this job's contexts; the commented-out
// deployStep shows how a stage of the template can be overridden per pipeline.
defaultIntegPipeline(
        sourceContext: source_cdk_context,
        migrationContext: migration_cdk_context,
        sourceContextId: sourceContextId,
        migrationContextId: migrationContextId,
        gitUrl: gitUrl,
        gitBranch: gitBranch,
        stageId: stageId
        //deployStep: {
        //    echo 'Custom Test Step'
        //}
)
66 changes: 66 additions & 0 deletions
66
jenkins/migrationIntegPipelines/rfsBackfillE2EPipeline.groovy
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
// Jenkins pipeline: end-to-end reindex-from-snapshot (RFS) backfill test against a single-node EC2 source cluster.
// Note:
// 1. We are using an existing common VPC that we provide through a 'vpcId' parameter on the pipeline for now until we move
// to a proper Jenkins accounts and can create a setup without public subnets as well as request an extension to allow more than 5 VPCs per region
// 2. There is still a manual step needed on the EC2 source load balancer to replace its security group rule which allows all traffic (0.0.0.0/0) to
// allow traffic for the relevant service security group. This needs a better story around accepting user security groups in our Migration CDK.

// Identifiers selecting the CDK context blocks defined below, plus source repo/branch and deployment stage.
def sourceContextId = 'source-single-node-ec2'
def migrationContextId = 'migration-rfs'
def gitUrl = 'https://github.com/opensearch-project/opensearch-migrations.git'
def gitBranch = 'main'
def stageId = 'rfs-integ'
// CDK context for deploying the source (Elasticsearch OSS 7.10.2) cluster; unlike the
// EC2 source E2E pipeline, the capture proxy is disabled — RFS reads from a snapshot.
// "$vpcId" is Groovy-interpolated from the pipeline's 'vpcId' parameter; <STAGE> placeholders are
// presumably substituted by the shared pipeline library — confirm against defaultIntegPipeline.
def source_cdk_context = """
    {
      "source-single-node-ec2": {
        "suffix": "ec2-source-<STAGE>",
        "networkStackSuffix": "ec2-source-<STAGE>",
        "vpcId": "$vpcId",
        "distVersion": "7.10.2",
        "distributionUrl": "https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-7.10.2-linux-x86_64.tar.gz",
        "captureProxyEnabled": false,
        "securityDisabled": true,
        "minDistribution": false,
        "cpuArch": "x64",
        "isInternal": true,
        "singleNodeCluster": true,
        "networkAvailabilityZones": 2,
        "dataNodeCount": 1,
        "managerNodeCount": 0,
        "serverAccessType": "ipv4",
        "restrictServerAccessTo": "0.0.0.0/0"
      }
    }
"""
// CDK context for deploying the target OpenSearch 2.11 domain with the
// reindex-from-snapshot service enabled and the traffic replayer disabled.
def migration_cdk_context = """
    {
      "migration-rfs": {
        "stage": "<STAGE>",
        "vpcId": "$vpcId",
        "engineVersion": "OS_2.11",
        "domainName": "os-cluster-<STAGE>",
        "dataNodeCount": 2,
        "openAccessPolicyEnabled": true,
        "domainRemovalPolicy": "DESTROY",
        "artifactBucketRemovalPolicy": "DESTROY",
        "kafkaBrokerServiceEnabled": true,
        "trafficReplayerServiceEnabled": false,
        "reindexFromSnapshotServiceEnabled": true,
        "sourceClusterEndpoint": "<SOURCE_CLUSTER_ENDPOINT>"
      }
    }
"""

// Load the shared Jenkins library (the trailing '_' makes the annotation apply to the script).
@Library("migrations-shared-lib@main")_

// Invoke the shared pipeline template; finishStep overrides the template's final
// stage, since the default finish step does not apply to the RFS flow.
defaultIntegPipeline(
        sourceContext: source_cdk_context,
        migrationContext: migration_cdk_context,
        sourceContextId: sourceContextId,
        migrationContextId: migrationContextId,
        gitUrl: gitUrl,
        gitBranch: gitBranch,
        stageId: stageId,
        finishStep: {
            echo 'Skipping step for RFS'
        }
)
Oops, something went wrong.