Merge branch 'main' into 20230331_add_kerberos_support

This commit is contained in:
Brian Gann
2023-04-03 21:46:01 -04:00
committed by GitHub
165 changed files with 4739 additions and 2921 deletions

View File

@@ -65,10 +65,19 @@ load("scripts/drone/vault.star", "from_secret", "prerelease_bucket")
ver_mode = "release"
release_trigger = {
"event": {"exclude": ["promote"]},
"ref": [
"refs/tags/v*",
],
"event": {
"exclude": [
"promote",
],
},
"ref": {
"include": [
"refs/tags/v*",
],
"exclude": [
"refs/tags/*-cloud*",
],
},
}
def store_npm_packages_step():
@@ -196,6 +205,12 @@ def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
mysql_integration_tests_step(),
]
# We don't need to run integration tests at release time since they have
# been run multiple times before:
if ver_mode in ("release"):
integration_test_steps = []
volumes = []
windows_pipeline = pipeline(
name = "{}-oss-windows".format(ver_mode),
edition = "oss",
@@ -203,11 +218,9 @@ def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
steps = get_windows_steps(edition = "oss", ver_mode = ver_mode),
platform = "windows",
depends_on = [
# 'oss-build-e2e-publish-{}'.format(ver_mode),
"{}-oss-build-e2e-publish".format(ver_mode),
"{}-oss-test-frontend".format(ver_mode),
"{}-oss-test-backend".format(ver_mode),
"{}-oss-integration-tests".format(ver_mode),
],
environment = environment,
)
@@ -224,7 +237,10 @@ def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
),
test_frontend(trigger, ver_mode),
test_backend(trigger, ver_mode),
pipeline(
]
if ver_mode not in ("release"):
pipelines.append(pipeline(
name = "{}-oss-integration-tests".format(ver_mode),
edition = "oss",
trigger = trigger,
@@ -239,9 +255,9 @@ def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
integration_test_steps,
environment = environment,
volumes = volumes,
),
windows_pipeline,
]
))
pipelines.append(windows_pipeline)
return pipelines
@@ -338,8 +354,16 @@ def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):
integration_test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
redis_integration_tests_step(),
memcached_integration_tests_step(),
]
# We don't need to run integration tests at release time since they have
# been run multiple times before:
if ver_mode in ("release"):
integration_test_steps = []
volumes = []
windows_pipeline = pipeline(
name = "{}-enterprise-windows".format(ver_mode),
edition = "enterprise",
@@ -347,11 +371,9 @@ def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):
steps = get_windows_steps(edition = "enterprise", ver_mode = ver_mode),
platform = "windows",
depends_on = [
# 'enterprise-build-e2e-publish-{}'.format(ver_mode),
"{}-enterprise-build-e2e-publish".format(ver_mode),
"{}-enterprise-test-frontend".format(ver_mode),
"{}-enterprise-test-backend".format(ver_mode),
"{}-enterprise-integration-tests".format(ver_mode),
],
environment = environment,
)
@@ -368,7 +390,10 @@ def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):
),
test_frontend_enterprise(trigger, ver_mode, source = source),
test_backend_enterprise(trigger, ver_mode, source = source),
pipeline(
]
if ver_mode not in ("release"):
pipelines.append(pipeline(
name = "{}-enterprise-integration-tests".format(ver_mode),
edition = "enterprise",
trigger = trigger,
@@ -391,16 +416,12 @@ def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):
[
wire_install_step(),
] +
integration_test_steps +
[
redis_integration_tests_step(),
memcached_integration_tests_step(),
],
integration_test_steps,
environment = environment,
volumes = volumes,
),
windows_pipeline,
]
))
pipelines.append(windows_pipeline)
return pipelines
@@ -655,3 +676,77 @@ def artifacts_page_pipeline():
environment = {"EDITION": "enterprise"},
),
]
def integration_test_pipelines():
    """
    Trigger integration tests on release builds

    These pipelines should be triggered when we have a release that does a lot of
    cherry-picking and we still want to have all the integration tests run on that
    particular build.

    Returns:
      List of Drone pipelines (one for enterprise and one for oss integration tests)
    """
    # Both pipelines fire only on an explicit `drone build promote` to this target.
    promote_trigger = {
        "event": ["promote"],
        "target": "integration-tests",
    }
    shared_volumes = integration_test_services_volumes()

    # Enterprise runs the OSS suite plus the enterprise-only backends.
    oss_test_steps = [
        postgres_integration_tests_step(),
        mysql_integration_tests_step(),
    ]
    enterprise_test_steps = oss_test_steps + [
        redis_integration_tests_step(),
        memcached_integration_tests_step(),
    ]
    enterprise_source = "${DRONE_TAG}"

    oss_pipeline = pipeline(
        name = "integration-tests-oss",
        edition = "oss",
        trigger = promote_trigger,
        services = integration_test_services(edition = "oss"),
        steps = [
            download_grabpl_step(),
            identify_runner_step(),
            verify_gen_cue_step(),
            verify_gen_jsonnet_step(),
            wire_install_step(),
        ] + oss_test_steps,
        environment = {"EDITION": "oss"},
        volumes = shared_volumes,
    )

    enterprise_pipeline = pipeline(
        name = "integration-tests-enterprise",
        edition = "enterprise",
        trigger = promote_trigger,
        services = integration_test_services(edition = "enterprise"),
        steps = [
            download_grabpl_step(),
            identify_runner_step(),
            clone_enterprise_step(source = enterprise_source),
            init_enterprise_step(ver_mode),
        ] + with_deps(
            # Codegen verification must wait for the enterprise checkout/init.
            [
                verify_gen_cue_step(),
                verify_gen_jsonnet_step(),
            ],
            [
                "init-enterprise",
            ],
        ) + [
            wire_install_step(),
        ] + enterprise_test_steps,
        environment = {"EDITION": "enterprise"},
        volumes = shared_volumes,
    )

    return [oss_pipeline, enterprise_pipeline]

View File

@@ -78,10 +78,10 @@ def test_frontend_enterprise(trigger, ver_mode, source, edition = "enterprise"):
environment = {"EDITION": edition}
steps = (
[
download_grabpl_step(),
clone_enterprise_step(source),
init_enterprise_step(ver_mode),
identify_runner_step(),
download_grabpl_step(),
] +
with_deps([yarn_install_step()], ["init-enterprise"]) +
[

View File

@@ -171,6 +171,7 @@ def init_enterprise_step(ver_mode):
"image": build_image,
"depends_on": [
"clone-enterprise",
"grabpl",
],
"environment": environment,
"commands": [