Skip to content
代码片段 群组 项目

比较版本

更改显示为版本正在合并到目标版本。了解更多关于比较版本的信息。

来源

选择目标项目
No results found

目标

选择目标项目
No results found
显示更改
源代码提交(5843)
显示
1539 个添加61 个删除
......@@ -21,8 +21,32 @@ notifications:
pullrequests: jira@kafka.apache.org
jira_options: link label
# This list allows you to trigger builds on pull requests. It can have a maximum of 10 people.
# https://cwiki.apache.org/confluence/pages/viewpage.action?spaceKey=INFRA&title=Git+-+.asf.yaml+features#Git.asf.yamlfeatures-JenkinsPRwhitelisting
jenkins:
github_whitelist:
- ConcurrencyPractitioner
- ableegoldman
- cadonna
- FrankYang0529
- kamalcph
- apoorvmittal10
- lianetm
- brandboat
- kirktrue
- nizhikov
- OmniaGM
- dongnuo123
- frankvicky
# This list allows you to triage pull requests. It can have a maximum of 10 people.
# https://cwiki.apache.org/confluence/pages/viewpage.action?spaceKey=INFRA&title=Git+-+.asf.yaml+features#Git.asf.yamlfeatures-AssigningexternalcollaboratorswiththetriageroleonGitHub
github:
collaborators:
- FrankYang0529
- kamalcph
- apoorvmittal10
- lianetm
- brandboat
- kirktrue
- nizhikov
- OmniaGM
- dongnuo123
- frankvicky
\ No newline at end of file
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: "Update Commit Status Check"
description: "Update the status of a commit check using the GH CLI"
inputs:
gh-token:
description: "The GitHub token for use with the CLI"
required: true
commit_sha:
description: "The SHA of the commit we are updating"
required: true
url:
description: "The URL of the status check"
required: false
default: ""
description:
description: "The text to display next to the check"
default: ""
required: false
context:
description: "The name of the status check"
required: true
state:
description: "The state of the check. Can be one of: error, failure, pending, success"
required: true
runs:
using: "composite"
steps:
- name: Update Check
shell: bash
env:
GH_TOKEN: ${{ inputs.gh-token }}
run: |
gh api --method POST -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
/repos/apache/kafka/statuses/${{ inputs.commit_sha }} \
-f "state=${{ inputs.state }}" -f "target_url=${{ inputs.url }}" \
-f "description=${{ inputs.description }}" \
-f "context=${{ inputs.context }}"
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
---
name: "Gradle Setup"
description: "Setup Java and Gradle"
inputs:
java-version:
description: "Java version to use"
default: "17"
gradle-cache-read-only:
description: "Should the Gradle cache be read-only?"
default: "true"
gradle-cache-write-only:
description: "Should the Gradle cache be write-only?"
default: "false"
develocity-access-key:
description: "Optional access key for uploading build scans to Develocity"
default: ""
runs:
using: "composite"
steps:
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: temurin
java-version: ${{ inputs.java-version }}
- name: Setup Gradle
uses: gradle/actions/setup-gradle@af1da67850ed9a4cedd57bfd976089dd991e2582 # v4.0.0
env:
GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED: true
with:
gradle-version: wrapper
develocity-access-key: ${{ inputs.develocity-access-key }}
develocity-token-expiry: 4
cache-read-only: ${{ inputs.gradle-cache-read-only }}
cache-write-only: ${{ inputs.gradle-cache-write-only }}
# Cache downloaded JDKs in addition to the default directories.
gradle-home-cache-includes: |
caches
notifications
jdks
cache-cleanup: on-success
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from glob import glob
import logging
import os
import os.path
import sys
from typing import Tuple, Optional
import xml.etree.ElementTree
# Configure the root logger to send all debug output to stderr, so that
# stdout stays reserved for GitHub workflow commands (the ::notice lines
# printed by parse_report below).
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
def get_env(key: str) -> Optional[str]:
    """Return the value of environment variable *key*, logging the read.

    Returns None when the variable is unset — the previous ``-> str``
    annotation was wrong for that case, since ``os.getenv`` returns None
    for missing keys.
    """
    value = os.getenv(key)
    # The module configures the root logger to write to stderr, so look it
    # up directly rather than relying on the module-level name.
    logging.getLogger().debug(f"Read env {key}: {value}")
    return value
def parse_report(workspace_path, fp) -> Tuple[int, int]:
    """Parse one checkstyle XML report and emit GitHub annotations.

    Streams the XML in *fp* with iterparse and, for each <file> element that
    contains <error> children, prints one "::notice" workflow command per
    error so GitHub renders them as inline annotations. File paths are
    reported relative to *workspace_path*.

    Returns a (file_count, error_count) tuple for the whole report.

    Note: the original kept a `stack` list that was only ever appended to and
    popped, never read — that dead state has been removed.
    """
    log = logging.getLogger()  # module configures the root logger -> stderr
    errors = []                # <error> elements seen inside the current <file>
    file_count = 0
    error_count = 0
    for event, elem in xml.etree.ElementTree.iterparse(fp, events=["start", "end"]):
        if event == "start":
            if elem.tag == "file":
                file_count += 1
                errors.clear()  # begin collecting errors for the new file
            if elem.tag == "error":
                log.debug(f"Found checkstyle error: {elem.attrib}")
                errors.append(elem)
                error_count += 1
        elif event == "end":
            # Emit annotations once the <file> element is fully parsed.
            if elem.tag == "file" and len(errors) > 0:
                filename = elem.get("name")
                rel_path = os.path.relpath(filename, workspace_path)
                log.debug(f"Outputting errors for file: {elem.attrib}")
                for error in errors:
                    line = error.get("line")
                    col = error.get("column")
                    severity = error.get("severity")
                    message = error.get('message')
                    title = f"Checkstyle {severity}"
                    print(f"::notice file={rel_path},line={line},col={col},title={title}::{message}")
        else:
            # Unreachable with events=["start", "end"], kept as a guard.
            log.error(f"Unhandled xml event {event}: {elem}")
    return file_count, error_count
if __name__ == "__main__":
"""
Parse checkstyle XML reports and generate GitHub annotations.
See: https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#setting-a-notice-message
"""
if not os.getenv("GITHUB_WORKSPACE"):
print("This script is intended to by run by GitHub Actions.")
exit(1)
reports = glob(pathname="**/checkstyle/*.xml", recursive=True)
logger.debug(f"Found {len(reports)} checkstyle reports")
total_file_count = 0
total_error_count = 0
workspace_path = get_env("GITHUB_WORKSPACE") # e.g., /home/runner/work/apache/kafka
for report in reports:
with open(report, "r") as fp:
logger.debug(f"Parsing report file: {report}")
file_count, error_count = parse_report(workspace_path, fp)
if error_count == 1:
logger.debug(f"Checked {file_count} files from {report} and found 1 error")
else:
logger.debug(f"Checked {file_count} files from {report} and found {error_count} errors")
total_file_count += file_count
total_error_count += error_count
exit(0)
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import dataclasses
from functools import partial
from glob import glob
import logging
import os
import os.path
import sys
from typing import Any, Iterable, List, Optional, Tuple
import xml.etree.ElementTree
import html
# Root logger writes all debug output to stderr; stdout is reserved for the
# Markdown job summary that this script prints.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.DEBUG)
logger.addHandler(handler)
# Status labels rendered into the Markdown summary.
PASSED = "PASSED ✅"
FAILED = "FAILED ❌"
FLAKY = "FLAKY ⚠️ "
SKIPPED = "SKIPPED 🙈"
def get_env(key: str, fn=str) -> Optional[Any]:
    """Read environment variable *key* and convert it with *fn*.

    Returns None (with a debug log) when the variable is unset; otherwise
    logs the raw string and returns ``fn(value)`` — e.g. ``fn=int`` parses a
    numeric variable. The bare ``-> Optional`` annotation was meaningless;
    the converted value can be any type *fn* produces.
    """
    value = os.getenv(key)
    # The module configures the root logger to write to stderr.
    log = logging.getLogger()
    if value is None:
        log.debug(f"Could not find env {key}")
        return None
    log.debug(f"Read env {key}: {value}")
    return fn(value)
@dataclasses.dataclass
class TestCase:
    """A single <testcase> parsed from a JUnit XML report."""
    test_name: str                      # "name" attribute of the <testcase>
    class_name: str                     # "classname" attribute
    time: float                         # test duration in seconds ("time" attribute)
    failure_message: Optional[str]      # HTML-escaped <failure> message; None for passed/skipped cases
    failure_class: Optional[str]        # exception type from the <failure> "type" attribute
    failure_stack_trace: Optional[str]  # raw text content of the <failure> element

    def key(self) -> Tuple[str, str]:
        # Identity of a test across retries: the (class, test) pair.
        return self.class_name, self.test_name

    def __repr__(self):
        return f"{self.class_name} {self.test_name}"
@dataclasses.dataclass
class TestSuite:
    """Aggregate of one <testsuite> element from a JUnit XML report."""
    name: str                     # suite name ("name" attribute)
    path: str                     # path of the XML report this suite came from
    tests: int                    # "tests" attribute: number of test runs (includes re-runs)
    skipped: int                  # "skipped" attribute
    failures: int                 # "failures" attribute
    errors: int                   # "errors" attribute
    time: float                   # total suite time in seconds ("time" attribute)
    failed_tests: List[TestCase]  # every failing <testcase>, retries included
    skipped_tests: List[TestCase]
    passed_tests: List[TestCase]
def parse_report(workspace_path, report_path, fp) -> Iterable[TestSuite]:
    """Incrementally parse one JUnit XML report, yielding a TestSuite per <testsuite>.

    Streams *fp* with iterparse so large reports are never fully loaded. Each
    <testcase> is captured as a partial(TestCase, ...) on its "start" event;
    the remaining failure fields are filled in if a nested <failure> or
    <skipped> element appears, and a case with neither is recorded as passed
    when its "end" event arrives. *report_path* is stored on the suite so the
    caller knows which file it came from.
    """
    cur_suite: Optional[TestSuite] = None
    # Holds TestCase construction args until we know pass/fail/skip outcome.
    partial_test_case = None
    test_case_failed = False
    for (event, elem) in xml.etree.ElementTree.iterparse(fp, events=["start", "end"]):
        if event == "start":
            if elem.tag == "testsuite":
                name = elem.get("name")
                tests = int(elem.get("tests", 0))
                skipped = int(elem.get("skipped", 0))
                failures = int(elem.get("failures", 0))
                errors = int(elem.get("errors", 0))
                suite_time = float(elem.get("time", 0.0))
                cur_suite = TestSuite(name, report_path, tests, skipped, failures, errors, suite_time, [], [], [])
            elif elem.tag == "testcase":
                test_name = elem.get("name")
                class_name = elem.get("classname")
                test_time = float(elem.get("time", 0.0))
                # Defer construction: failure details (if any) come from child elements.
                partial_test_case = partial(TestCase, test_name, class_name, test_time)
                test_case_failed = False
            elif elem.tag == "failure":
                failure_message = elem.get("message")
                if failure_message:
                    # Escape and flatten newlines so the message fits a one-row Markdown table cell.
                    failure_message = html.escape(failure_message)
                    failure_message = failure_message.replace('\n', '<br>').replace('\r', '<br>')
                failure_class = elem.get("type")
                # NOTE(review): read at the "start" event — elem.text may not be
                # fully populated until the "end" event; confirm this is intended.
                failure_stack_trace = elem.text
                failure = partial_test_case(failure_message, failure_class, failure_stack_trace)
                cur_suite.failed_tests.append(failure)
                test_case_failed = True
            elif elem.tag == "skipped":
                skipped = partial_test_case(None, None, None)
                cur_suite.skipped_tests.append(skipped)
            else:
                pass
        elif event == "end":
            if elem.tag == "testcase":
                # No <failure> child was seen, so this run passed.
                if not test_case_failed:
                    passed = partial_test_case(None, None, None)
                    cur_suite.passed_tests.append(passed)
                partial_test_case = None
            elif elem.tag == "testsuite":
                yield cur_suite
                cur_suite = None
        else:
            # Unreachable with events=["start", "end"], kept as a guard.
            logger.error(f"Unhandled xml event {event}: {elem}")
def pretty_time_duration(seconds: float) -> str:
    """Render a duration in seconds as a compact string like "1h2m5s".

    Hours and minutes are omitted when zero; seconds are always shown.
    Fractional seconds are truncated.
    """
    whole_seconds = int(seconds)
    hours, remainder = divmod(whole_seconds, 3600)
    minutes, secs = divmod(remainder, 60)
    parts = []
    if hours > 0:
        parts.append(f"{hours}h")
    if minutes > 0:
        parts.append(f"{minutes}m")
    parts.append(f"{secs}s")
    return "".join(parts)
if __name__ == "__main__":
"""
Parse JUnit XML reports and generate GitHub job summary in Markdown format.
A Markdown summary of the test results is written to stdout. This should be redirected to $GITHUB_STEP_SUMMARY
within the action. Additional debug logs are written to stderr.
Exits with status code 0 if no tests failed, 1 otherwise.
"""
parser = argparse.ArgumentParser(description="Parse JUnit XML results.")
parser.add_argument("--path",
required=False,
default="build/junit-xml/**/*.xml",
help="Path to XML files. Glob patterns are supported.")
if not os.getenv("GITHUB_WORKSPACE"):
print("This script is intended to by run by GitHub Actions.")
exit(1)
args = parser.parse_args()
reports = glob(pathname=args.path, recursive=True)
logger.debug(f"Found {len(reports)} JUnit results")
workspace_path = get_env("GITHUB_WORKSPACE") # e.g., /home/runner/work/apache/kafka
total_file_count = 0
total_run = 0 # All test runs according to <testsuite tests="N"/>
total_skipped = 0 # All skipped tests according to <testsuite skipped="N"/>
total_errors = 0 # All test errors according to <testsuite errors="N"/>
total_time = 0 # All test time according to <testsuite time="N"/>
total_failures = 0 # All unique test names that only failed. Re-run tests not counted
total_flaky = 0 # All unique test names that failed and succeeded
total_success = 0 # All unique test names that only succeeded. Re-runs not counted
total_tests = 0 # All unique test names that were run. Re-runs not counted
failed_table = []
flaky_table = []
skipped_table = []
for report in reports:
with open(report, "r") as fp:
logger.debug(f"Parsing {report}")
for suite in parse_report(workspace_path, report, fp):
total_skipped += suite.skipped
total_errors += suite.errors
total_time += suite.time
total_run += suite.tests
# Due to how the Develocity Test Retry plugin interacts with our generated ClusterTests, we can see
# tests pass and then fail in the same run. Because of this, we need to capture all passed and all
# failed for each suite. Then we can find flakes by taking the intersection of those two.
all_suite_passed = {test.key() for test in suite.passed_tests}
all_suite_failed = {test.key(): test for test in suite.failed_tests}
flaky = all_suite_passed & all_suite_failed.keys()
all_tests = all_suite_passed | all_suite_failed.keys()
total_tests += len(all_tests)
total_flaky += len(flaky)
total_failures += len(all_suite_failed) - len(flaky)
total_success += len(all_suite_passed) - len(flaky)
# Display failures first. Iterate across the unique failed tests to avoid duplicates in table.
for test_failure in all_suite_failed.values():
if test_failure.key() in flaky:
continue
logger.debug(f"Found test failure: {test_failure}")
simple_class_name = test_failure.class_name.split(".")[-1]
failed_table.append((simple_class_name, test_failure.test_name, test_failure.failure_message, f"{test_failure.time:0.2f}s"))
for test_failure in all_suite_failed.values():
if test_failure.key() not in flaky:
continue
logger.debug(f"Found flaky test: {test_failure}")
simple_class_name = test_failure.class_name.split(".")[-1]
flaky_table.append((simple_class_name, test_failure.test_name, test_failure.failure_message, f"{test_failure.time:0.2f}s"))
for skipped_test in suite.skipped_tests:
simple_class_name = skipped_test.class_name.split(".")[-1]
logger.debug(f"Found skipped test: {skipped_test}")
skipped_table.append((simple_class_name, skipped_test.test_name))
duration = pretty_time_duration(total_time)
logger.info(f"Finished processing {len(reports)} reports")
# Print summary
report_url = get_env("REPORT_URL")
report_md = f"Download [HTML report]({report_url})."
summary = (f"{total_run} tests cases run in {duration}. "
f"{total_success} {PASSED}, {total_failures} {FAILED}, "
f"{total_flaky} {FLAKY}, {total_skipped} {SKIPPED}, and {total_errors} errors.")
print("## Test Summary\n")
print(f"{summary} {report_md}\n")
if len(failed_table) > 0:
logger.info(f"Found {len(failed_table)} test failures:")
print("### Failed Tests\n")
print(f"| Module | Test | Message | Time |")
print(f"| ------ | ---- | ------- | ---- |")
for row in failed_table:
logger.info(f"{FAILED} {row[0]} > {row[1]}")
row_joined = " | ".join(row)
print(f"| {row_joined} |")
print("\n")
if len(flaky_table) > 0:
logger.info(f"Found {len(flaky_table)} flaky test failures:")
print("### Flaky Tests\n")
print(f"| Module | Test | Message | Time |")
print(f"| ------ | ---- | ------- | ---- |")
for row in flaky_table:
logger.info(f"{FLAKY} {row[0]} > {row[1]}")
row_joined = " | ".join(row)
print(f"| {row_joined} |")
print("\n")
if len(skipped_table) > 0:
print("<details>")
print(f"<summary>{len(skipped_table)} Skipped Tests</summary>\n")
print(f"| Module | Test |")
print(f"| ------ | ---- |")
for row in skipped_table:
row_joined = " | ".join(row)
print(f"| {row_joined} |")
print("\n</details>")
# Print special message if there was a timeout
exit_code = get_env("GRADLE_EXIT_CODE", int)
if exit_code == 124:
logger.debug(f"Gradle command timed out. These are partial results!")
logger.debug(summary)
logger.debug("Failing this step because the tests timed out.")
exit(1)
elif exit_code in (0, 1):
logger.debug(summary)
if total_failures > 0:
logger.debug(f"Failing this step due to {total_failures} test failures")
exit(1)
elif total_errors > 0:
logger.debug(f"Failing this step due to {total_errors} test errors")
exit(1)
else:
exit(0)
else:
logger.debug(f"Gradle had unexpected exit code {exit_code}. Failing this step")
exit(1)
# GitHub Actions
## Overview
The entry point for our build is the "CI" workflow which is defined in ci.yml.
This is used for both PR and trunk builds. The jobs and steps of the workflow
are defined in build.yml.
## Opting-in to GitHub Actions
To opt in to the new GitHub Actions workflows, simply name your branch with a
prefix of "gh-". For example, `gh-KAFKA-17433-deflake`
## Disabling Email Notifications
By default, GitHub sends an email for each failed action run. To change this,
visit https://github.com/settings/notifications and find System -> Actions.
Here you can change your notification preferences.
## Publishing Build Scans
> This only works for committers (who have ASF accounts on ge.apache.org).
There are two ways committers can have build scans published. The simplest
way is to push their branches to apache/kafka. This will allow GitHub Actions to
have access to the repository secret needed to publish the scan.
Alternatively, committers can create pull requests against their own forks and
configure their own access keys as repository secrets.
Log in to https://ge.apache.org/, click on My Settings and then Access Keys.
Generate an Access Key and give it a name like "github-actions". Copy the key
down somewhere safe.
On your fork of apache/kafka, navigate to Settings -> Security -> Secrets and
Variables -> Actions. In the Secrets tab, click Create a New Repository Secret.
The name of the secret should be `GE_ACCESS_TOKEN` and the value should
be `ge.apache.org=abc123` where "abc123" is substituted for the Access Key you
previously generated.
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Check and Test
on:
workflow_call:
inputs:
gradle-cache-read-only:
description: "Should the Gradle cache be read-only?"
default: "true"
type: string
gradle-cache-write-only:
description: "Should the Gradle cache be write-only?"
default: "false"
type: string
is-public-fork:
description: "Is this CI run from a public fork?"
default: "true"
type: string
jobs:
validate:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
java: [ 21, 17, 11, 8 ]
name: Compile and Check Java ${{ matrix.java }}
steps:
- name: Env
run: printenv
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
java-version: ${{ matrix.java }}
gradle-cache-read-only: ${{ inputs.gradle-cache-read-only }}
gradle-cache-write-only: ${{ inputs.gradle-cache-write-only }}
develocity-access-key: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Compile and validate
# Gradle flags
# --build-cache: Let Gradle restore the build cache
# --info: For now, we'll generate lots of logs while setting up the GH Actions
# --scan: Publish the build scan. This will only work on PRs from apache/kafka and trunk
# --no-scan: For public fork PRs, we won't attempt to publish the scan
run: |
./gradlew --build-cache --info \
${{ inputs.is-public-fork == 'true' && '--no-scan' || '--scan' }} \
check -x test
- name: Archive check reports
if: always()
uses: actions/upload-artifact@v4
with:
name: check-reports-${{ matrix.java }}
path: |
**/build/**/*.html
if-no-files-found: ignore
- name: Annotate checkstyle errors
# Avoid duplicate annotations, only run on java 21
if: ${{ failure() && matrix.java == '21' }}
run: python .github/scripts/checkstyle.py
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
test:
needs: validate
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
java: [ 17, 11 ] # If we change these, make sure to adjust ci-complete.yml
name: JUnit tests Java ${{ matrix.java }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
java-version: ${{ matrix.java }}
gradle-cache-read-only: ${{ inputs.gradle-cache-read-only }}
gradle-cache-write-only: ${{ inputs.gradle-cache-write-only }}
develocity-access-key: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Test
# Gradle flags
# --build-cache: Let Gradle restore the build cache
# --scan: Attempt to publish build scans in PRs. This will only work on PRs from apache/kafka, not public forks.
# --continue: Keep running even if a test fails
# -PcommitId Prevent the Git SHA being written into the jar files (which breaks caching)
id: junit-test
run: |
set +e
timeout 180m ./gradlew --build-cache --continue \
${{ inputs.is-public-fork == 'true' && '--no-scan' || '--scan' }} \
-PtestLoggingEvents=started,passed,skipped,failed \
-PmaxParallelForks=2 \
-PmaxTestRetries=1 -PmaxTestRetryFailures=10 \
-PcommitId=xxxxxxxxxxxxxxxx \
test
exitcode="$?"
echo "exitcode=$exitcode" >> $GITHUB_OUTPUT
- name: Archive JUnit reports
uses: actions/upload-artifact@v4
id: junit-upload-artifact
with:
name: junit-reports-${{ matrix.java }}
path: |
**/build/reports/tests/test/*
if-no-files-found: ignore
- name: Parse JUnit tests
run: python .github/scripts/junit.py >> $GITHUB_STEP_SUMMARY
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
REPORT_URL: ${{ steps.junit-upload-artifact.outputs.artifact-url }}
GRADLE_EXIT_CODE: ${{ steps.junit-test.outputs.exitcode }}
- name: Archive Build Scan
if: always()
uses: actions/upload-artifact@v4
with:
name: build-scan-test-${{ matrix.java }}
path: ~/.gradle/build-scan-data
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: CI Complete
on:
workflow_run:
workflows: [CI]
types:
- completed
run-name: Build Scans for ${{ github.event.workflow_run.display_title}}
# This workflow runs after the completion of the CI workflow triggered on a "pull_request" event.
# The "pull_request" event type is run in an unprivileged context without access to the repository
# secrets. This means that PRs from public forks cannot publish Gradle Build Scans or modify the
# PR contents.
#
# This "workflow_run" triggered workflow is run in a privileged context and so does have access to
# the repository secrets. Here we can download the build scan files produced by a PR and publish
# them to ge.apache.org.
#
# If we need to do things like comment on, label, or otherwise modify PRs from public forks. This
# workflow is the place to do it. PR number is ${{ github.event.workflow_run.pull_requests[0].number }}
jobs:
upload-build-scan:
# Skip this workflow if CI was run for anything other than "pull_request" (like "push").
# Skip this workflow if the PR was from apache/kafka. Those will have already published the build scan in CI.
# Skip this workflow if the run was skipped or cancelled
if: |
github.event.workflow_run.event == 'pull_request' &&
github.event.workflow_run.head_repository.full_name != 'apache/kafka' &&
(github.event.workflow_run.conclusion == 'success' || github.event.workflow_run.conclusion == 'failure')
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
java: [ 17, 11 ]
steps:
- name: Env
run: printenv
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials:
false
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
java-version: ${{ matrix.java }}
develocity-access-key: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Download build scan archive
id: download-build-scan
uses: actions/download-artifact@v4
continue-on-error: true
with:
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
name: build-scan-test-${{ matrix.java }}
path: ~/.gradle/build-scan-data # This is where Gradle buffers unpublished build scan data when --no-scan is given
- name: Handle missing scan
if: ${{ steps.download-build-scan.outcome == 'failure' }}
uses: ./.github/actions/gh-api-update-status
with:
gh-token: ${{ secrets.GITHUB_TOKEN }}
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: '${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}'
description: 'Could not find build scan'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
state: 'error'
- name: Publish Scan
id: publish-build-scan
continue-on-error: true
if: ${{ steps.download-build-scan.outcome == 'success' }}
run: |
./gradlew --info buildScanPublishPrevious > gradle.out
SCAN_URL=$(grep '^https://.*$' gradle.out)
cat gradle.out
echo "Published build scan to $SCAN_URL" >> $GITHUB_STEP_SUMMARY
echo "build-scan-url=$SCAN_URL" >> $GITHUB_OUTPUT
- name: Handle failed publish
if: ${{ steps.publish-build-scan.outcome == 'failure' }}
uses: ./.github/actions/gh-api-update-status
with:
gh-token: ${{ secrets.GITHUB_TOKEN }}
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: '${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}'
description: 'The build scan failed to be published'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
state: 'error'
- name: Update Status Check
if: ${{ steps.publish-build-scan.outcome == 'success' }}
uses: ./.github/actions/gh-api-update-status
with:
gh-token: ${{ secrets.GITHUB_TOKEN }}
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: ${{ steps.publish-build-scan.outputs.build-scan-url }}
description: 'The build scan was successfully published'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
state: 'success'
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: CI
on:
push:
branches:
- 'trunk'
pull_request:
types: [ opened, synchronize, reopened ]
branches:
- 'trunk'
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: ${{ github.ref != 'refs/heads/trunk' }}
jobs:
build:
uses: ./.github/workflows/build.yml
with:
gradle-cache-read-only: ${{ github.ref != 'refs/heads/trunk' }}
gradle-cache-write-only: ${{ github.ref == 'refs/heads/trunk' }}
is-public-fork: ${{ github.event.pull_request.head.repo.fork == 'true' }}
secrets:
inherit
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: 'Deflake a test'
on:
workflow_dispatch:
inputs:
test-module:
description: 'Gradle sub-module which contains the test being de-flaked. Should be like :core'
required: true
type: string
test-pattern:
description: 'Test class to de-flake (must be a ClusterTest). Should be like *SomeTest*'
required: true
type: string
test-repeat:
description: 'Number of times to invoke the test'
required: true
type: number
default: 1
java-version:
description: 'Java version to use.'
required: true
type: string
default: '17'
jobs:
deflake:
runs-on: ubuntu-latest
name: Deflake JUnit tests
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
java-version: ${{ inputs.java-version }}
gradle-cache-read-only: true
develocity-access-key: ${{ secrets.GE_ACCESS_TOKEN }}
- name: Test
timeout-minutes: 60
id: junit-test
run: |
set +e
./gradlew --info --build-cache --scan --continue \
-PtestLoggingEvents=started,passed,skipped,failed \
-PignoreFailures=true -PmaxParallelForks=2 \
-Pkafka.cluster.test.repeat=${{ inputs.test-repeat }} \
${{ inputs.test-module }}:test --tests ${{ inputs.test-pattern }}
exitcode="$?"
echo "exitcode=$exitcode" >> $GITHUB_OUTPUT
- name: Archive JUnit reports
uses: actions/upload-artifact@v4
id: junit-upload-artifact
with:
name: junit-reports-${{ inputs.java-version }}
path: |
**/build/reports/tests/test/*
if-no-files-found: ignore
- name: Parse JUnit tests
run: python .github/scripts/junit.py >> $GITHUB_STEP_SUMMARY
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
REPORT_URL: ${{ steps.junit-upload-artifact.outputs.artifact-url }}
GRADLE_EXIT_CODE: ${{ steps.junit-test.outputs.exitcode }}
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Docker Build Test
on:
workflow_dispatch:
inputs:
image_type:
type: choice
description: Docker image type to build and test
options:
- "jvm"
- "native"
kafka_url:
description: Kafka url to be used to build the docker image
required: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docker/requirements.txt
- name: Build image and run tests
working-directory: ./docker
run: |
python docker_build_test.py kafka/test -tag=test -type=${{ github.event.inputs.image_type }} -u=${{ github.event.inputs.kafka_url }}
- name: Run CVE scan
uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
with:
image-ref: 'kafka/test:test'
format: 'table'
severity: 'CRITICAL,HIGH'
output: scan_report_${{ github.event.inputs.image_type }}.txt
exit-code: '1'
- name: Upload test report
if: always()
uses: actions/upload-artifact@v4
with:
name: report_${{ github.event.inputs.image_type }}.html
path: docker/test/report_${{ github.event.inputs.image_type }}.html
- name: Upload CVE scan report
if: always()
uses: actions/upload-artifact@v4
with:
name: scan_report_${{ github.event.inputs.image_type }}.txt
path: scan_report_${{ github.event.inputs.image_type }}.txt
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Docker Official Image Build Test
on:
workflow_dispatch:
inputs:
image_type:
type: choice
description: Docker image type to build and test
options:
- "jvm"
kafka_version:
description: Kafka version for the docker official image. This should be >=3.7.0
required: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docker/requirements.txt
- name: Build image and run tests
working-directory: ./docker
run: |
python docker_official_image_build_test.py kafka/test -tag=test -type=${{ github.event.inputs.image_type }} -v=${{ github.event.inputs.kafka_version }}
- name: Run CVE scan
uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
with:
image-ref: 'kafka/test:test'
format: 'table'
severity: 'CRITICAL,HIGH'
output: scan_report_${{ github.event.inputs.image_type }}.txt
exit-code: '1'
- name: Upload test report
if: always()
uses: actions/upload-artifact@v4
with:
name: report_${{ github.event.inputs.image_type }}.html
path: docker/test/report_${{ github.event.inputs.image_type }}.html
- name: Upload CVE scan report
if: always()
uses: actions/upload-artifact@v4
with:
name: scan_report_${{ github.event.inputs.image_type }}.txt
path: scan_report_${{ github.event.inputs.image_type }}.txt
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Promote Release Candidate Docker Image
on:
workflow_dispatch:
inputs:
rc_docker_image:
      description: RC docker image that needs to be promoted (for example apache/kafka:3.8.0-rc0 or apache/kafka-native:3.8.0-rc0)
required: true
promoted_docker_image:
      description: Docker image name of the promoted image (for example apache/kafka:3.8.0 or apache/kafka-native:3.8.0)
required: true
jobs:
promote:
if: github.repository == 'apache/kafka'
runs-on: ubuntu-latest
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Copy RC Image to promoted image
run: |
docker buildx imagetools create --tag ${{ github.event.inputs.promoted_docker_image }} ${{ github.event.inputs.rc_docker_image }}
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Build and Push Release Candidate Docker Image
on:
workflow_dispatch:
inputs:
image_type:
type: choice
description: Docker image type to be built and pushed
options:
- "jvm"
- "native"
rc_docker_image:
      description: RC docker image that needs to be built and pushed to Dockerhub (for example apache/kafka:3.8.0-rc0 or apache/kafka-native:3.8.0-rc0)
required: true
kafka_url:
description: Kafka url to be used to build the docker image
required: true
jobs:
release:
if: github.repository == 'apache/kafka'
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docker/requirements.txt
- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1
- name: Login to Docker Hub
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
username: ${{ secrets.DOCKERHUB_USER }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Release the RC docker image
run: |
python docker/docker_release.py ${{ github.event.inputs.rc_docker_image }} --kafka-url ${{ github.event.inputs.kafka_url }} --image-type ${{ github.event.inputs.image_type }}
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Docker Image CVE Scanner
on:
schedule:
# This job will run at 3:30 UTC daily
- cron: '30 3 * * *'
workflow_dispatch:
jobs:
scan_jvm:
if: github.repository == 'apache/kafka'
runs-on: ubuntu-latest
strategy:
matrix:
# This is an array of supported tags. Make sure this array only contains the supported tags
supported_image_tag: ['latest', '3.7.1']
steps:
- name: Run CVE scan
uses: aquasecurity/trivy-action@6e7b7d1fd3e4fef0c5fa8cce1229c54b2c9bd0d8 # v0.24.0
if: always()
with:
image-ref: apache/kafka:${{ matrix.supported_image_tag }}
format: 'table'
severity: 'CRITICAL,HIGH'
output: scan_report_jvm_${{ matrix.supported_image_tag }}.txt
exit-code: '1'
- name: Upload CVE scan report
if: always()
uses: actions/upload-artifact@v4
with:
name: scan_report_jvm_${{ matrix.supported_image_tag }}.txt
path: scan_report_jvm_${{ matrix.supported_image_tag }}.txt
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Docker Prepare Docker Official Image Source
on:
workflow_dispatch:
inputs:
image_type:
type: choice
description: Docker image type to build and test
options:
- "jvm"
kafka_version:
description: Kafka version for the docker official image. This should be >=3.7.0
required: true
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docker/requirements.txt
- name: Build Docker Official Image Artifact
working-directory: ./docker
run: |
python prepare_docker_official_image_source.py -type=${{ github.event.inputs.image_type }} -v=${{ github.event.inputs.kafka_version }}
- name: Upload Docker Official Image Artifact
if: success()
uses: actions/upload-artifact@v4
with:
name: ${{ github.event.inputs.kafka_version }}
path: docker/docker_official_images/${{ github.event.inputs.kafka_version }}
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: 'Handle stale PRs'
on:
schedule:
- cron: '30 3 * * *' # Run at 3:30 UTC nightly
workflow_dispatch:
inputs:
dryRun:
description: 'Dry Run'
required: true
default: true
type: boolean
operationsPerRun:
description: 'Max GitHub API operations'
required: true
default: 30
type: number
permissions:
issues: write
pull-requests: write
jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9
with:
debug-only: ${{ inputs.dryRun || false }}
        operations-per-run: ${{ inputs.operationsPerRun || 100 }} # manual runs default to 30 (input default); scheduled runs have no inputs and fall back to 100
ascending: true
days-before-stale: 90
days-before-close: 30 # Since adding 'stale' will update the PR, days-before-close is relative to that.
stale-pr-label: 'stale'
stale-pr-message: |
This PR is being marked as stale since it has not had any activity in 90 days. If you
would like to keep this PR alive, please leave a comment asking for a review. If the PR has
merge conflicts, update it with the latest from the base branch.
<p>
If you are having difficulty finding a reviewer, please reach out on the
[mailing list](https://kafka.apache.org/contact).
<p>
If this PR is no longer valid or desired, please feel free to close it. If no activity
occurs in the next 30 days, it will be automatically closed.
close-pr-label: 'closed-stale'
close-pr-message: |
This PR has been closed since it has not had any activity in 120 days. If you feel like this
was a mistake, or you would like to continue working on it, please feel free to re-open the
PR and ask for a review.
......@@ -6,6 +6,7 @@ build/
build_eclipse/
out/
.gradle/
.vscode/
lib_managed/
src_managed/
project/boot/
......@@ -33,7 +34,6 @@ Vagrantfile.local
config/server-*
config/zookeeper-*
core/data/*
gradle/wrapper/*.jar
gradlew.bat
......@@ -51,8 +51,13 @@ docs/generated/
kafkatest.egg-info/
systest/
*.swp
clients/src/generated
clients/src/generated-test
jmh-benchmarks/generated
jmh-benchmarks/src/main/generated
streams/src/generated
**/.jqwik-database
**/src/generated
**/src/generated-test
storage/kafka-tiered-storage/
docker/test/report_*.html
kafka.Kafka
__pycache__
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sudo: required
dist: trusty
language: java
env:
- _DUCKTAPE_OPTIONS="--subset 0 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 1 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 2 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 3 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 4 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 5 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 6 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 7 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 8 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 9 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 10 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 11 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 12 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 13 --subsets 15"
- _DUCKTAPE_OPTIONS="--subset 14 --subsets 15"
jdk:
- oraclejdk8
before_install:
- gradle wrapper
script:
- ./gradlew rat
- ./gradlew systemTestLibs && /bin/bash ./tests/docker/run_tests.sh
services:
- docker
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- "$HOME/.m2/repository"
- "$HOME/.gradle/caches/"
- "$HOME/.gradle/wrapper/"
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Run static validation: everything bound to Gradle's `check` lifecycle task
// (compile, spotbugs, checkstyle, rat, ...) EXCEPT the unit/integration tests,
// which are executed separately via doTest(). `retry_zinc` re-runs the build
// once if the zinc Scala incremental compiler fails transiently.
def doValidation() {
  // Run all the tasks associated with `check` except for `test` - the latter is executed via `doTest`
  sh """
    ./retry_zinc ./gradlew -PscalaVersion=$SCALA_VERSION clean check -x test \
        --profile --continue -PxmlSpotBugsReport=true -PkeepAliveMode="session"
  """
}
// Return true when this build was triggered by a change request (pull request):
// Jenkins populates CHANGE_ID only for change-request builds.
def isChangeRequest(env) {
  def changeId = env.CHANGE_ID
  return changeId != null && !changeId.isEmpty()
}
// Run the given Gradle test target (default: `test`) with bounded parallelism
// and flaky-test retries, then archive the JUnit XML reports so Jenkins can
// render per-test results. -PignoreFailures=true keeps the build going so the
// `junit` step (not Gradle) decides the final build status.
def doTest(env, target = "test") {
  sh """./gradlew -PscalaVersion=$SCALA_VERSION ${target} \
      --profile --continue -PkeepAliveMode="session" -PtestLoggingEvents=started,passed,skipped,failed \
      -PignoreFailures=true -PmaxParallelForks=2 -PmaxTestRetries=1 -PmaxTestRetryFailures=10"""
  junit '**/build/test-results/**/TEST-*.xml'
}
// Run the test suite only for branch (non-PR) builds; change-request builds
// skip testing in the stages that call this helper.
def runTestOnDevBranch(env) {
  if (isChangeRequest(env)) {
    return
  }
  doTest(env)
}
// Verify that a fresh project generated from the Kafka Streams quickstart
// Maven archetype compiles. Steps: publish the required Kafka artifacts to
// the local Maven repo, install the archetype, generate a sample project
// from it, and compile that project. Each shell step exits 1 with a
// descriptive message on failure so the caller can react.
def doStreamsArchetype() {
  echo 'Verify that Kafka Streams archetype compiles'

  sh '''
    ./gradlew streams:publishToMavenLocal clients:publishToMavenLocal connect:json:publishToMavenLocal connect:api:publishToMavenLocal \
        || { echo 'Could not publish kafka-streams.jar (and dependencies) locally to Maven'; exit 1; }
  '''

  // Read the project version from gradle.properties. NOTE: no `def`, so this
  // binds a script-global, making VERSION visible to the nested sh block below.
  VERSION = sh(script: 'grep "^version=" gradle.properties | cut -d= -f 2', returnStdout: true).trim()

  dir('streams/quickstart') {
    // Install the quickstart archetype into the local Maven repository.
    sh '''
      mvn clean install -Dgpg.skip \
          || { echo 'Could not `mvn install` streams quickstart archetype'; exit 1; }
    '''

    dir('test-streams-archetype') {
      // Note the double quotes for variable interpolation
      sh """
        echo "Y" | mvn archetype:generate \
            -DarchetypeCatalog=local \
            -DarchetypeGroupId=org.apache.kafka \
            -DarchetypeArtifactId=streams-quickstart-java \
            -DarchetypeVersion=${VERSION} \
            -DgroupId=streams.examples \
            -DartifactId=streams.examples \
            -Dversion=0.1 \
            -Dpackage=myapps \
            || { echo 'Could not create new project using streams quickstart archetype'; exit 1; }
      """

      dir('streams.examples') {
        // Compile the generated sample project against the locally published jars.
        sh '''
          mvn compile \
              || { echo 'Could not compile streams quickstart archetype project'; exit 1; }
        '''
      }
    }
  }
}
// Best-effort wrapper around doStreamsArchetype(): any failure downgrades the
// build to UNSTABLE rather than failing it, since the archetype check is a
// secondary signal compared to the main build/test stages.
def tryStreamsArchetype() {
  try {
    doStreamsArchetype()
  } catch(err) {
    echo 'Failed to build Kafka Streams archetype, marking this build UNSTABLE'
    currentBuild.result = 'UNSTABLE'
  }
}
// Declarative CI pipeline: builds and tests Kafka on four JDK/Scala
// combinations in parallel, then (for branch builds) emails dev@ about
// unstable results.
pipeline {
  agent none  // each parallel stage requests its own agent below

  options {
    // Abort a superseded in-flight build, but only for PR builds; branch
    // builds are allowed to run to completion.
    disableConcurrentBuilds(abortPrevious: isChangeRequest(env))
  }

  stages {
    stage('Build') {
      parallel {
        // Oldest supported combination; also the only stage that verifies
        // the Kafka Streams quickstart archetype (needs Maven).
        stage('JDK 8 and Scala 2.12') {
          agent { label 'ubuntu' }
          tools {
            jdk 'jdk_1.8_latest'
            maven 'maven_3_latest'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.12
          }
          steps {
            doValidation()
            doTest(env)
            tryStreamsArchetype()
          }
        }

        // JDK 11: tests run only on branch builds (skipped for PRs).
        stage('JDK 11 and Scala 2.13') {
          agent { label 'ubuntu' }
          tools {
            jdk 'jdk_11_latest'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.13
          }
          steps {
            doValidation()
            runTestOnDevBranch(env)
            echo 'Skipping Kafka Streams archetype test for Java 11'
          }
        }

        // JDK 17: tests run only on branch builds (skipped for PRs).
        stage('JDK 17 and Scala 2.13') {
          agent { label 'ubuntu' }
          tools {
            jdk 'jdk_17_latest'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.13
          }
          steps {
            doValidation()
            runTestOnDevBranch(env)
            echo 'Skipping Kafka Streams archetype test for Java 17'
          }
        }

        // Newest JDK: always runs the full test suite, PRs included.
        stage('JDK 21 and Scala 2.13') {
          agent { label 'ubuntu' }
          tools {
            jdk 'jdk_21_latest'
          }
          options {
            timeout(time: 8, unit: 'HOURS')
            timestamps()
          }
          environment {
            SCALA_VERSION=2.13
          }
          steps {
            doValidation()
            doTest(env)
            echo 'Skipping Kafka Streams archetype test for Java 21'
          }
        }
      }
    }
  }

  post {
    always {
      script {
        // Notify the dev mailing list about branch-build results only;
        // PR builds surface their status on the PR itself.
        if (!isChangeRequest(env)) {
          node('ubuntu') {
            step([$class: 'Mailer',
                  notifyEveryUnstableBuild: true,
                  recipients: "dev@kafka.apache.org",
                  sendToIndividuals: false])
          }
        }
      }
    }
  }
}