Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion ci/defs/job_configs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1045,7 +1045,7 @@ class JobConfigs:
docker_keeper = Job.Config(
name=JobNames.DOCKER_KEEPER,
runs_on=RunnerLabels.STYLE_CHECK_AMD,
command="python3 ./ci/jobs/docker_server.py --tag-type head --allow-build-reuse",
command="python3 ./ci/jobs/docker_server.py --tag-type head --allow-build-reuse --push",
digest_config=Job.CacheDigestConfig(
include_paths=[
"./ci/jobs/docker_server.py",
Expand Down
6 changes: 3 additions & 3 deletions ci/jobs/docker_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,10 +58,10 @@ def docker_login(relogin: bool = True) -> None:
"docker system info | grep --quiet -E 'Username|Registry'"
):
Shell.check(
"docker login --username 'robotclickhouse' --password-stdin",
"docker login --username 'altinityinfra' --password-stdin",
strict=True,
stdin_str=Secret.Config(
"dockerhub_robot_password", type=Secret.Type.AWS_SSM_PARAMETER
"DOCKER_PASSWORD", type=Secret.Type.GH_SECRET
).get_value(),
encoding="utf-8",
)
Expand Down Expand Up @@ -348,7 +348,7 @@ def main():
push = True

image = DockerImageData(image_repo, image_path)
tags = [f'{info.pr_number}-{version_dict["string"]}']
tags = [f'{info.pr_number}-{version_dict["describe"]}']
repo_urls = {}
direct_urls: Dict[str, List[str]] = {}

Expand Down
144 changes: 144 additions & 0 deletions ci/jobs/integration_test_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
from pathlib import Path
from typing import List, Tuple

import yaml # NOTE (strtgbb): Used for loading broken tests rules
import re

from ci.jobs.scripts.find_tests import Targeting
from ci.jobs.scripts.integration_tests_configs import IMAGES_ENV, get_optimal_test_batch
from ci.praktika.info import Info
Expand All @@ -22,6 +25,106 @@
MAX_MEM_PER_WORKER = 11


def get_broken_tests_rules(broken_tests_file_path: str) -> dict:
    """Load and pre-compile the known-broken-tests rules from a YAML file.

    Each entry in the YAML list must have a ``name`` and a ``reason``; it may
    also carry ``message`` / ``not_message`` log substrings (treated as
    regexes when ``regex: true``) and a ``check_types`` list restricting
    which job flags the rule applies to.

    Returns:
        dict with two buckets:
          * ``"exact"``   — test name (str) -> rule dict
          * ``"pattern"`` — compiled name regex -> rule dict (``regex: true`` entries)

    Raises:
        ValueError: if the file is missing, empty, or does not parse to a
            list of rule entries.
    """
    if (
        not os.path.isfile(broken_tests_file_path)
        or os.path.getsize(broken_tests_file_path) == 0
    ):
        raise ValueError(
            "There is something wrong with getting broken tests rules: "
            f"file '{broken_tests_file_path}' is empty or does not exist."
        )

    with open(broken_tests_file_path, "r", encoding="utf-8") as broken_tests_file:
        broken_tests = yaml.safe_load(broken_tests_file)

    # A file containing only comments/whitespace parses to None, and any
    # non-list document would crash the loop below with an opaque TypeError —
    # fail early with an actionable message instead.
    if not isinstance(broken_tests, list):
        raise ValueError(
            "There is something wrong with getting broken tests rules: "
            f"file '{broken_tests_file_path}' did not parse to a list of rules."
        )

    compiled_rules = {"exact": {}, "pattern": {}}

    for test in broken_tests:
        regex = test.get("regex") is True
        rule = {
            "reason": test["reason"],
        }

        # message / not_message are later matched against the failed test's
        # log; compile them up-front when the rule is a regex rule.
        if test.get("message"):
            rule["message"] = re.compile(test["message"]) if regex else test["message"]

        if test.get("not_message"):
            rule["not_message"] = (
                re.compile(test["not_message"]) if regex else test["not_message"]
            )

        if test.get("check_types"):
            rule["check_types"] = test["check_types"]

        if regex:
            rule["regex"] = True
            compiled_rules["pattern"][re.compile(test["name"])] = rule
        else:
            compiled_rules["exact"][test["name"]] = rule

    return compiled_rules


def test_is_known_fail(broken_tests_rules, test_name, test_logs, job_flags):
matching_rules = []

def matches_substring(substring, log, is_regex):
if log is None:
return False
if is_regex:
return bool(substring.search(log))
return substring in log

broken_tests_log = f"{temp_path}/broken_tests_handler.log"

with open(broken_tests_log, "a") as log_file:

log_file.write(f"Checking known broken tests for failed test: {test_name}\n")
log_file.write("Potential matching rules:\n")
exact_rule = broken_tests_rules["exact"].get(test_name)
if exact_rule:
log_file.write(f"{test_name} - {exact_rule}\n")
matching_rules.append(exact_rule)

for name_re, data in broken_tests_rules["pattern"].items():
if name_re.fullmatch(test_name):
log_file.write(f"{name_re} - {data}\n")
matching_rules.append(data)

if not matching_rules:
return False

log_file.write(f"First line of test logs: {test_logs.splitlines()[0]}\n")

for rule_data in matching_rules:
if rule_data.get("check_types") and not any(
ct in job_flags for ct in rule_data["check_types"]
):
log_file.write(
f"Skip rule: Check types didn't match: '{rule_data['check_types']}' not in '{job_flags}'\n"
)
continue # check_types didn't match → skip rule

is_regex = rule_data.get("regex", False)
not_message = rule_data.get("not_message")
if not_message and matches_substring(not_message, test_logs, is_regex):
log_file.write(
f"Skip rule: Not message matched: '{rule_data['not_message']}'\n"
)
continue # not_message matched → skip rule
message = rule_data.get("message")
if message and not matches_substring(message, test_logs, is_regex):
log_file.write(
f"Skip rule: Message didn't match: '{rule_data['message']}'\n"
)
continue

log_file.write(f"Matched rule: {rule_data}\n")
return rule_data["reason"]

return False


def _start_docker_in_docker():
with open("./ci/tmp/docker-in-docker.log", "w") as log_file:
dockerd_proc = subprocess.Popen(
Expand Down Expand Up @@ -533,6 +636,47 @@ def main():
)
attached_files.append("./ci/tmp/dmesg.log")

broken_tests_rules = get_broken_tests_rules("tests/broken_tests.yaml")
for result in test_results:
if result.status == Result.StatusExtended.FAIL:
try:
last_log_path = sorted([p for p in result.files if p.endswith(".log")])[
-1
]
with open(last_log_path, "r") as log_file:
log_content = log_file.read()
except Exception as e:
print(f"Error getting last log path for result {result.name}: {e}")
print(
[
a
for a in dir(result)
if not a.startswith("_") and not callable(getattr(result, a))
]
)
print(f"Result files: {result.files}")
print(f"Result info: {result.info}")
print(f"Error info: {error_info}")

continue
try:
known_fail_reason = test_is_known_fail(
broken_tests_rules,
result.name,
log_content,
job_params,
)
except Exception as e:
print(f"Error getting known fail reason for result {result.name}: {e}")
continue

if known_fail_reason:
result.status = Result.StatusExtended.BROKEN
result.info += f"\nMarked as broken: {known_fail_reason}"

if os.path.exists(f"{temp_path}/broken_tests_handler.log"):
attached_files.append(f"{temp_path}/broken_tests_handler.log")

R = Result.create_from(results=test_results, stopwatch=sw, files=attached_files)

if has_error:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
)
backward = cluster.add_instance(
"backward",
image="clickhouse/clickhouse-server",
image="altinityinfra/clickhouse-server",
tag=CLICKHOUSE_CI_MIN_TESTED_VERSION,
with_installed_binary=True,
)
Expand Down
Loading