Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion scripts/metric_reporter/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,9 @@ def main(
except GCSClientError as error:
logger.error(f"GCS client error: {error}")
except ParserError as error:
logger.error(f"Parsing error: {error}")
# Log as warning since it's not a fatal situation.
# This allows the pipeline to continue on parsing other files.
logger.warning(f"Parsing error: {error}")
except ReporterError as error:
logger.error(f"Test Suite Reporter error: {error}")
except Exception as error:
Expand Down
11 changes: 9 additions & 2 deletions scripts/metric_reporter/parser/coverage_json_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,15 +204,22 @@ def parse(self, artifact_file_names: list[str]) -> list[CoverageJsonGroup]:
coverage_json_groups: list[CoverageJsonGroup] = []
for artifact_file_name in artifact_file_names:
self.logger.info(f"Parsing {artifact_file_name}")
file: ArtifactFile = self._parse_artifact_file_name(artifact_file_name)
group: CoverageJsonGroup = self._get_coverage_json_group(file, coverage_json_groups)
try:
file: ArtifactFile = self._parse_artifact_file_name(artifact_file_name)
group: CoverageJsonGroup = self._get_coverage_json_group(
file, coverage_json_groups
)
content: str = self._gcs_client.get_coverage_artifact_content(
file.repository, artifact_file_name
)
json_data: dict[str, Any] = json.loads(content)
coverage_json: CoverageJson = self._parse_json_data(file, json_data)
group.coverage_jsons.append(coverage_json)
except ParserError as error:
                # We don't want to completely kill the pipeline if a file is in a format that
                # can't be parsed. So we log a warning and continue with the next file.
self.logger.warning("Skipping file %s: %s", artifact_file_name, error)
continue
except (JSONDecodeError, ValidationError) as error:
error_mapping: dict[type, str] = {
JSONDecodeError: f"Invalid JSON format for file {artifact_file_name}",
Expand Down
11 changes: 9 additions & 2 deletions scripts/metric_reporter/parser/junit_xml_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -371,13 +371,20 @@ def parse(self, artifact_file_names: list[str]) -> list[JUnitXmlGroup]:
junit_xml_groups: list[JUnitXmlGroup] = []
for artifact_file_name in artifact_file_names:
self.logger.info(f"Parsing {artifact_file_name}")
file: ArtifactFile = self._parse_artifact_file_name(artifact_file_name)
junit_xml: JUnitXmlJobTestSuites = self._get_junit_xml(file, junit_xml_groups)
try:
file: ArtifactFile = self._parse_artifact_file_name(artifact_file_name)
junit_xml: JUnitXmlJobTestSuites = self._get_junit_xml(file, junit_xml_groups)
test_suites: JUnitXmlTestSuites = self._parse_test_suites(
file.repository, file.name
)
junit_xml.test_suites.append(test_suites)
except ParserError as error:
                # We don't want to completely kill the pipeline if a file is in a format that
                # can't be parsed. So we log a warning and continue with the next file.
                # Note: the validation path internally raises a ParserError, so this handler
                # must come before the ValidationError handler below.
self.logger.warning("Skipping file %s: %s", artifact_file_name, error)
continue
except ValidationError as error:
error_msg: str = f"Unexpected value or schema in file {artifact_file_name}"
self.logger.error(error_msg, exc_info=error)
Expand Down