|
31 | 31 | import sys |
32 | 32 | import pathlib |
33 | 33 | import infra.concurrency |
| 34 | +import ccf.read_ledger |
| 35 | +import re |
34 | 36 |
|
35 | 37 | from loguru import logger as LOG |
36 | 38 |
|
@@ -1553,6 +1555,67 @@ def test_error_message_on_failure_to_read_aci_sec_context(args): |
1553 | 1555 | ), f"Did not find expected log messages: {expected_log_messages}" |
1554 | 1556 |
|
1555 | 1557 |
|
def test_error_message_on_failure_to_fetch_snapshot(const_args):
    """Verify retry/give-up logging when a joining node cannot fetch a snapshot.

    Starts a one-node network, kills the primary, then asks a new node to
    join via that dead primary. The join must fail, and the new node's log
    must show each snapshot-fetch attempt (1/3 .. 3/3) followed by the
    "giving up" message once the retry limit is exceeded.
    """
    args = copy.deepcopy(const_args)
    args.nodes = infra.e2e_args.min_nodes(args, 0)
    with infra.network.network(
        args.nodes,
        args.binary_dir,
        args.debug_nodes,
        pdb=args.pdb,
    ) as network:
        network.start_and_open(args)

        primary, _ = network.find_primary()

        new_node = network.create_node("local://localhost")

        # Shut down primary to cause snapshot fetch to fail
        primary.remote.stop()

        failed = False
        try:
            LOG.info("Starting join")
            network.join_node(
                new_node,
                args.package,
                args,
                target_node=primary,
                timeout=10,
                from_snapshot=False,
                wait_for_node_in_store=False,
            )
            new_node.wait_for_node_to_join(timeout=5)
        except Exception as e:
            LOG.info(f"Joining node could not join as expected {e}")
            failed = True

        assert failed, "Joining node could not join failed node as expected"

        expected_log_messages = [
            re.compile(r"Fetching snapshot from .* \(attempt 1/3\)"),
            re.compile(r"Fetching snapshot from .* \(attempt 2/3\)"),
            re.compile(r"Fetching snapshot from .* \(attempt 3/3\)"),
            re.compile(
                r"Exceeded maximum snapshot fetch retries \([0-9]+\), giving up"
            ),
        ]

        out_path, _ = new_node.get_logs()
        # Scan the node's log for each expected message. Use a context
        # manager so the log file is closed deterministically (the previous
        # version leaked the handle), and rebuild the pattern list per line
        # rather than calling .remove() on a list we are iterating — mutating
        # a list mid-iteration can silently skip the next pattern.
        with open(out_path, "r", encoding="utf-8") as log_file:
            for line in log_file:
                remaining = []
                for expected in expected_log_messages:
                    if expected.search(line):
                        LOG.info(f"Found expected log message: {line}")
                    else:
                        remaining.append(expected)
                expected_log_messages = remaining
                if not expected_log_messages:
                    break

        assert (
            len(expected_log_messages) == 0
        ), f"Did not find expected log messages: {expected_log_messages}"
| 1618 | + |
1556 | 1619 | def run(args): |
1557 | 1620 | run_max_uncommitted_tx_count(args) |
1558 | 1621 | run_file_operations(args) |
|
0 commit comments