From 9accb01054e65cc5db56676a6b3f78afc7360495 Mon Sep 17 00:00:00 2001
From: Kirk Brauer
Date: Thu, 4 Sep 2025 18:40:47 -0400
Subject: [PATCH 01/21] Add pre/post lease hooks

---
 .../jumpstarter/config/exporter.py            |  26 +-
 .../jumpstarter/config/exporter_test.py       |  52 +++
 .../jumpstarter/exporter/exporter.py          |  79 ++++-
 .../jumpstarter/jumpstarter/exporter/hooks.py | 141 ++++++++
 .../jumpstarter/exporter/hooks_test.py        | 319 ++++++++++++++++++
 5 files changed, 612 insertions(+), 5 deletions(-)
 create mode 100644 packages/jumpstarter/jumpstarter/exporter/hooks.py
 create mode 100644 packages/jumpstarter/jumpstarter/exporter/hooks_test.py

diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py
index 8cc8b9bd6..6f0d71330 100644
--- a/packages/jumpstarter/jumpstarter/config/exporter.py
+++ b/packages/jumpstarter/jumpstarter/config/exporter.py
@@ -18,6 +18,16 @@
 from jumpstarter.driver import Driver


+class HookConfigV1Alpha1(BaseModel):
+    """Configuration for lifecycle hooks."""
+
+    model_config = ConfigDict(populate_by_name=True)
+
+    pre_lease: str | None = Field(default=None, alias="preLease")
+    post_lease: str | None = Field(default=None, alias="postLease")
+    timeout: int = Field(default=300, description="Hook execution timeout in seconds")
+
+
 class ExporterConfigV1Alpha1DriverInstanceProxy(BaseModel):
     ref: str

@@ -85,6 +95,7 @@ class ExporterConfigV1Alpha1(BaseModel):
     grpcOptions: dict[str, str | int] | None = Field(default_factory=dict)

     export: dict[str, ExporterConfigV1Alpha1DriverInstance] = Field(default_factory=dict)
+    hooks: HookConfigV1Alpha1 = Field(default_factory=HookConfigV1Alpha1)

     path: Path | None = Field(default=None)

@@ -119,7 +130,7 @@ def list(cls) -> ExporterConfigListV1Alpha1:

     @classmethod
     def dump_yaml(self, config: Self) -> str:
-        return yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), sort_keys=False)
+        return yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), sort_keys=False)

     @classmethod
     def save(cls, config: Self, path: Optional[str] = None) -> Path:
@@ -130,7 +141,7 @@ def save(cls, config: Self, path: Optional[str] = None) -> Path:
         else:
             config.path = Path(path)
         with config.path.open(mode="w") as f:
-            yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), f, sort_keys=False)
+            yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), f, sort_keys=False)
         return config.path

     @classmethod
@@ -173,6 +184,16 @@ async def channel_factory():
                 )
             return aio_secure_channel(self.endpoint, credentials, self.grpcOptions)

+        # Create hook executor if hooks are configured
+        hook_executor = None
+        if self.hooks.pre_lease or self.hooks.post_lease:
+            from jumpstarter.exporter.hooks import HookExecutor
+
+            hook_executor = HookExecutor(
+                config=self.hooks,
+                device_factory=ExporterConfigV1Alpha1DriverInstance(children=self.export).instantiate,
+            )
+
         exporter = None
         entered = False
         try:
@@ -181,6 +202,7 @@
                 device_factory=ExporterConfigV1Alpha1DriverInstance(children=self.export).instantiate,
                 tls=self.tls,
                 grpc_options=self.grpcOptions,
+                hook_executor=hook_executor,
             )
             # Initialize the exporter (registration, etc.)
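# --- Illustrative aside (not part of the patch): a minimal sketch of how the new
# HookConfigV1Alpha1 model above behaves, assuming pydantic v2 semantics as used by
# the rest of this config module. populate_by_name=True lets callers use either the
# snake_case field names or the camelCase YAML aliases on input, while
# model_dump(by_alias=True) (now used by dump_yaml/save) emits the camelCase form.
from jumpstarter.config.exporter import HookConfigV1Alpha1

cfg = HookConfigV1Alpha1.model_validate({"preLease": "j power on", "postLease": "j power off"})
assert cfg.pre_lease == "j power on"
assert cfg.timeout == 300  # default timeout in seconds when not configured
assert cfg.model_dump(by_alias=True)["postLease"] == "j power off"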
await exporter.__aenter__() diff --git a/packages/jumpstarter/jumpstarter/config/exporter_test.py b/packages/jumpstarter/jumpstarter/config/exporter_test.py index e9fb48630..eebce7839 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter_test.py +++ b/packages/jumpstarter/jumpstarter/config/exporter_test.py @@ -101,3 +101,55 @@ def test_exporter_config(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): ExporterConfigV1Alpha1.save(config) assert config == ExporterConfigV1Alpha1.load("test") + + +def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): + monkeypatch.setattr(ExporterConfigV1Alpha1, "BASE_PATH", tmp_path) + + path = tmp_path / "test-hooks.yaml" + + text = """apiVersion: jumpstarter.dev/v1alpha1 +kind: ExporterConfig +metadata: + namespace: default + name: test-hooks +endpoint: "jumpstarter.my-lab.com:1443" +token: "test-token" +hooks: + preLease: | + echo "Pre-lease hook for $LEASE_NAME" + j power on + postLease: | + echo "Post-lease hook for $LEASE_NAME" + j power off + timeout: 600 +export: + power: + type: "jumpstarter_driver_power.driver.PduPower" +""" + path.write_text( + text, + encoding="utf-8", + ) + + config = ExporterConfigV1Alpha1.load("test-hooks") + + assert config.hooks.pre_lease == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' + assert config.hooks.post_lease == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' + assert config.hooks.timeout == 600 + + # Test that it round-trips correctly + path.unlink() + ExporterConfigV1Alpha1.save(config) + reloaded_config = ExporterConfigV1Alpha1.load("test-hooks") + + assert reloaded_config.hooks.pre_lease == config.hooks.pre_lease + assert reloaded_config.hooks.post_lease == config.hooks.post_lease + assert reloaded_config.hooks.timeout == config.hooks.timeout + + # Test that the YAML uses camelCase + yaml_output = ExporterConfigV1Alpha1.dump_yaml(config) + assert "preLease:" in yaml_output + assert "postLease:" in yaml_output + assert "pre_lease:" not in yaml_output + assert "post_lease:" not in yaml_output diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index e63589ee5..21bd783b5 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -8,6 +8,7 @@ from anyio import ( AsyncContextManagerMixin, CancelScope, + Event, connect_unix, create_memory_object_stream, create_task_group, @@ -25,6 +26,7 @@ from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookContext, HookExecutor from jumpstarter.exporter.session import Session logger = logging.getLogger(__name__) @@ -37,10 +39,13 @@ class Exporter(AsyncContextManagerMixin, Metadata): lease_name: str = field(init=False, default="") tls: TLSConfigV1Alpha1 = field(default_factory=TLSConfigV1Alpha1) grpc_options: dict[str, str] = field(default_factory=dict) + hook_executor: HookExecutor | None = field(default=None) registered: bool = field(init=False, default=False) _stop_requested: bool = field(init=False, default=False) _started: bool = field(init=False, default=False) _tg: TaskGroup | None = field(init=False, default=None) + _current_client_name: str = field(init=False, default="") + _pre_lease_ready: Event | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False): """Signal the exporter to stop. 
@@ -49,9 +54,7 @@ def stop(self, wait_for_lease_exit=False): wait_for_lease_exit (bool): If True, wait for the current lease to exit before stopping. """ - # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: - logger.info("Stopping exporter immediately") self._tg.cancel_scope.cancel() elif not self._stop_requested: self._stop_requested = True @@ -145,6 +148,12 @@ async def listen(retries=5, backoff=3): tg.start_soon(listen) + # Wait for pre-lease hook to complete before processing connections + if self._pre_lease_ready is not None: + logger.info("Waiting for pre-lease hook to complete before accepting connections") + await self._pre_lease_ready.wait() + logger.info("Pre-lease hook completed, now accepting connections") + async with self.session() as path: async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) @@ -187,19 +196,83 @@ async def status(retries=5, backoff=3): tg.start_soon(status) async for status in status_rx: if self.lease_name != "" and self.lease_name != status.lease_name: + # Post-lease hook for the previous lease + if self.hook_executor and self._current_client_name: + hook_context = HookContext( + lease_name=self.lease_name, + client_name=self._current_client_name, + ) + # Shield the post-lease hook from cancellation and await it + with CancelScope(shield=True): + await self.hook_executor.execute_post_lease_hook(hook_context) + self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") + # Reset event for next lease + self._pre_lease_ready = None self.stop() break + + # Check for lease state transitions + previous_leased = hasattr(self, "_previous_leased") and self._previous_leased + current_leased = status.leased + self.lease_name = status.lease_name if not self._started and self.lease_name != "": self._started = True + # Create event for pre-lease synchronization + self._pre_lease_ready = Event() tg.start_soon(self.handle, self.lease_name, tg) - if status.leased: + + if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) + self._current_client_name = status.client_name + + # Pre-lease hook when transitioning from unleased to leased + if not previous_leased: + if self.hook_executor: + hook_context = HookContext( + lease_name=status.lease_name, + client_name=status.client_name, + ) + + # Start pre-lease hook asynchronously + async def run_pre_lease_hook(): + try: + await self.hook_executor.execute_pre_lease_hook(hook_context) + logger.info("Pre-lease hook completed successfully") + except Exception as e: + logger.error("Pre-lease hook failed: %s", e) + finally: + # Always set the event to unblock connections + if self._pre_lease_ready: + self._pre_lease_ready.set() + + tg.start_soon(run_pre_lease_hook) + else: + # No hook configured, set event immediately + if self._pre_lease_ready: + self._pre_lease_ready.set() else: logger.info("Currently not leased") + + # Post-lease hook when transitioning from leased to unleased + if previous_leased and self.hook_executor and self._current_client_name: + hook_context = HookContext( + lease_name=self.lease_name, + client_name=self._current_client_name, + ) + # Shield the post-lease hook from cancellation and await it + with CancelScope(shield=True): + await self.hook_executor.execute_post_lease_hook(hook_context) + + self._current_client_name = "" + # Reset event for next lease + self._pre_lease_ready 
= None + if self._stop_requested: self.stop() break + + self._previous_leased = current_leased self._tg = None diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py new file mode 100644 index 000000000..16318ce33 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -0,0 +1,141 @@ +"""Lifecycle hooks for Jumpstarter exporters.""" + +import asyncio +import logging +import os +from contextlib import asynccontextmanager +from dataclasses import dataclass, field +from typing import Callable + +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.driver import Driver +from jumpstarter.exporter.session import Session + +logger = logging.getLogger(__name__) + + +@dataclass(kw_only=True) +class HookContext: + """Context information passed to hooks.""" + + lease_name: str + client_name: str = "" + lease_duration: str = "" + exporter_name: str = "" + exporter_namespace: str = "" + + +@dataclass(kw_only=True) +class HookExecutor: + """Executes lifecycle hooks with access to the j CLI.""" + + config: HookConfigV1Alpha1 + device_factory: Callable[[], Driver] + timeout: int = field(init=False) + + def __post_init__(self): + self.timeout = self.config.timeout + + @asynccontextmanager + async def _create_hook_environment(self, context: HookContext): + """Create a local session and Unix socket for j CLI access.""" + with Session( + root_device=self.device_factory(), + # Use hook context for metadata + labels={ + "jumpstarter.dev/hook-context": "true", + "jumpstarter.dev/lease": context.lease_name, + }, + ) as session: + async with session.serve_unix_async() as unix_path: + # Create environment variables for the hook + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(unix_path), + JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + + yield hook_env + + async def _execute_hook(self, command: str, context: HookContext) -> bool: + """Execute a single hook command.""" + if not command or not command.strip(): + logger.debug("Hook command is empty, skipping") + return True + + logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) + + async with self._create_hook_environment(context) as hook_env: + try: + # Execute the hook command using shell + process = await asyncio.create_subprocess_shell( + command, + env=hook_env, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + + try: + # Stream output line-by-line for real-time logging + output_lines = [] + + async def read_output(): + while True: + line = await process.stdout.readline() + if not line: + break + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[Hook Output] %s", line_decoded) + + # Run output reading and process waiting concurrently with timeout + await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) + + if process.returncode == 0: + logger.info("Hook executed successfully") + return True + else: + logger.error("Hook failed with return code %d", process.returncode) + if output_lines: + logger.error("Hook output: %s", "\n".join(output_lines)) + return False + + except asyncio.TimeoutError: + 
logger.error("Hook timed out after %d seconds", self.timeout) + try: + process.terminate() + await asyncio.wait_for(process.wait(), timeout=5) + except asyncio.TimeoutError: + process.kill() + await process.wait() + return False + + except Exception as e: + logger.error("Error executing hook: %s", e, exc_info=True) + return False + + async def execute_pre_lease_hook(self, context: HookContext) -> bool: + """Execute the pre-lease hook.""" + if not self.config.pre_lease: + logger.debug("No pre-lease hook configured") + return True + + logger.info("Executing pre-lease hook for lease %s", context.lease_name) + return await self._execute_hook(self.config.pre_lease, context) + + async def execute_post_lease_hook(self, context: HookContext) -> bool: + """Execute the post-lease hook.""" + if not self.config.post_lease: + logger.debug("No post-lease hook configured") + return True + + logger.info("Executing post-lease hook for lease %s", context.lease_name) + return await self._execute_hook(self.config.post_lease, context) diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py new file mode 100644 index 000000000..576ca6810 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -0,0 +1,319 @@ +import asyncio +from unittest.mock import AsyncMock, Mock, call, patch + +import pytest + +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookContext, HookExecutor + +pytestmark = pytest.mark.anyio + + +class MockDriver(Driver): + @classmethod + def client(cls) -> str: + return "test.MockClient" + + def close(self): + pass + + def reset(self): + pass + + +@pytest.fixture +def mock_device_factory(): + def factory(): + return MockDriver() + + return factory + + +@pytest.fixture +def hook_config(): + return HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + post_lease="echo 'Post-lease hook executed'", + timeout=10, + ) + + +@pytest.fixture +def hook_context(): + return HookContext( + lease_name="test-lease-123", + client_name="test-client", + lease_duration="30m", + exporter_name="test-exporter", + exporter_namespace="default", + ) + + +class TestHookExecutor: + async def test_hook_executor_creation(self, hook_config, mock_device_factory): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + assert executor.config == hook_config + assert executor.device_factory == mock_device_factory + assert executor.timeout == 10 + + async def test_empty_hook_execution(self, mock_device_factory, hook_context): + empty_config = HookConfigV1Alpha1() + executor = HookExecutor( + config=empty_config, + device_factory=mock_device_factory, + ) + + # Both hooks should return True for empty/None commands + assert await executor.execute_pre_lease_hook(hook_context) is True + assert await executor.execute_post_lease_hook(hook_context) is True + + async def test_successful_hook_execution(self, mock_device_factory, hook_context): + hook_config = HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + timeout=10, + ) + # Mock the Session and serve_unix_async + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + # Mock the async context manager for serve_unix_async + 
mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock asyncio.create_subprocess_shell to simulate successful execution + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline to simulate line-by-line output + mock_process.stdout.readline.side_effect = [ + b"Pre-lease hook executed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is True + + # Verify subprocess was called with correct environment + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + command = call_args[0][0] + env = call_args[1]["env"] + + assert command == "echo 'Pre-lease hook executed'" + assert JUMPSTARTER_HOST in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + + async def test_failed_hook_execution(self, mock_device_factory, hook_context): + failed_config = HookConfigV1Alpha1( + pre_lease="exit 1", # Command that will fail + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock failed process + mock_process = AsyncMock() + mock_process.returncode = 1 + # Mock stdout.readline for failed process + mock_process.stdout.readline.side_effect = [ + b"Command failed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=failed_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is False + + async def test_hook_timeout(self, mock_device_factory, hook_context): + timeout_config = HookConfigV1Alpha1( + pre_lease="sleep 60", # Command that will timeout + timeout=1, # 1 second timeout + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + # Mock process that times out + mock_process = AsyncMock() + mock_process.terminate.return_value = None + mock_process.wait.return_value = None + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + ): + executor = HookExecutor( + config=timeout_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is False + mock_process.terminate.assert_called_once() + + async def test_hook_environment_variables(self, mock_device_factory, hook_context): + hook_config = 
HookConfigV1Alpha1( + pre_lease="echo 'Pre-lease hook executed'", + timeout=10, + ) + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline for environment test + mock_process.stdout.readline.side_effect = [ + b"", # EOF (no output) + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + await executor.execute_pre_lease_hook(hook_context) + + # Check that all expected environment variables are set + call_args = mock_subprocess.call_args + env = call_args[1]["env"] + + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + assert env["LEASE_DURATION"] == "30m" + assert env["EXPORTER_NAME"] == "test-exporter" + assert env["EXPORTER_NAMESPACE"] == "default" + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + + async def test_real_time_output_logging(self, mock_device_factory, hook_context): + """Test that hook output is logged in real-time at INFO level.""" + hook_config = HookConfigV1Alpha1( + pre_lease="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock multiple lines of output to verify streaming + mock_process.stdout.readline.side_effect = [ + b"Line 1\n", + b"Line 2\n", + b"Line 3\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_pre_lease_hook(hook_context) + + assert result is True + + # Verify that output lines were logged in real-time at INFO level + expected_calls = [ + call.info("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call.info("[Hook Output] %s", "Line 1"), + call.info("[Hook Output] %s", "Line 2"), + call.info("[Hook Output] %s", "Line 3"), + call.info("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): + """Test that post-lease hook executes when called directly.""" + hook_config = HookConfigV1Alpha1( + post_lease="echo 'Post-lease cleanup completed'", + timeout=10, + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + + 
mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock post-lease hook output + mock_process.stdout.readline.side_effect = [ + b"Post-lease cleanup completed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with patch("jumpstarter.exporter.hooks.logger") as mock_logger, \ + patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_post_lease_hook(hook_context) + + assert result is True + + # Verify that post-lease hook output was logged + expected_calls = [ + call.info("Executing post-lease hook for lease %s", "test-lease-123"), + call.info("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call.info("[Hook Output] %s", "Post-lease cleanup completed"), + call.info("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) From 64962befdf132d655ac977e0c6084d262c2f5cd4 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 29 Sep 2025 11:00:32 -0400 Subject: [PATCH 02/21] Add enums and exporter status reporting --- .../jumpstarter-cli/jumpstarter_cli/get.py | 9 +- .../jumpstarter/client/v1/client_pb2.py | 59 +++---- .../jumpstarter/v1/common_pb2.py | 39 +++++ .../jumpstarter/v1/common_pb2_grpc.py | 4 + .../jumpstarter/v1/jumpstarter_pb2.py | 147 ++++++++++-------- .../jumpstarter/v1/jumpstarter_pb2_grpc.py | 88 +++++++++++ .../jumpstarter/jumpstarter/client/core.py | 30 +++- .../jumpstarter/jumpstarter/client/grpc.py | 39 +++-- .../jumpstarter/common/__init__.py | 10 +- .../jumpstarter/jumpstarter/common/enums.py | 76 +++++++++ .../jumpstarter/jumpstarter/config/client.py | 4 +- .../jumpstarter/exporter/exporter.py | 83 +++++++--- .../jumpstarter/exporter/logging.py | 6 +- .../jumpstarter/exporter/session.py | 25 ++- 14 files changed, 481 insertions(+), 138 deletions(-) create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.py create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.py create mode 100644 packages/jumpstarter/jumpstarter/common/enums.py diff --git a/packages/jumpstarter-cli/jumpstarter_cli/get.py b/packages/jumpstarter-cli/jumpstarter_cli/get.py index d62e6dee1..a5dc0f733 100644 --- a/packages/jumpstarter-cli/jumpstarter_cli/get.py +++ b/packages/jumpstarter-cli/jumpstarter_cli/get.py @@ -21,8 +21,8 @@ def get(): @opt_output_all @opt_comma_separated( "with", - {"leases", "online"}, - help_text="Include fields: leases, online (comma-separated or repeated)" + {"leases", "online", "status"}, + help_text="Include fields: leases, online, status (comma-separated or repeated)", ) @handle_exceptions_with_reauthentication(relogin_client) def get_exporters(config, selector: str | None, output: OutputType, with_options: list[str]): @@ -32,7 +32,10 @@ def get_exporters(config, selector: str | None, output: OutputType, with_options include_leases = "leases" in with_options include_online = "online" in with_options - exporters = config.list_exporters(filter=selector, include_leases=include_leases, include_online=include_online) + include_status = "status" in with_options + exporters = config.list_exporters( + filter=selector, 
include_leases=include_leases, include_online=include_online, include_status=include_status + ) model_print(exporters, output) diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py index 9682b4b6b..580e08408 100644 --- a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.py @@ -31,9 +31,10 @@ from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from ...v1 import kubernetes_pb2 as jumpstarter_dot_v1_dot_kubernetes__pb2 +from ...v1 import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\"jumpstarter/client/v1/client.proto\x12\x15jumpstarter.client.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\"\xa1\x02\n\x08\x45xporter\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\x43\n\x06labels\x18\x02 \x03(\x0b\x32+.jumpstarter.client.v1.Exporter.LabelsEntryR\x06labels\x12\x1b\n\x06online\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03R\x06online\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01:_\xea\x41\\\n\x18jumpstarter.dev/Exporter\x12+namespaces/{namespace}/exporters/{exporter}*\texporters2\x08\x65xporter\"\xed\x06\n\x05Lease\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\"\n\x08selector\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x05R\x08selector\x12:\n\x08\x64uration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02R\x08\x64uration\x12M\n\x12\x65\x66\x66\x65\x63tive_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x03R\x11\x65\x66\x66\x65\x63tiveDuration\x12>\n\nbegin_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12V\n\x14\x65\x66\x66\x65\x63tive_begin_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x01R\x12\x65\x66\x66\x65\x63tiveBeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x02R\x07\x65ndTime\x88\x01\x01\x12R\n\x12\x65\x66\x66\x65\x63tive_end_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x03R\x10\x65\x66\x66\x65\x63tiveEndTime\x88\x01\x01\x12;\n\x06\x63lient\x18\t \x01(\tB\x1e\xe0\x41\x03\xfa\x41\x18\n\x16jumpstarter.dev/ClientH\x04R\x06\x63lient\x88\x01\x01\x12\x41\n\x08\x65xporter\x18\n \x01(\tB \xe0\x41\x03\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterH\x05R\x08\x65xporter\x88\x01\x01\x12>\n\nconditions\x18\x0b \x03(\x0b\x32\x19.jumpstarter.v1.ConditionB\x03\xe0\x41\x03R\nconditions:P\xea\x41M\n\x15jumpstarter.dev/Lease\x12%namespaces/{namespace}/leases/{lease}*\x06leases2\x05leaseB\r\n\x0b_begin_timeB\x17\n\x15_effective_begin_timeB\x0b\n\t_end_timeB\x15\n\x13_effective_end_timeB\t\n\x07_clientB\x0b\n\t_exporter\"J\n\x12GetExporterRequest\x12\x34\n\x04name\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterR\x04name\"\xb3\x01\n\x14ListExportersRequest\x12\x38\n\x06parent\x18\x01 
\x01(\tB \xe0\x41\x02\xfa\x41\x1a\x12\x18jumpstarter.dev/ExporterR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"~\n\x15ListExportersResponse\x12=\n\texporters\x18\x01 \x03(\x0b\x32\x1f.jumpstarter.client.v1.ExporterR\texporters\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x0fGetLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name\"\xad\x01\n\x11ListLeasesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"r\n\x12ListLeasesResponse\x12\x34\n\x06leases\x18\x01 \x03(\x0b\x32\x1c.jumpstarter.client.v1.LeaseR\x06leases\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\xa4\x01\n\x12\x43reateLeaseRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12\x1e\n\x08lease_id\x18\x02 \x01(\tB\x03\xe0\x41\x01R\x07leaseId\x12\x37\n\x05lease\x18\x03 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\"\x8f\x01\n\x12UpdateLeaseRequest\x12\x37\n\x05lease\x18\x01 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\x12@\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01R\nupdateMask\"G\n\x12\x44\x65leteLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name2\xa7\x08\n\rClientService\x12\x8d\x01\n\x0bGetExporter\x12).jumpstarter.client.v1.GetExporterRequest\x1a\x1f.jumpstarter.client.v1.Exporter\"2\xda\x41\x04name\x82\xd3\xe4\x93\x02%\x12#/v1/{name=namespaces/*/exporters/*}\x12\xa0\x01\n\rListExporters\x12+.jumpstarter.client.v1.ListExportersRequest\x1a,.jumpstarter.client.v1.ListExportersResponse\"4\xda\x41\x06parent\x82\xd3\xe4\x93\x02%\x12#/v1/{parent=namespaces/*}/exporters\x12\x81\x01\n\x08GetLease\x12&.jumpstarter.client.v1.GetLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"\x12 /v1/{name=namespaces/*/leases/*}\x12\x94\x01\n\nListLeases\x12(.jumpstarter.client.v1.ListLeasesRequest\x1a).jumpstarter.client.v1.ListLeasesResponse\"1\xda\x41\x06parent\x82\xd3\xe4\x93\x02\"\x12 /v1/{parent=namespaces/*}/leases\x12\x9f\x01\n\x0b\x43reateLease\x12).jumpstarter.client.v1.CreateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"G\xda\x41\x15parent,lease,lease_id\x82\xd3\xe4\x93\x02)\" /v1/{parent=namespaces/*}/leases:\x05lease\x12\xa1\x01\n\x0bUpdateLease\x12).jumpstarter.client.v1.UpdateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"I\xda\x41\x11lease,update_mask\x82\xd3\xe4\x93\x02/2&/v1/{lease.name=namespaces/*/leases/*}:\x05lease\x12\x81\x01\n\x0b\x44\x65leteLease\x12).jumpstarter.client.v1.DeleteLeaseRequest\x1a\x16.google.protobuf.Empty\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"* /v1/{name=namespaces/*/leases/*}B\x9e\x01\n\x19\x63om.jumpstarter.client.v1B\x0b\x43lientProtoP\x01\xa2\x02\x03JCX\xaa\x02\x15Jumpstarter.Client.V1\xca\x02\x15Jumpstarter\\Client\\V1\xe2\x02!Jumpstarter\\Client\\V1\\GPBMetadata\xea\x02\x17Jumpstarter::Client::V1b\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\"jumpstarter/client/v1/client.proto\x12\x15jumpstarter.client.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xe0\x02\n\x08\x45xporter\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\x43\n\x06labels\x18\x02 \x03(\x0b\x32+.jumpstarter.client.v1.Exporter.LabelsEntryR\x06labels\x12\x1d\n\x06online\x18\x03 \x01(\x08\x42\x05\x18\x01\xe0\x41\x03R\x06online\x12;\n\x06status\x18\x04 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusB\x03\xe0\x41\x03R\x06status\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01:_\xea\x41\\\n\x18jumpstarter.dev/Exporter\x12+namespaces/{namespace}/exporters/{exporter}*\texporters2\x08\x65xporter\"\xed\x06\n\x05Lease\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x08R\x04name\x12\"\n\x08selector\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x05R\x08selector\x12:\n\x08\x64uration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02R\x08\x64uration\x12M\n\x12\x65\x66\x66\x65\x63tive_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x03R\x11\x65\x66\x66\x65\x63tiveDuration\x12>\n\nbegin_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12V\n\x14\x65\x66\x66\x65\x63tive_begin_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x01R\x12\x65\x66\x66\x65\x63tiveBeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x02R\x07\x65ndTime\x88\x01\x01\x12R\n\x12\x65\x66\x66\x65\x63tive_end_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03H\x03R\x10\x65\x66\x66\x65\x63tiveEndTime\x88\x01\x01\x12;\n\x06\x63lient\x18\t \x01(\tB\x1e\xe0\x41\x03\xfa\x41\x18\n\x16jumpstarter.dev/ClientH\x04R\x06\x63lient\x88\x01\x01\x12\x41\n\x08\x65xporter\x18\n \x01(\tB \xe0\x41\x03\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterH\x05R\x08\x65xporter\x88\x01\x01\x12>\n\nconditions\x18\x0b \x03(\x0b\x32\x19.jumpstarter.v1.ConditionB\x03\xe0\x41\x03R\nconditions:P\xea\x41M\n\x15jumpstarter.dev/Lease\x12%namespaces/{namespace}/leases/{lease}*\x06leases2\x05leaseB\r\n\x0b_begin_timeB\x17\n\x15_effective_begin_timeB\x0b\n\t_end_timeB\x15\n\x13_effective_end_timeB\t\n\x07_clientB\x0b\n\t_exporter\"J\n\x12GetExporterRequest\x12\x34\n\x04name\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\n\x18jumpstarter.dev/ExporterR\x04name\"\xb3\x01\n\x14ListExportersRequest\x12\x38\n\x06parent\x18\x01 \x01(\tB \xe0\x41\x02\xfa\x41\x1a\x12\x18jumpstarter.dev/ExporterR\x06parent\x12 \n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"~\n\x15ListExportersResponse\x12=\n\texporters\x18\x01 \x03(\x0b\x32\x1f.jumpstarter.client.v1.ExporterR\texporters\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x0fGetLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name\"\xad\x01\n\x11ListLeasesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12 \n\tpage_size\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\"r\n\x12ListLeasesResponse\x12\x34\n\x06leases\x18\x01 \x03(\x0b\x32\x1c.jumpstarter.client.v1.LeaseR\x06leases\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\xa4\x01\n\x12\x43reateLeaseRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\x12\x15jumpstarter.dev/LeaseR\x06parent\x12\x1e\n\x08lease_id\x18\x02 \x01(\tB\x03\xe0\x41\x01R\x07leaseId\x12\x37\n\x05lease\x18\x03 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\"\x8f\x01\n\x12UpdateLeaseRequest\x12\x37\n\x05lease\x18\x01 \x01(\x0b\x32\x1c.jumpstarter.client.v1.LeaseB\x03\xe0\x41\x02R\x05lease\x12@\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x01R\nupdateMask\"G\n\x12\x44\x65leteLeaseRequest\x12\x31\n\x04name\x18\x01 \x01(\tB\x1d\xe0\x41\x02\xfa\x41\x17\n\x15jumpstarter.dev/LeaseR\x04name2\xa7\x08\n\rClientService\x12\x8d\x01\n\x0bGetExporter\x12).jumpstarter.client.v1.GetExporterRequest\x1a\x1f.jumpstarter.client.v1.Exporter\"2\xda\x41\x04name\x82\xd3\xe4\x93\x02%\x12#/v1/{name=namespaces/*/exporters/*}\x12\xa0\x01\n\rListExporters\x12+.jumpstarter.client.v1.ListExportersRequest\x1a,.jumpstarter.client.v1.ListExportersResponse\"4\xda\x41\x06parent\x82\xd3\xe4\x93\x02%\x12#/v1/{parent=namespaces/*}/exporters\x12\x81\x01\n\x08GetLease\x12&.jumpstarter.client.v1.GetLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"\x12 /v1/{name=namespaces/*/leases/*}\x12\x94\x01\n\nListLeases\x12(.jumpstarter.client.v1.ListLeasesRequest\x1a).jumpstarter.client.v1.ListLeasesResponse\"1\xda\x41\x06parent\x82\xd3\xe4\x93\x02\"\x12 /v1/{parent=namespaces/*}/leases\x12\x9f\x01\n\x0b\x43reateLease\x12).jumpstarter.client.v1.CreateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"G\xda\x41\x15parent,lease,lease_id\x82\xd3\xe4\x93\x02)\" /v1/{parent=namespaces/*}/leases:\x05lease\x12\xa1\x01\n\x0bUpdateLease\x12).jumpstarter.client.v1.UpdateLeaseRequest\x1a\x1c.jumpstarter.client.v1.Lease\"I\xda\x41\x11lease,update_mask\x82\xd3\xe4\x93\x02/2&/v1/{lease.name=namespaces/*/leases/*}:\x05lease\x12\x81\x01\n\x0b\x44\x65leteLease\x12).jumpstarter.client.v1.DeleteLeaseRequest\x1a\x16.google.protobuf.Empty\"/\xda\x41\x04name\x82\xd3\xe4\x93\x02\"* /v1/{name=namespaces/*/leases/*}B\x9e\x01\n\x19\x63om.jumpstarter.client.v1B\x0b\x43lientProtoP\x01\xa2\x02\x03JCX\xaa\x02\x15Jumpstarter.Client.V1\xca\x02\x15Jumpstarter\\Client\\V1\xe2\x02!Jumpstarter\\Client\\V1\\GPBMetadata\xea\x02\x17Jumpstarter::Client::V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -46,7 +47,9 @@ _globals['_EXPORTER'].fields_by_name['name']._loaded_options = None _globals['_EXPORTER'].fields_by_name['name']._serialized_options = b'\340A\010' _globals['_EXPORTER'].fields_by_name['online']._loaded_options = None - _globals['_EXPORTER'].fields_by_name['online']._serialized_options = b'\340A\003' + _globals['_EXPORTER'].fields_by_name['online']._serialized_options = b'\030\001\340A\003' + _globals['_EXPORTER'].fields_by_name['status']._loaded_options = None + _globals['_EXPORTER'].fields_by_name['status']._serialized_options = b'\340A\003' _globals['_EXPORTER']._loaded_options = None _globals['_EXPORTER']._serialized_options = b'\352A\\\n\030jumpstarter.dev/Exporter\022+namespaces/{namespace}/exporters/{exporter}*\texporters2\010exporter' 
_globals['_LEASE'].fields_by_name['name']._loaded_options = None @@ -115,30 +118,30 @@ _globals['_CLIENTSERVICE'].methods_by_name['UpdateLease']._serialized_options = b'\332A\021lease,update_mask\202\323\344\223\002/2&/v1/{lease.name=namespaces/*/leases/*}:\005lease' _globals['_CLIENTSERVICE'].methods_by_name['DeleteLease']._loaded_options = None _globals['_CLIENTSERVICE'].methods_by_name['DeleteLease']._serialized_options = b'\332A\004name\202\323\344\223\002\"* /v1/{name=namespaces/*/leases/*}' - _globals['_EXPORTER']._serialized_start=338 - _globals['_EXPORTER']._serialized_end=627 - _globals['_EXPORTER_LABELSENTRY']._serialized_start=473 - _globals['_EXPORTER_LABELSENTRY']._serialized_end=530 - _globals['_LEASE']._serialized_start=630 - _globals['_LEASE']._serialized_end=1507 - _globals['_GETEXPORTERREQUEST']._serialized_start=1509 - _globals['_GETEXPORTERREQUEST']._serialized_end=1583 - _globals['_LISTEXPORTERSREQUEST']._serialized_start=1586 - _globals['_LISTEXPORTERSREQUEST']._serialized_end=1765 - _globals['_LISTEXPORTERSRESPONSE']._serialized_start=1767 - _globals['_LISTEXPORTERSRESPONSE']._serialized_end=1893 - _globals['_GETLEASEREQUEST']._serialized_start=1895 - _globals['_GETLEASEREQUEST']._serialized_end=1963 - _globals['_LISTLEASESREQUEST']._serialized_start=1966 - _globals['_LISTLEASESREQUEST']._serialized_end=2139 - _globals['_LISTLEASESRESPONSE']._serialized_start=2141 - _globals['_LISTLEASESRESPONSE']._serialized_end=2255 - _globals['_CREATELEASEREQUEST']._serialized_start=2258 - _globals['_CREATELEASEREQUEST']._serialized_end=2422 - _globals['_UPDATELEASEREQUEST']._serialized_start=2425 - _globals['_UPDATELEASEREQUEST']._serialized_end=2568 - _globals['_DELETELEASEREQUEST']._serialized_start=2570 - _globals['_DELETELEASEREQUEST']._serialized_end=2641 - _globals['_CLIENTSERVICE']._serialized_start=2644 - _globals['_CLIENTSERVICE']._serialized_end=3707 + _globals['_EXPORTER']._serialized_start=367 + _globals['_EXPORTER']._serialized_end=719 + _globals['_EXPORTER_LABELSENTRY']._serialized_start=565 + _globals['_EXPORTER_LABELSENTRY']._serialized_end=622 + _globals['_LEASE']._serialized_start=722 + _globals['_LEASE']._serialized_end=1599 + _globals['_GETEXPORTERREQUEST']._serialized_start=1601 + _globals['_GETEXPORTERREQUEST']._serialized_end=1675 + _globals['_LISTEXPORTERSREQUEST']._serialized_start=1678 + _globals['_LISTEXPORTERSREQUEST']._serialized_end=1857 + _globals['_LISTEXPORTERSRESPONSE']._serialized_start=1859 + _globals['_LISTEXPORTERSRESPONSE']._serialized_end=1985 + _globals['_GETLEASEREQUEST']._serialized_start=1987 + _globals['_GETLEASEREQUEST']._serialized_end=2055 + _globals['_LISTLEASESREQUEST']._serialized_start=2058 + _globals['_LISTLEASESREQUEST']._serialized_end=2231 + _globals['_LISTLEASESRESPONSE']._serialized_start=2233 + _globals['_LISTLEASESRESPONSE']._serialized_end=2347 + _globals['_CREATELEASEREQUEST']._serialized_start=2350 + _globals['_CREATELEASEREQUEST']._serialized_end=2514 + _globals['_UPDATELEASEREQUEST']._serialized_start=2517 + _globals['_UPDATELEASEREQUEST']._serialized_end=2660 + _globals['_DELETELEASEREQUEST']._serialized_start=2662 + _globals['_DELETELEASEREQUEST']._serialized_end=2733 + _globals['_CLIENTSERVICE']._serialized_start=2736 + _globals['_CLIENTSERVICE']._serialized_end=3799 # @@protoc_insertion_point(module_scope) diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.py b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.py new file mode 100644 index 
000000000..4da3a3146 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: jumpstarter/v1/common.proto +# Protobuf Python Version: 6.30.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 6, + 30, + 1, + '', + 'jumpstarter/v1/common.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bjumpstarter/v1/common.proto\x12\x0ejumpstarter.v1*\xb6\x02\n\x0e\x45xporterStatus\x12\x1f\n\x1b\x45XPORTER_STATUS_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45XPORTER_STATUS_OFFLINE\x10\x01\x12\x1d\n\x19\x45XPORTER_STATUS_AVAILABLE\x10\x02\x12%\n!EXPORTER_STATUS_BEFORE_LEASE_HOOK\x10\x03\x12\x1f\n\x1b\x45XPORTER_STATUS_LEASE_READY\x10\x04\x12$\n EXPORTER_STATUS_AFTER_LEASE_HOOK\x10\x05\x12,\n(EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED\x10\x06\x12+\n\'EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED\x10\x07*\x98\x01\n\tLogSource\x12\x1a\n\x16LOG_SOURCE_UNSPECIFIED\x10\x00\x12\x15\n\x11LOG_SOURCE_DRIVER\x10\x01\x12 \n\x1cLOG_SOURCE_BEFORE_LEASE_HOOK\x10\x02\x12\x1f\n\x1bLOG_SOURCE_AFTER_LEASE_HOOK\x10\x03\x12\x15\n\x11LOG_SOURCE_SYSTEM\x10\x04\x42z\n\x12\x63om.jumpstarter.v1B\x0b\x43ommonProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'jumpstarter.v1.common_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\022com.jumpstarter.v1B\013CommonProtoP\001\242\002\003JXX\252\002\016Jumpstarter.V1\312\002\016Jumpstarter\\V1\342\002\032Jumpstarter\\V1\\GPBMetadata\352\002\017Jumpstarter::V1' + _globals['_EXPORTERSTATUS']._serialized_start=48 + _globals['_EXPORTERSTATUS']._serialized_end=358 + _globals['_LOGSOURCE']._serialized_start=361 + _globals['_LOGSOURCE']._serialized_end=513 +# @@protoc_insertion_point(module_scope) diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.py b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.py new file mode 100644 index 000000000..2daafffeb --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.py @@ -0,0 +1,4 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py index d75ddd9d4..2ef1177eb 100644 --- a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.py @@ -27,9 +27,10 @@ from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from . import kubernetes_pb2 as jumpstarter_dot_v1_dot_kubernetes__pb2 +from . import common_pb2 as jumpstarter_dot_v1_dot_common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xe5\x01\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuid\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 
\x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"]\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names2\xb7\x06\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a .jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a 
.jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\xb0\x03\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n jumpstarter/v1/jumpstarter.proto\x12\x0ejumpstarter.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1fjumpstarter/v1/kubernetes.proto\x1a\x1bjumpstarter/v1/common.proto\"\xd1\x01\n\x0fRegisterRequest\x12\x43\n\x06labels\x18\x01 \x03(\x0b\x32+.jumpstarter.v1.RegisterRequest.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x02 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xe5\x01\n\x14\x44riverInstanceReport\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12$\n\x0bparent_uuid\x18\x02 \x01(\tH\x00R\nparentUuid\x88\x01\x01\x12H\n\x06labels\x18\x03 \x03(\x0b\x32\x30.jumpstarter.v1.DriverInstanceReport.LabelsEntryR\x06labels\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x0e\n\x0c_parent_uuid\"&\n\x10RegisterResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\"+\n\x11UnregisterRequest\x12\x16\n\x06reason\x18\x02 \x01(\tR\x06reason\"\x14\n\x12UnregisterResponse\".\n\rListenRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"\\\n\x0eListenResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\x0f\n\rStatusRequest\"\x91\x01\n\x0eStatusResponse\x12\x16\n\x06leased\x18\x01 \x01(\x08R\x06leased\x12\"\n\nlease_name\x18\x02 \x01(\tH\x00R\tleaseName\x88\x01\x01\x12$\n\x0b\x63lient_name\x18\x03 \x01(\tH\x01R\nclientName\x88\x01\x01\x42\r\n\x0b_lease_nameB\x0e\n\x0c_client_name\",\n\x0b\x44ialRequest\x12\x1d\n\nlease_name\x18\x01 \x01(\tR\tleaseName\"Z\n\x0c\x44ialResponse\x12\'\n\x0frouter_endpoint\x18\x01 \x01(\tR\x0erouterEndpoint\x12!\n\x0crouter_token\x18\x02 \x01(\tR\x0brouterToken\"\xa1\x01\n\x12\x41uditStreamRequest\x12#\n\rexporter_uuid\x18\x01 \x01(\tR\x0c\x65xporterUuid\x12\x30\n\x14\x64river_instance_uuid\x18\x02 \x01(\tR\x12\x64riverInstanceUuid\x12\x1a\n\x08severity\x18\x03 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x04 \x01(\tR\x07message\"x\n\x13ReportStatusRequest\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 
\x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message\"\x16\n\x14ReportStatusResponse\"\xb8\x02\n\x11GetReportResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x45\n\x06labels\x18\x02 \x03(\x0b\x32-.jumpstarter.v1.GetReportResponse.LabelsEntryR\x06labels\x12>\n\x07reports\x18\x03 \x03(\x0b\x32$.jumpstarter.v1.DriverInstanceReportR\x07reports\x12M\n\x15\x61lternative_endpoints\x18\x04 \x03(\x0b\x32\x18.jumpstarter.v1.EndpointR\x14\x61lternativeEndpoints\x1a\x39\n\x0bLabelsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xa5\x01\n\x08\x45ndpoint\x12\x1a\n\x08\x65ndpoint\x18\x01 \x01(\tR\x08\x65ndpoint\x12 \n\x0b\x63\x65rtificate\x18\x02 \x01(\tR\x0b\x63\x65rtificate\x12-\n\x12\x63lient_certificate\x18\x03 \x01(\tR\x11\x63lientCertificate\x12,\n\x12\x63lient_private_key\x18\x04 \x01(\tR\x10\x63lientPrivateKey\"k\n\x11\x44riverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"X\n\x12\x44riverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"t\n\x1aStreamingDriverCallRequest\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x16\n\x06method\x18\x02 \x01(\tR\x06method\x12*\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x16.google.protobuf.ValueR\x04\x61rgs\"a\n\x1bStreamingDriverCallResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12.\n\x06result\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x06result\"\xa0\x01\n\x11LogStreamResponse\x12\x12\n\x04uuid\x18\x01 \x01(\tR\x04uuid\x12\x1a\n\x08severity\x18\x02 \x01(\tR\x08severity\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message\x12\x36\n\x06source\x18\x04 \x01(\x0e\x32\x19.jumpstarter.v1.LogSourceH\x00R\x06source\x88\x01\x01\x42\t\n\x07_source\"\x0e\n\x0cResetRequest\"\x0f\n\rResetResponse\"%\n\x0fGetLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x93\x03\n\x10GetLeaseResponse\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\x12>\n\nbegin_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00R\tbeginTime\x88\x01\x01\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01R\x07\x65ndTime\x88\x01\x01\x12(\n\rexporter_uuid\x18\x05 \x01(\tH\x02R\x0c\x65xporterUuid\x88\x01\x01\x12\x39\n\nconditions\x18\x06 \x03(\x0b\x32\x19.jumpstarter.v1.ConditionR\nconditionsB\r\n\x0b_begin_timeB\x0b\n\t_end_timeB\x10\n\x0e_exporter_uuid\"\x87\x01\n\x13RequestLeaseRequest\x12\x35\n\x08\x64uration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationR\x08\x64uration\x12\x39\n\x08selector\x18\x02 \x01(\x0b\x32\x1d.jumpstarter.v1.LabelSelectorR\x08selector\"*\n\x14RequestLeaseResponse\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\")\n\x13ReleaseLeaseRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\x16\n\x14ReleaseLeaseResponse\"\x13\n\x11ListLeasesRequest\"*\n\x12ListLeasesResponse\x12\x14\n\x05names\x18\x01 \x03(\tR\x05names\"\x12\n\x10GetStatusRequest\"v\n\x11GetStatusResponse\x12\x36\n\x06status\x18\x01 \x01(\x0e\x32\x1e.jumpstarter.v1.ExporterStatusR\x06status\x12\x1d\n\x07message\x18\x02 \x01(\tH\x00R\x07message\x88\x01\x01\x42\n\n\x08_message2\x92\x07\n\x11\x43ontrollerService\x12M\n\x08Register\x12\x1f.jumpstarter.v1.RegisterRequest\x1a 
.jumpstarter.v1.RegisterResponse\x12S\n\nUnregister\x12!.jumpstarter.v1.UnregisterRequest\x1a\".jumpstarter.v1.UnregisterResponse\x12Y\n\x0cReportStatus\x12#.jumpstarter.v1.ReportStatusRequest\x1a$.jumpstarter.v1.ReportStatusResponse\x12I\n\x06Listen\x12\x1d.jumpstarter.v1.ListenRequest\x1a\x1e.jumpstarter.v1.ListenResponse0\x01\x12I\n\x06Status\x12\x1d.jumpstarter.v1.StatusRequest\x1a\x1e.jumpstarter.v1.StatusResponse0\x01\x12\x41\n\x04\x44ial\x12\x1b.jumpstarter.v1.DialRequest\x1a\x1c.jumpstarter.v1.DialResponse\x12K\n\x0b\x41uditStream\x12\".jumpstarter.v1.AuditStreamRequest\x1a\x16.google.protobuf.Empty(\x01\x12M\n\x08GetLease\x12\x1f.jumpstarter.v1.GetLeaseRequest\x1a .jumpstarter.v1.GetLeaseResponse\x12Y\n\x0cRequestLease\x12#.jumpstarter.v1.RequestLeaseRequest\x1a$.jumpstarter.v1.RequestLeaseResponse\x12Y\n\x0cReleaseLease\x12#.jumpstarter.v1.ReleaseLeaseRequest\x1a$.jumpstarter.v1.ReleaseLeaseResponse\x12S\n\nListLeases\x12!.jumpstarter.v1.ListLeasesRequest\x1a\".jumpstarter.v1.ListLeasesResponse2\x82\x04\n\x0f\x45xporterService\x12\x46\n\tGetReport\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.GetReportResponse\x12S\n\nDriverCall\x12!.jumpstarter.v1.DriverCallRequest\x1a\".jumpstarter.v1.DriverCallResponse\x12p\n\x13StreamingDriverCall\x12*.jumpstarter.v1.StreamingDriverCallRequest\x1a+.jumpstarter.v1.StreamingDriverCallResponse0\x01\x12H\n\tLogStream\x12\x16.google.protobuf.Empty\x1a!.jumpstarter.v1.LogStreamResponse0\x01\x12\x44\n\x05Reset\x12\x1c.jumpstarter.v1.ResetRequest\x1a\x1d.jumpstarter.v1.ResetResponse\x12P\n\tGetStatus\x12 .jumpstarter.v1.GetStatusRequest\x1a!.jumpstarter.v1.GetStatusResponseB\x7f\n\x12\x63om.jumpstarter.v1B\x10JumpstarterProtoP\x01\xa2\x02\x03JXX\xaa\x02\x0eJumpstarter.V1\xca\x02\x0eJumpstarter\\V1\xe2\x02\x1aJumpstarter\\V1\\GPBMetadata\xea\x02\x0fJumpstarter::V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -43,72 +44,80 @@ _globals['_DRIVERINSTANCEREPORT_LABELSENTRY']._serialized_options = b'8\001' _globals['_GETREPORTRESPONSE_LABELSENTRY']._loaded_options = None _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_options = b'8\001' - _globals['_REGISTERREQUEST']._serialized_start=210 - _globals['_REGISTERREQUEST']._serialized_end=419 - _globals['_REGISTERREQUEST_LABELSENTRY']._serialized_start=362 - _globals['_REGISTERREQUEST_LABELSENTRY']._serialized_end=419 - _globals['_DRIVERINSTANCEREPORT']._serialized_start=422 - _globals['_DRIVERINSTANCEREPORT']._serialized_end=651 - _globals['_DRIVERINSTANCEREPORT_LABELSENTRY']._serialized_start=362 - _globals['_DRIVERINSTANCEREPORT_LABELSENTRY']._serialized_end=419 - _globals['_REGISTERRESPONSE']._serialized_start=653 - _globals['_REGISTERRESPONSE']._serialized_end=691 - _globals['_UNREGISTERREQUEST']._serialized_start=693 - _globals['_UNREGISTERREQUEST']._serialized_end=736 - _globals['_UNREGISTERRESPONSE']._serialized_start=738 - _globals['_UNREGISTERRESPONSE']._serialized_end=758 - _globals['_LISTENREQUEST']._serialized_start=760 - _globals['_LISTENREQUEST']._serialized_end=806 - _globals['_LISTENRESPONSE']._serialized_start=808 - _globals['_LISTENRESPONSE']._serialized_end=900 - _globals['_STATUSREQUEST']._serialized_start=902 - _globals['_STATUSREQUEST']._serialized_end=917 - _globals['_STATUSRESPONSE']._serialized_start=920 - _globals['_STATUSRESPONSE']._serialized_end=1065 - _globals['_DIALREQUEST']._serialized_start=1067 - _globals['_DIALREQUEST']._serialized_end=1111 - _globals['_DIALRESPONSE']._serialized_start=1113 - 
_globals['_DIALRESPONSE']._serialized_end=1203 - _globals['_AUDITSTREAMREQUEST']._serialized_start=1206 - _globals['_AUDITSTREAMREQUEST']._serialized_end=1367 - _globals['_GETREPORTRESPONSE']._serialized_start=1370 - _globals['_GETREPORTRESPONSE']._serialized_end=1682 - _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_start=362 - _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_end=419 - _globals['_ENDPOINT']._serialized_start=1685 - _globals['_ENDPOINT']._serialized_end=1850 - _globals['_DRIVERCALLREQUEST']._serialized_start=1852 - _globals['_DRIVERCALLREQUEST']._serialized_end=1959 - _globals['_DRIVERCALLRESPONSE']._serialized_start=1961 - _globals['_DRIVERCALLRESPONSE']._serialized_end=2049 - _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_start=2051 - _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_end=2167 - _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_start=2169 - _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_end=2266 - _globals['_LOGSTREAMRESPONSE']._serialized_start=2268 - _globals['_LOGSTREAMRESPONSE']._serialized_end=2361 - _globals['_RESETREQUEST']._serialized_start=2363 - _globals['_RESETREQUEST']._serialized_end=2377 - _globals['_RESETRESPONSE']._serialized_start=2379 - _globals['_RESETRESPONSE']._serialized_end=2394 - _globals['_GETLEASEREQUEST']._serialized_start=2396 - _globals['_GETLEASEREQUEST']._serialized_end=2433 - _globals['_GETLEASERESPONSE']._serialized_start=2436 - _globals['_GETLEASERESPONSE']._serialized_end=2839 - _globals['_REQUESTLEASEREQUEST']._serialized_start=2842 - _globals['_REQUESTLEASEREQUEST']._serialized_end=2977 - _globals['_REQUESTLEASERESPONSE']._serialized_start=2979 - _globals['_REQUESTLEASERESPONSE']._serialized_end=3021 - _globals['_RELEASELEASEREQUEST']._serialized_start=3023 - _globals['_RELEASELEASEREQUEST']._serialized_end=3064 - _globals['_RELEASELEASERESPONSE']._serialized_start=3066 - _globals['_RELEASELEASERESPONSE']._serialized_end=3088 - _globals['_LISTLEASESREQUEST']._serialized_start=3090 - _globals['_LISTLEASESREQUEST']._serialized_end=3109 - _globals['_LISTLEASESRESPONSE']._serialized_start=3111 - _globals['_LISTLEASESRESPONSE']._serialized_end=3153 - _globals['_CONTROLLERSERVICE']._serialized_start=3156 - _globals['_CONTROLLERSERVICE']._serialized_end=3979 - _globals['_EXPORTERSERVICE']._serialized_start=3982 - _globals['_EXPORTERSERVICE']._serialized_end=4414 + _globals['_REGISTERREQUEST']._serialized_start=239 + _globals['_REGISTERREQUEST']._serialized_end=448 + _globals['_REGISTERREQUEST_LABELSENTRY']._serialized_start=391 + _globals['_REGISTERREQUEST_LABELSENTRY']._serialized_end=448 + _globals['_DRIVERINSTANCEREPORT']._serialized_start=451 + _globals['_DRIVERINSTANCEREPORT']._serialized_end=680 + _globals['_DRIVERINSTANCEREPORT_LABELSENTRY']._serialized_start=391 + _globals['_DRIVERINSTANCEREPORT_LABELSENTRY']._serialized_end=448 + _globals['_REGISTERRESPONSE']._serialized_start=682 + _globals['_REGISTERRESPONSE']._serialized_end=720 + _globals['_UNREGISTERREQUEST']._serialized_start=722 + _globals['_UNREGISTERREQUEST']._serialized_end=765 + _globals['_UNREGISTERRESPONSE']._serialized_start=767 + _globals['_UNREGISTERRESPONSE']._serialized_end=787 + _globals['_LISTENREQUEST']._serialized_start=789 + _globals['_LISTENREQUEST']._serialized_end=835 + _globals['_LISTENRESPONSE']._serialized_start=837 + _globals['_LISTENRESPONSE']._serialized_end=929 + _globals['_STATUSREQUEST']._serialized_start=931 + _globals['_STATUSREQUEST']._serialized_end=946 + 
_globals['_STATUSRESPONSE']._serialized_start=949 + _globals['_STATUSRESPONSE']._serialized_end=1094 + _globals['_DIALREQUEST']._serialized_start=1096 + _globals['_DIALREQUEST']._serialized_end=1140 + _globals['_DIALRESPONSE']._serialized_start=1142 + _globals['_DIALRESPONSE']._serialized_end=1232 + _globals['_AUDITSTREAMREQUEST']._serialized_start=1235 + _globals['_AUDITSTREAMREQUEST']._serialized_end=1396 + _globals['_REPORTSTATUSREQUEST']._serialized_start=1398 + _globals['_REPORTSTATUSREQUEST']._serialized_end=1518 + _globals['_REPORTSTATUSRESPONSE']._serialized_start=1520 + _globals['_REPORTSTATUSRESPONSE']._serialized_end=1542 + _globals['_GETREPORTRESPONSE']._serialized_start=1545 + _globals['_GETREPORTRESPONSE']._serialized_end=1857 + _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_start=391 + _globals['_GETREPORTRESPONSE_LABELSENTRY']._serialized_end=448 + _globals['_ENDPOINT']._serialized_start=1860 + _globals['_ENDPOINT']._serialized_end=2025 + _globals['_DRIVERCALLREQUEST']._serialized_start=2027 + _globals['_DRIVERCALLREQUEST']._serialized_end=2134 + _globals['_DRIVERCALLRESPONSE']._serialized_start=2136 + _globals['_DRIVERCALLRESPONSE']._serialized_end=2224 + _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_start=2226 + _globals['_STREAMINGDRIVERCALLREQUEST']._serialized_end=2342 + _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_start=2344 + _globals['_STREAMINGDRIVERCALLRESPONSE']._serialized_end=2441 + _globals['_LOGSTREAMRESPONSE']._serialized_start=2444 + _globals['_LOGSTREAMRESPONSE']._serialized_end=2604 + _globals['_RESETREQUEST']._serialized_start=2606 + _globals['_RESETREQUEST']._serialized_end=2620 + _globals['_RESETRESPONSE']._serialized_start=2622 + _globals['_RESETRESPONSE']._serialized_end=2637 + _globals['_GETLEASEREQUEST']._serialized_start=2639 + _globals['_GETLEASEREQUEST']._serialized_end=2676 + _globals['_GETLEASERESPONSE']._serialized_start=2679 + _globals['_GETLEASERESPONSE']._serialized_end=3082 + _globals['_REQUESTLEASEREQUEST']._serialized_start=3085 + _globals['_REQUESTLEASEREQUEST']._serialized_end=3220 + _globals['_REQUESTLEASERESPONSE']._serialized_start=3222 + _globals['_REQUESTLEASERESPONSE']._serialized_end=3264 + _globals['_RELEASELEASEREQUEST']._serialized_start=3266 + _globals['_RELEASELEASEREQUEST']._serialized_end=3307 + _globals['_RELEASELEASERESPONSE']._serialized_start=3309 + _globals['_RELEASELEASERESPONSE']._serialized_end=3331 + _globals['_LISTLEASESREQUEST']._serialized_start=3333 + _globals['_LISTLEASESREQUEST']._serialized_end=3352 + _globals['_LISTLEASESRESPONSE']._serialized_start=3354 + _globals['_LISTLEASESRESPONSE']._serialized_end=3396 + _globals['_GETSTATUSREQUEST']._serialized_start=3398 + _globals['_GETSTATUSREQUEST']._serialized_end=3416 + _globals['_GETSTATUSRESPONSE']._serialized_start=3418 + _globals['_GETSTATUSRESPONSE']._serialized_end=3536 + _globals['_CONTROLLERSERVICE']._serialized_start=3539 + _globals['_CONTROLLERSERVICE']._serialized_end=4453 + _globals['_EXPORTERSERVICE']._serialized_start=4456 + _globals['_EXPORTERSERVICE']._serialized_end=4970 # @@protoc_insertion_point(module_scope) diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py index d975d654f..641a345ba 100644 --- a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py +++ 
b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.py @@ -26,6 +26,11 @@ def __init__(self, channel): request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.UnregisterRequest.SerializeToString, response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.UnregisterResponse.FromString, _registered_method=True) + self.ReportStatus = channel.unary_unary( + '/jumpstarter.v1.ControllerService/ReportStatus', + request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusRequest.SerializeToString, + response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusResponse.FromString, + _registered_method=True) self.Listen = channel.unary_stream( '/jumpstarter.v1.ControllerService/Listen', request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ListenRequest.SerializeToString, @@ -89,6 +94,14 @@ def Unregister(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def ReportStatus(self, request, context): + """Exporter status report + Allows exporters to report their own status to the controller + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Listen(self, request, context): """Exporter listening Returns stream tokens for accepting incoming client connections @@ -163,6 +176,11 @@ def add_ControllerServiceServicer_to_server(servicer, server): request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.UnregisterRequest.FromString, response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.UnregisterResponse.SerializeToString, ), + 'ReportStatus': grpc.unary_unary_rpc_method_handler( + servicer.ReportStatus, + request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusRequest.FromString, + response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusResponse.SerializeToString, + ), 'Listen': grpc.unary_stream_rpc_method_handler( servicer.Listen, request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ListenRequest.FromString, @@ -269,6 +287,33 @@ def Unregister(request, metadata, _registered_method=True) + @staticmethod + def ReportStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/jumpstarter.v1.ControllerService/ReportStatus', + jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusRequest.SerializeToString, + jumpstarter_dot_v1_dot_jumpstarter__pb2.ReportStatusResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + @staticmethod def Listen(request, target, @@ -522,6 +567,11 @@ def __init__(self, channel): request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ResetRequest.SerializeToString, response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ResetResponse.FromString, _registered_method=True) + self.GetStatus = channel.unary_unary( + '/jumpstarter.v1.ExporterService/GetStatus', + request_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.SerializeToString, + response_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.FromString, + _registered_method=True) class ExporterServiceServicer(object): @@ -560,6 +610,12 @@ def Reset(self, request, context): 
context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetStatus(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def add_ExporterServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -588,6 +644,11 @@ def add_ExporterServiceServicer_to_server(servicer, server): request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ResetRequest.FromString, response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.ResetResponse.SerializeToString, ), + 'GetStatus': grpc.unary_unary_rpc_method_handler( + servicer.GetStatus, + request_deserializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.FromString, + response_serializer=jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'jumpstarter.v1.ExporterService', rpc_method_handlers) @@ -735,3 +796,30 @@ def Reset(request, timeout, metadata, _registered_method=True) + + @staticmethod + def GetStatus(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/jumpstarter.v1.ExporterService/GetStatus', + jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusRequest.SerializeToString, + jumpstarter_dot_v1_dot_jumpstarter__pb2.GetStatusResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/packages/jumpstarter/jumpstarter/client/core.py b/packages/jumpstarter/jumpstarter/client/core.py index f842ade6c..f4a541d79 100644 --- a/packages/jumpstarter/jumpstarter/client/core.py +++ b/packages/jumpstarter/jumpstarter/client/core.py @@ -13,7 +13,7 @@ from grpc.aio import AioRpcError from jumpstarter_protocol import jumpstarter_pb2, jumpstarter_pb2_grpc, router_pb2_grpc -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.exceptions import JumpstarterException from jumpstarter.common.resources import ResourceMetadata from jumpstarter.common.serde import decode_value, encode_value @@ -47,6 +47,12 @@ class DriverInvalidArgument(DriverError, ValueError): """ +class ExporterNotReady(DriverError): + """ + Raised when the exporter is not ready to accept driver calls + """ + + @dataclass(kw_only=True) class AsyncDriverClient( Metadata, @@ -76,9 +82,28 @@ def __post_init__(self): handler.setFormatter(logging.Formatter("%(name)s - %(levelname)s - %(message)s")) self.logger.addHandler(handler) + async def check_exporter_status(self): + """Check if the exporter is ready to accept driver calls""" + try: + response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) + status = ExporterStatus.from_proto(response.status) + + if status != ExporterStatus.LEASE_READY: + raise ExporterNotReady(f"Exporter status is {status}: {response.status_message}") + + except AioRpcError as e: + # If GetStatus is not implemented, assume ready for backward compatibility + if e.code() == StatusCode.UNIMPLEMENTED: + self.logger.debug("GetStatus not implemented, assuming exporter is ready") + return + raise DriverError(f"Failed to check exporter status: 
{e.details()}") from e + async def call_async(self, method, *args): """Make DriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.DriverCallRequest( uuid=str(self.uuid), method=method, @@ -105,6 +130,9 @@ async def call_async(self, method, *args): async def streamingcall_async(self, method, *args): """Make StreamingDriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.StreamingDriverCallRequest( uuid=str(self.uuid), method=method, diff --git a/packages/jumpstarter/jumpstarter/client/grpc.py b/packages/jumpstarter/jumpstarter/client/grpc.py index 97763e2f8..7df26b624 100644 --- a/packages/jumpstarter/jumpstarter/client/grpc.py +++ b/packages/jumpstarter/jumpstarter/client/grpc.py @@ -13,6 +13,7 @@ from jumpstarter_protocol import client_pb2, client_pb2_grpc, jumpstarter_pb2_grpc, kubernetes_pb2, router_pb2_grpc from pydantic import BaseModel, ConfigDict, Field, field_serializer +from jumpstarter.common import ExporterStatus from jumpstarter.common.grpc import translate_grpc_exceptions @@ -20,6 +21,7 @@ class WithOptions: show_online: bool = False show_leases: bool = False + show_status: bool = False def add_display_columns(table, options: WithOptions = None): @@ -28,6 +30,8 @@ def add_display_columns(table, options: WithOptions = None): table.add_column("NAME") if options.show_online: table.add_column("ONLINE") + if options.show_status: + table.add_column("STATUS") table.add_column("LABELS") if options.show_leases: table.add_column("LEASED BY") @@ -42,6 +46,9 @@ def add_exporter_row(table, exporter, options: WithOptions = None, lease_info: t row_data.append(exporter.name) if options.show_online: row_data.append("yes" if exporter.online else "no") + if options.show_status: + status_str = str(exporter.status) if exporter.status else "UNKNOWN" + row_data.append(status_str) row_data.append(",".join(("{}={}".format(k, v) for k, v in sorted(exporter.labels.items())))) if options.show_leases: if lease_info: @@ -81,12 +88,16 @@ class Exporter(BaseModel): name: str labels: dict[str, str] online: bool = False + status: ExporterStatus | None = None lease: Lease | None = None @classmethod def from_protobuf(cls, data: client_pb2.Exporter) -> Exporter: namespace, name = parse_exporter_identifier(data.name) - return cls(namespace=namespace, name=name, labels=data.labels, online=data.online) + status = None + if hasattr(data, "status") and data.status: + status = ExporterStatus.from_proto(data.status) + return cls(namespace=namespace, name=name, labels=data.labels, online=data.online, status=status) @classmethod def rich_add_columns(cls, table, options: WithOptions = None): @@ -197,6 +208,7 @@ class ExporterList(BaseModel): next_page_token: str | None = Field(exclude=True) include_online: bool = Field(default=False, exclude=True) include_leases: bool = Field(default=False, exclude=True) + include_status: bool = Field(default=False, exclude=True) @classmethod def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> ExporterList: @@ -206,11 +218,15 @@ def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> ExporterList: ) def rich_add_columns(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) 
Exporter.rich_add_columns(table, options) def rich_add_rows(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) for exporter in self.exporters: exporter.rich_add_rows(table, options) @@ -227,12 +243,10 @@ def model_dump_json(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + exclude_fields.add("status") - data = { - "exporters": [ - exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters - ] - } + data = {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} return json.dumps(data, **json_kwargs) def model_dump(self, **kwargs): @@ -241,12 +255,11 @@ def model_dump(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + exclude_fields.add("status") + + return {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} - return { - "exporters": [ - exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters - ] - } class LeaseList(BaseModel): leases: list[Lease] diff --git a/packages/jumpstarter/jumpstarter/common/__init__.py b/packages/jumpstarter/jumpstarter/common/__init__.py index 13058cb09..08645b471 100644 --- a/packages/jumpstarter/jumpstarter/common/__init__.py +++ b/packages/jumpstarter/jumpstarter/common/__init__.py @@ -1,4 +1,12 @@ +from .enums import ExporterStatus, LogSource from .metadata import Metadata from .tempfile import TemporarySocket, TemporaryTcpListener, TemporaryUnixListener -__all__ = ["Metadata", "TemporarySocket", "TemporaryUnixListener", "TemporaryTcpListener"] +__all__ = [ + "ExporterStatus", + "LogSource", + "Metadata", + "TemporarySocket", + "TemporaryUnixListener", + "TemporaryTcpListener", +] diff --git a/packages/jumpstarter/jumpstarter/common/enums.py b/packages/jumpstarter/jumpstarter/common/enums.py new file mode 100644 index 000000000..ce6a79c2b --- /dev/null +++ b/packages/jumpstarter/jumpstarter/common/enums.py @@ -0,0 +1,76 @@ +"""Human-readable enum wrappers for protobuf-generated constants.""" + +from enum import IntEnum + +from jumpstarter_protocol.jumpstarter.v1 import common_pb2 + + +class ExporterStatus(IntEnum): + """Exporter status states.""" + + UNSPECIFIED = common_pb2.EXPORTER_STATUS_UNSPECIFIED + """Unknown/unspecified exporter status""" + + OFFLINE = common_pb2.EXPORTER_STATUS_OFFLINE + """The exporter is currently offline""" + + AVAILABLE = common_pb2.EXPORTER_STATUS_AVAILABLE + """Exporter is available to be leased""" + + BEFORE_LEASE_HOOK = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK + """Exporter is leased, but currently executing before lease hook""" + + LEASE_READY = common_pb2.EXPORTER_STATUS_LEASE_READY + """Exporter is leased and ready to accept commands""" + + AFTER_LEASE_HOOK = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK + """Lease was releaseed, but exporter is executing after lease hook""" + + BEFORE_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED + """The before lease hook failed and the exporter is no longer available""" + + AFTER_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED + """The after lease hook failed and the exporter is no longer available""" + + def __str__(self): + return 
self.name + + @classmethod + def from_proto(cls, value: int) -> "ExporterStatus": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value + + +class LogSource(IntEnum): + """Log source types.""" + + UNSPECIFIED = common_pb2.LOG_SOURCE_UNSPECIFIED + """Unspecified/unknown log source""" + + DRIVER = common_pb2.LOG_SOURCE_DRIVER + """Logs produced by a Jumpstarter driver""" + + BEFORE_LEASE_HOOK = common_pb2.LOG_SOURCE_BEFORE_LEASE_HOOK + """Logs produced by a before lease hook""" + + AFTER_LEASE_HOOK = common_pb2.LOG_SOURCE_AFTER_LEASE_HOOK + """Logs produced by an after lease hook""" + + SYSTEM = common_pb2.LOG_SOURCE_SYSTEM + """System/exporter logs""" + + def __str__(self): + return self.name + + @classmethod + def from_proto(cls, value: int) -> "LogSource": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value diff --git a/packages/jumpstarter/jumpstarter/config/client.py b/packages/jumpstarter/jumpstarter/config/client.py index 9872c3c7e..ad5e195fc 100644 --- a/packages/jumpstarter/jumpstarter/config/client.py +++ b/packages/jumpstarter/jumpstarter/config/client.py @@ -148,12 +148,14 @@ async def list_exporters( filter: str | None = None, include_leases: bool = False, include_online: bool = False, + include_status: bool = False, ): svc = ClientService(channel=await self.channel(), namespace=self.metadata.namespace) exporters_response = await svc.ListExporters(page_size=page_size, page_token=page_token, filter=filter) - # Set the include_online flag for display purposes + # Set the include flags for display purposes exporters_response.include_online = include_online + exporters_response.include_status = include_status if not include_leases: return exporters_response diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 21bd783b5..e1ba881ba 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -22,7 +22,7 @@ jumpstarter_pb2_grpc, ) -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 from jumpstarter.driver import Driver @@ -46,6 +46,8 @@ class Exporter(AsyncContextManagerMixin, Metadata): _tg: TaskGroup | None = field(init=False, default=None) _current_client_name: str = field(init=False, default="") _pre_lease_ready: Event | None = field(init=False, default=None) + _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + _current_session: Session | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False): """Signal the exporter to stop. @@ -54,12 +56,34 @@ def stop(self, wait_for_lease_exit=False): wait_for_lease_exit (bool): If True, wait for the current lease to exit before stopping. 
""" + # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: + logger.info("Stopping exporter immediately") self._tg.cancel_scope.cancel() elif not self._stop_requested: self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") + async def _update_status(self, status: ExporterStatus, message: str = ""): + """Update exporter status with the controller and session.""" + self._current_status = status + + # Update session status if available + if self._current_session: + self._current_session.update_status(status, message) + + try: + controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + await controller.UpdateStatus( + jumpstarter_pb2.UpdateStatusRequest( + status=status.to_proto(), + status_message=message, + ) + ) + logger.info(f"Updated status to {status}: {message}") + except Exception as e: + logger.error(f"Failed to update status: {e}") + @asynccontextmanager async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: try: @@ -73,6 +97,7 @@ async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: channel = await self.channel_factory() try: controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await self._update_status(ExporterStatus.OFFLINE, "Exporter shutting down") await controller.Unregister( jumpstarter_pb2.UnregisterRequest( reason="Exporter shutdown", @@ -105,20 +130,27 @@ async def session(self): labels=self.labels, root_device=self.device_factory(), ) as session: - async with session.serve_unix_async() as path: - async with grpc.aio.secure_channel( - f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) - ) as channel: - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) - logger.info("Registering exporter with controller") - await controller.Register( - jumpstarter_pb2.RegisterRequest( - labels=self.labels, - reports=response.reports, + # Store session reference for status updates + self._current_session = session + try: + async with session.serve_unix_async() as path: + async with grpc.aio.secure_channel( + f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) + ) as channel: + response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + logger.info("Registering exporter with controller") + await controller.Register( + jumpstarter_pb2.RegisterRequest( + labels=self.labels, + reports=response.reports, + ) ) - ) - self.registered = True - yield path + self.registered = True + await self._update_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + yield path + finally: + # Clear session reference + self._current_session = None async def handle(self, lease_name, tg): logger.info("Listening for incoming connection requests on lease %s", lease_name) @@ -204,7 +236,9 @@ async def status(retries=5, backoff=3): ) # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") await self.hook_executor.execute_post_lease_hook(hook_context) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") @@ -237,20 +271,29 @@ async def status(retries=5, backoff=3): ) # Start pre-lease hook asynchronously - async def 
run_pre_lease_hook(): + async def run_before_lease_hook(hook_ctx): try: - await self.hook_executor.execute_pre_lease_hook(hook_context) - logger.info("Pre-lease hook completed successfully") + await self._update_status( + ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" + ) + await self.hook_executor.execute_pre_lease_hook(hook_ctx) + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") except Exception as e: - logger.error("Pre-lease hook failed: %s", e) + logger.error("beforeLease hook failed: %s", e) + # Still transition to ready even if hook fails + await self._update_status( + ExporterStatus.LEASE_READY, f"Ready (beforeLease hook failed: {e})" + ) finally: # Always set the event to unblock connections if self._pre_lease_ready: self._pre_lease_ready.set() - tg.start_soon(run_pre_lease_hook) + tg.start_soon(run_before_lease_hook, hook_context) else: # No hook configured, set event immediately + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") if self._pre_lease_ready: self._pre_lease_ready.set() else: @@ -264,7 +307,9 @@ async def run_pre_lease_hook(): ) # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") await self.hook_executor.execute_post_lease_hook(hook_context) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" # Reset event for next lease diff --git a/packages/jumpstarter/jumpstarter/exporter/logging.py b/packages/jumpstarter/jumpstarter/exporter/logging.py index 629306c29..8b73467df 100644 --- a/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -3,12 +3,15 @@ from jumpstarter_protocol import jumpstarter_pb2 +from jumpstarter.common import LogSource + class LogHandler(logging.Handler): - def __init__(self, queue: deque): + def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): logging.Handler.__init__(self) self.queue = queue self.listener = None + self.source = source # LogSource enum value def enqueue(self, record): self.queue.append(record) @@ -18,6 +21,7 @@ def prepare(self, record): uuid="", severity=record.levelname, message=self.format(record), + source=self.source.value, # Convert to proto value ) def emit(self, record): diff --git a/packages/jumpstarter/jumpstarter/exporter/session.py b/packages/jumpstarter/jumpstarter/exporter/session.py index 63ae2f08d..f9f2340dc 100644 --- a/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/packages/jumpstarter/jumpstarter/exporter/session.py @@ -17,7 +17,8 @@ ) from .logging import LogHandler -from jumpstarter.common import Metadata, TemporarySocket +from jumpstarter.common import ExporterStatus, Metadata, TemporarySocket +from jumpstarter.common.enums import LogSource from jumpstarter.common.streams import StreamRequestMetadata from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream @@ -39,6 +40,9 @@ class Session( _logging_queue: deque = field(init=False) _logging_handler: QueueHandler = field(init=False) + _current_status: ExporterStatus = field(init=False, default=ExporterStatus.AVAILABLE) + _status_message: str = field(init=False, default="") + _status_update_event: Event = field(init=False) @contextmanager def __contextmanager__(self) -> Generator[Self]: @@ -67,7 +71,8 @@ def __init__(self, 
*args, root_device, **kwargs): self.mapping = {u: i for (u, _, _, i) in self.root_device.enumerate()} self._logging_queue = deque(maxlen=32) - self._logging_handler = LogHandler(self._logging_queue) + self._logging_handler = LogHandler(self._logging_queue, LogSource.SYSTEM) + self._status_update_event = Event() @asynccontextmanager async def serve_port_async(self, port): @@ -139,3 +144,19 @@ async def LogStream(self, request, context): yield self._logging_queue.popleft() except IndexError: await sleep(0.5) + + def update_status(self, status: int | ExporterStatus, message: str = ""): + """Update the current exporter status for the session.""" + if isinstance(status, int): + self._current_status = ExporterStatus.from_proto(status) + else: + self._current_status = status + self._status_message = message + + async def GetStatus(self, request, context): + """Get the current exporter status.""" + logger.debug("GetStatus() -> %s", self._current_status) + return jumpstarter_pb2.GetStatusResponse( + status=self._current_status.to_proto(), + status_message=self._status_message, + ) From 6e8b44f92a3aceadeafe04c1e1103ad02279e7f8 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 29 Sep 2025 16:34:50 -0400 Subject: [PATCH 03/21] Improve logging infrastructure --- .../jumpstarter/jumpstarter/driver/base.py | 5 +- .../jumpstarter/exporter/exporter.py | 6 ++ .../jumpstarter/jumpstarter/exporter/hooks.py | 24 +++++--- .../jumpstarter/exporter/logging.py | 61 ++++++++++++++++++- .../jumpstarter/exporter/session.py | 18 +++++- 5 files changed, 101 insertions(+), 13 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/driver/base.py b/packages/jumpstarter/jumpstarter/driver/base.py index eedb92a01..6c0373be1 100644 --- a/packages/jumpstarter/jumpstarter/driver/base.py +++ b/packages/jumpstarter/jumpstarter/driver/base.py @@ -27,8 +27,9 @@ MARKER_STREAMCALL, MARKER_STREAMING_DRIVERCALL, ) -from jumpstarter.common import Metadata +from jumpstarter.common import LogSource, Metadata from jumpstarter.common.resources import ClientStreamResource, PresignedRequestResource, Resource, ResourceMetadata +from jumpstarter.exporter.logging import get_logger from jumpstarter.common.serde import decode_value, encode_value from jumpstarter.common.streams import ( DriverStreamRequest, @@ -79,7 +80,7 @@ def __post_init__(self): if hasattr(super(), "__post_init__"): super().__post_init__() - self.logger = logging.getLogger(self.__class__.__name__) + self.logger = get_logger(f"driver.{self.__class__.__name__}", LogSource.DRIVER) self.logger.setLevel(self.log_level) def close(self): diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index e1ba881ba..8f2e74f90 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -237,6 +237,8 @@ async def status(retries=5, backoff=3): # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_post_lease_hook(hook_context) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") @@ -276,6 +278,8 @@ async def run_before_lease_hook(hook_ctx): await self._update_status( ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" ) + # Pass the current session 
to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_pre_lease_hook(hook_ctx) await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") logger.info("beforeLease hook completed successfully") @@ -308,6 +312,8 @@ async def run_before_lease_hook(hook_ctx): # Shield the post-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session await self.hook_executor.execute_post_lease_hook(hook_context) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index 16318ce33..d71b3e1a6 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -7,9 +7,11 @@ from dataclasses import dataclass, field from typing import Callable +from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1 from jumpstarter.driver import Driver +from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session logger = logging.getLogger(__name__) @@ -32,6 +34,7 @@ class HookExecutor: config: HookConfigV1Alpha1 device_factory: Callable[[], Driver] + main_session: Session | None = field(default=None) timeout: int = field(init=False) def __post_init__(self): @@ -63,9 +66,9 @@ async def _create_hook_environment(self, context: HookContext): } ) - yield hook_env + yield session, hook_env - async def _execute_hook(self, command: str, context: HookContext) -> bool: + async def _execute_hook(self, command: str, context: HookContext, log_source: LogSource) -> bool: """Execute a single hook command.""" if not command or not command.strip(): logger.debug("Hook command is empty, skipping") @@ -73,7 +76,7 @@ async def _execute_hook(self, command: str, context: HookContext) -> bool: logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) - async with self._create_hook_environment(context) as hook_env: + async with self._create_hook_environment(context) as (session, hook_env): try: # Execute the hook command using shell process = await asyncio.create_subprocess_shell( @@ -84,6 +87,12 @@ async def _execute_hook(self, command: str, context: HookContext) -> bool: ) try: + # Determine which session to use for logging - prefer main session if available + logging_session = self.main_session if self.main_session is not None else session + + # Create a logger with automatic source registration + hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) + # Stream output line-by-line for real-time logging output_lines = [] @@ -94,7 +103,8 @@ async def read_output(): break line_decoded = line.decode().rstrip() output_lines.append(line_decoded) - logger.info("[Hook Output] %s", line_decoded) + # Route hook output through the logging system + hook_logger.info(line_decoded) # Run output reading and process waiting concurrently with timeout await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) @@ -104,8 +114,6 @@ async def read_output(): return True else: logger.error("Hook failed with return code %d", process.returncode) - if 
output_lines: - logger.error("Hook output: %s", "\n".join(output_lines)) return False except asyncio.TimeoutError: @@ -129,7 +137,7 @@ async def execute_pre_lease_hook(self, context: HookContext) -> bool: return True logger.info("Executing pre-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.pre_lease, context) + return await self._execute_hook(self.config.pre_lease, context, LogSource.BEFORE_LEASE_HOOK) async def execute_post_lease_hook(self, context: HookContext) -> bool: """Execute the post-lease hook.""" @@ -138,4 +146,4 @@ async def execute_post_lease_hook(self, context: HookContext) -> bool: return True logger.info("Executing post-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.post_lease, context) + return await self._execute_hook(self.config.post_lease, context, LogSource.AFTER_LEASE_HOOK) diff --git a/packages/jumpstarter/jumpstarter/exporter/logging.py b/packages/jumpstarter/jumpstarter/exporter/logging.py index 8b73467df..ec8243f0d 100644 --- a/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -1,10 +1,16 @@ import logging from collections import deque +from contextlib import contextmanager +from threading import RLock +from typing import TYPE_CHECKING from jumpstarter_protocol import jumpstarter_pb2 from jumpstarter.common import LogSource +if TYPE_CHECKING: + from .session import Session + class LogHandler(logging.Handler): def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): @@ -12,16 +18,39 @@ def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): self.queue = queue self.listener = None self.source = source # LogSource enum value + self._lock = RLock() + self._child_handlers = {} # Dict of logger_name -> LogSource mappings + + def add_child_handler(self, logger_name: str, source: LogSource): + """Add a child handler that will route logs from a specific logger with a different source.""" + with self._lock: + self._child_handlers[logger_name] = source + + def remove_child_handler(self, logger_name: str): + """Remove a child handler mapping.""" + with self._lock: + self._child_handlers.pop(logger_name, None) + + def get_source_for_record(self, record): + """Determine the appropriate log source for a record.""" + with self._lock: + # Check if this record comes from a logger with a specific source mapping + logger_name = record.name + for mapped_logger, source in self._child_handlers.items(): + if logger_name.startswith(mapped_logger): + return source + return self.source def enqueue(self, record): self.queue.append(record) def prepare(self, record): + source = self.get_source_for_record(record) return jumpstarter_pb2.LogStreamResponse( uuid="", severity=record.levelname, message=self.format(record), - source=self.source.value, # Convert to proto value + source=source.value, # Convert to proto value ) def emit(self, record): @@ -29,3 +58,33 @@ def emit(self, record): self.enqueue(self.prepare(record)) except Exception: self.handleError(record) + + @contextmanager + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + self.add_child_handler(logger_name, source) + try: + yield + finally: + self.remove_child_handler(logger_name) + + +def get_logger(name: str, source: LogSource = LogSource.SYSTEM, session: "Session" = None) -> logging.Logger: + """ + Get a logger with automatic LogSource mapping. 
+ + Args: + name: Logger name (e.g., __name__ or custom name) + source: The LogSource to associate with this logger + session: Optional session to register with immediately + + Returns: + A standard Python logger instance + """ + logger = logging.getLogger(name) + + # If session provided, register the source mapping + if session: + session.add_logger_source(name, source) + + return logger diff --git a/packages/jumpstarter/jumpstarter/exporter/session.py b/packages/jumpstarter/jumpstarter/exporter/session.py index f9f2340dc..13d1a462a 100644 --- a/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/packages/jumpstarter/jumpstarter/exporter/session.py @@ -17,8 +17,7 @@ ) from .logging import LogHandler -from jumpstarter.common import ExporterStatus, Metadata, TemporarySocket -from jumpstarter.common.enums import LogSource +from jumpstarter.common import ExporterStatus, LogSource, Metadata, TemporarySocket from jumpstarter.common.streams import StreamRequestMetadata from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream @@ -74,6 +73,9 @@ def __init__(self, *args, root_device, **kwargs): self._logging_handler = LogHandler(self._logging_queue, LogSource.SYSTEM) self._status_update_event = Event() + # Map all driver logs to DRIVER source + self._logging_handler.add_child_handler("driver.", LogSource.DRIVER) + @asynccontextmanager async def serve_port_async(self, port): server = grpc.aio.server() @@ -153,6 +155,18 @@ def update_status(self, status: int | ExporterStatus, message: str = ""): self._current_status = status self._status_message = message + def add_logger_source(self, logger_name: str, source: LogSource): + """Add a log source mapping for a specific logger.""" + self._logging_handler.add_child_handler(logger_name, source) + + def remove_logger_source(self, logger_name: str): + """Remove a log source mapping for a specific logger.""" + self._logging_handler.remove_child_handler(logger_name) + + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + return self._logging_handler.context_log_source(logger_name, source) + async def GetStatus(self, request, context): """Get the current exporter status.""" logger.debug("GetStatus() -> %s", self._current_status) From 0d7607ae0732110bb709f3d1b46886b73483dd5c Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Fri, 31 Oct 2025 11:03:24 -0400 Subject: [PATCH 04/21] Fix circular dependency in logging.py --- .../jumpstarter/exporter/logging.py | 9 ++++---- .../jumpstarter/exporter/logging_protocol.py | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 packages/jumpstarter/jumpstarter/exporter/logging_protocol.py diff --git a/packages/jumpstarter/jumpstarter/exporter/logging.py b/packages/jumpstarter/jumpstarter/exporter/logging.py index ec8243f0d..6a6e8dad9 100644 --- a/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -2,15 +2,12 @@ from collections import deque from contextlib import contextmanager from threading import RLock -from typing import TYPE_CHECKING from jumpstarter_protocol import jumpstarter_pb2 +from .logging_protocol import LoggerRegistration from jumpstarter.common import LogSource -if TYPE_CHECKING: - from .session import Session - class LogHandler(logging.Handler): def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): @@ -69,7 +66,9 @@ def context_log_source(self, 
logger_name: str, source: LogSource): self.remove_child_handler(logger_name) -def get_logger(name: str, source: LogSource = LogSource.SYSTEM, session: "Session" = None) -> logging.Logger: +def get_logger( + name: str, source: LogSource = LogSource.SYSTEM, session: LoggerRegistration | None = None +) -> logging.Logger: """ Get a logger with automatic LogSource mapping. diff --git a/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py b/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py new file mode 100644 index 000000000..04ed885f2 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py @@ -0,0 +1,22 @@ +"""Protocol for logger registration to avoid circular dependencies.""" + +from typing import Protocol + +from jumpstarter.common import LogSource + + +class LoggerRegistration(Protocol): + """Protocol for objects that can register logger sources. + + This protocol defines the interface for objects that can associate + logger names with log sources, enabling proper routing of log messages. + """ + + def add_logger_source(self, logger_name: str, source: LogSource) -> None: + """Register a logger name with its corresponding log source. + + Args: + logger_name: Name of the logger to register + source: The log source category for this logger + """ + ... From b5de1722ea1cf9aecd56da7b08c4c7d358f48c36 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Fri, 31 Oct 2025 17:57:45 -0400 Subject: [PATCH 05/21] Update hook behavior to match spec --- .../jumpstarter/config/exporter.py | 27 +- .../jumpstarter/config/exporter_test.py | 35 +-- .../jumpstarter/exporter/exporter.py | 132 ++++++--- .../jumpstarter/jumpstarter/exporter/hooks.py | 252 ++++++++++++----- .../jumpstarter/exporter/hooks_test.py | 256 +++++++++++++++--- 5 files changed, 530 insertions(+), 172 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py index 893f04496..3e7b88b21 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -18,14 +18,31 @@ from jumpstarter.driver import Driver +class HookInstanceConfigV1Alpha1(BaseModel): + """Configuration for a specific lifecycle hook.""" + + model_config = ConfigDict(populate_by_name=True) + + script: str = Field(alias="script", description="The j script to execute for this hook") + timeout: int = Field(default=120, description="The hook execution timeout in seconds (default: 120s)") + exit_code: int = Field(alias="exitCode", default=0, description="The expected exit code (default: 0)") + on_failure: Literal["pass", "block", "warn"] = Field( + default="pass", + alias="onFailure", + description=( + "Action to take when the expected exit code is not returned: 'pass' continues normally, " + "'block' takes the exporter offline and blocks leases, 'warn' continues and prints a warning" + ), + ) + + class HookConfigV1Alpha1(BaseModel): """Configuration for lifecycle hooks.""" model_config = ConfigDict(populate_by_name=True) - pre_lease: str | None = Field(default=None, alias="preLease") - post_lease: str | None = Field(default=None, alias="postLease") - timeout: int = Field(default=300, description="Hook execution timeout in seconds") + before_lease: HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="beforeLease") + after_lease: HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="afterLease") class ExporterConfigV1Alpha1DriverInstanceProxy(BaseModel): @@ -62,7 +79,7 @@ def 
instantiate(self) -> Driver: description=self.root.description, methods_description=self.root.methods_description, children=children, - **self.root.config + **self.root.config, ) case ExporterConfigV1Alpha1DriverInstanceComposite(): @@ -198,7 +215,7 @@ async def channel_factory(): # Create hook executor if hooks are configured hook_executor = None - if self.hooks.pre_lease or self.hooks.post_lease: + if self.hooks.before_lease or self.hooks.after_lease: from jumpstarter.exporter.hooks import HookExecutor hook_executor = HookExecutor( diff --git a/packages/jumpstarter/jumpstarter/config/exporter_test.py b/packages/jumpstarter/jumpstarter/config/exporter_test.py index eebce7839..68d0e3f42 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter_test.py +++ b/packages/jumpstarter/jumpstarter/config/exporter_test.py @@ -116,13 +116,16 @@ def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: P endpoint: "jumpstarter.my-lab.com:1443" token: "test-token" hooks: - preLease: | - echo "Pre-lease hook for $LEASE_NAME" - j power on - postLease: | - echo "Post-lease hook for $LEASE_NAME" - j power off - timeout: 600 + beforeLease: + script: | + echo "Pre-lease hook for $LEASE_NAME" + j power on + timeout: 600 + afterLease: + script: | + echo "Post-lease hook for $LEASE_NAME" + j power off + timeout: 600 export: power: type: "jumpstarter_driver_power.driver.PduPower" @@ -134,22 +137,20 @@ def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: P config = ExporterConfigV1Alpha1.load("test-hooks") - assert config.hooks.pre_lease == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' - assert config.hooks.post_lease == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' - assert config.hooks.timeout == 600 + assert config.hooks.before_lease.script == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' + assert config.hooks.after_lease.script == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' # Test that it round-trips correctly path.unlink() ExporterConfigV1Alpha1.save(config) reloaded_config = ExporterConfigV1Alpha1.load("test-hooks") - assert reloaded_config.hooks.pre_lease == config.hooks.pre_lease - assert reloaded_config.hooks.post_lease == config.hooks.post_lease - assert reloaded_config.hooks.timeout == config.hooks.timeout + assert reloaded_config.hooks.before_lease.script == config.hooks.before_lease.script + assert reloaded_config.hooks.after_lease.script == config.hooks.after_lease.script # Test that the YAML uses camelCase yaml_output = ExporterConfigV1Alpha1.dump_yaml(config) - assert "preLease:" in yaml_output - assert "postLease:" in yaml_output - assert "pre_lease:" not in yaml_output - assert "post_lease:" not in yaml_output + assert "beforeLease:" in yaml_output + assert "afterLease:" in yaml_output + assert "before_lease:" not in yaml_output + assert "after_lease:" not in yaml_output diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index c1f370d87..301c57bbf 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -26,7 +26,7 @@ from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutor +from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor from jumpstarter.exporter.session import Session 
logger = logging.getLogger(__name__) @@ -49,6 +49,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): _pre_lease_ready: Event | None = field(init=False, default=None) _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) _current_session: Session | None = field(init=False, default=None) + _session_socket_path: str | None = field(init=False, default=None) def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -183,13 +184,18 @@ async def listen(retries=5, backoff=3): tg.start_soon(listen) - # Wait for pre-lease hook to complete before processing connections - if self._pre_lease_ready is not None: - logger.info("Waiting for pre-lease hook to complete before accepting connections") - await self._pre_lease_ready.wait() - logger.info("Pre-lease hook completed, now accepting connections") - + # Create session before hooks run async with self.session() as path: + # Store socket path for hook execution + self._session_socket_path = path + + # Wait for before-lease hook to complete before processing connections + if self._pre_lease_ready is not None: + logger.info("Waiting for before-lease hook to complete before accepting connections") + await self._pre_lease_ready.wait() + logger.info("before-lease hook completed, now accepting connections") + + # Process client connections async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( @@ -231,19 +237,15 @@ async def status(retries=5, backoff=3): tg.start_soon(status) async for status in status_rx: if self.lease_name != "" and self.lease_name != status.lease_name: - # Post-lease hook for the previous lease + # After-lease hook for the previous lease if self.hook_executor and self._current_client_name: hook_context = HookContext( lease_name=self.lease_name, client_name=self._current_client_name, ) - # Shield the post-lease hook from cancellation and await it + # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_post_lease_hook(hook_context) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self.run_after_lease_hook(hook_context) self.lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") @@ -267,37 +269,14 @@ async def status(retries=5, backoff=3): logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) self._current_client_name = status.client_name - # Pre-lease hook when transitioning from unleased to leased + # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: hook_context = HookContext( lease_name=status.lease_name, client_name=status.client_name, ) - - # Start pre-lease hook asynchronously - async def run_before_lease_hook(hook_ctx): - try: - await self._update_status( - ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks" - ) - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_pre_lease_hook(hook_ctx) - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") - logger.info("beforeLease hook completed successfully") - except Exception as e: - 
logger.error("beforeLease hook failed: %s", e) - # Still transition to ready even if hook fails - await self._update_status( - ExporterStatus.LEASE_READY, f"Ready (beforeLease hook failed: {e})" - ) - finally: - # Always set the event to unblock connections - if self._pre_lease_ready: - self._pre_lease_ready.set() - - tg.start_soon(run_before_lease_hook, hook_context) + tg.start_soon(self.run_before_lease_hook, self, hook_context) else: # No hook configured, set event immediately await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") @@ -306,18 +285,21 @@ async def run_before_lease_hook(hook_ctx): else: logger.info("Currently not leased") - # Post-lease hook when transitioning from leased to unleased + # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: hook_context = HookContext( lease_name=self.lease_name, client_name=self._current_client_name, ) - # Shield the post-lease hook from cancellation and await it + # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session - await self.hook_executor.execute_post_lease_hook(hook_context) + # Use session socket if available, otherwise create new session + await self.hook_executor.execute_after_lease_hook( + hook_context, socket_path=self._session_socket_path + ) await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" @@ -330,3 +312,69 @@ async def run_before_lease_hook(hook_ctx): self._previous_leased = current_leased self._tg = None + + async def run_before_lease_hook(self, hook_ctx: HookContext): + """ + Execute the before-lease hook for the current exporter session. 
+ + Args: + hook_ctx (HookContext): The current hook execution context + """ + try: + await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session + + # Wait for socket path to be available + while self._session_socket_path is None: + await sleep(0.1) + + # Execute hook with main session socket + await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) + await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") + except HookExecutionError as e: + # Hook failed with on_failure='block' - end lease and set failed status + logger.error("beforeLease hook failed (on_failure=block): %s", e) + await self._update_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" + ) + # Note: We don't take the exporter offline for before_lease hook failures + # The lease is simply not ready, and the exporter remains available for future leases + except Exception as e: + # Unexpected error during hook execution + logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) + await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") + finally: + # Always set the event to unblock connections + if self._pre_lease_ready: + self._pre_lease_ready.set() + + async def run_after_lease_hook(self, hook_ctx: HookContext): + """ + Execute the after-lease hook for the current exporter session. + + Args: + hook_ctx (HookContext): The current hook execution context + """ + try: + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + # Pass the current session to hook executor for logging + self.hook_executor.main_session = self._current_session + # Use session socket if available, otherwise create new session + await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) + await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + logger.info("afterLease hook completed successfully") + except HookExecutionError as e: + # Hook failed with on_failure='block' - set failed status and shut down exporter + logger.error("afterLease hook failed (on_failure=block): %s", e) + await self._update_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" + ) + # Shut down the exporter after after_lease hook failure with on_failure='block' + logger.error("Shutting down exporter due to afterLease hook failure") + self.stop() + except Exception as e: + # Unexpected error during hook execution + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await self._update_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index d71b3e1a6..63ca84c39 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -9,7 +9,7 @@ from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver 
from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session @@ -17,6 +17,12 @@ logger = logging.getLogger(__name__) +class HookExecutionError(Exception): + """Raised when a hook fails and on_failure is set to 'block'.""" + + pass + + @dataclass(kw_only=True) class HookContext: """Context information passed to hooks.""" @@ -35,10 +41,6 @@ class HookExecutor: config: HookConfigV1Alpha1 device_factory: Callable[[], Driver] main_session: Session | None = field(default=None) - timeout: int = field(init=False) - - def __post_init__(self): - self.timeout = self.config.timeout @asynccontextmanager async def _create_hook_environment(self, context: HookContext): @@ -68,82 +70,196 @@ async def _create_hook_environment(self, context: HookContext): yield session, hook_env - async def _execute_hook(self, command: str, context: HookContext, log_source: LogSource) -> bool: - """Execute a single hook command.""" + async def _execute_hook( + self, + hook_config: HookInstanceConfigV1Alpha1, + context: HookContext, + log_source: LogSource, + socket_path: str | None = None, + ) -> bool: + """Execute a single hook command. + + Args: + hook_config: Hook configuration including script, timeout, exit_code, and on_failure + context: Hook context information + log_source: Log source for hook output + socket_path: Optional Unix socket path to reuse existing session. + If provided, hooks will access the main session instead of creating their own. + """ + command = hook_config.script if not command or not command.strip(): logger.debug("Hook command is empty, skipping") return True logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) - async with self._create_hook_environment(context) as (session, hook_env): + # If socket_path provided, use existing session; otherwise create new one + if socket_path is not None: + # Reuse existing session - create environment without session creation + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(socket_path), + JMP_DRIVERS_ALLOW: "UNSAFE", + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + + # Use main session for logging (must be available when socket_path is provided) + logging_session = self.main_session + if logging_session is None: + raise ValueError("main_session must be set when reusing socket_path") + + return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + else: + # Create new session for hook execution (fallback/standalone mode) + async with self._create_hook_environment(context) as (session, hook_env): + # Determine which session to use for logging + logging_session = self.main_session if self.main_session is not None else session + return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + + async def _execute_hook_process( + self, + hook_config: HookInstanceConfigV1Alpha1, + context: HookContext, + log_source: LogSource, + hook_env: dict, + logging_session: Session, + ) -> bool: + """Execute the hook process with the given environment and logging session.""" + command = hook_config.script + timeout = hook_config.timeout + expected_exit_code = hook_config.exit_code + on_failure = hook_config.on_failure + + try: + # Execute the hook command using shell + process = await asyncio.create_subprocess_shell( + command, + 
env=hook_env, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + try: - # Execute the hook command using shell - process = await asyncio.create_subprocess_shell( - command, - env=hook_env, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, - ) + # Create a logger with automatic source registration + hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) - try: - # Determine which session to use for logging - prefer main session if available - logging_session = self.main_session if self.main_session is not None else session - - # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) - - # Stream output line-by-line for real-time logging - output_lines = [] - - async def read_output(): - while True: - line = await process.stdout.readline() - if not line: - break - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - # Route hook output through the logging system - hook_logger.info(line_decoded) - - # Run output reading and process waiting concurrently with timeout - await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=self.timeout) - - if process.returncode == 0: - logger.info("Hook executed successfully") + # Stream output line-by-line for real-time logging + output_lines = [] + + async def read_output(): + while True: + line = await process.stdout.readline() + if not line: + break + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + # Route hook output through the logging system + hook_logger.info(line_decoded) + + # Run output reading and process waiting concurrently with timeout + await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) + + # Check if exit code matches expected + if process.returncode == expected_exit_code: + logger.info("Hook executed successfully with exit code %d", process.returncode) + return True + else: + # Exit code mismatch - handle according to on_failure setting + error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" + + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) return True - else: - logger.error("Hook failed with return code %d", process.returncode) - return False + else: # on_failure == "block" + logger.error("%s (on_failure=block, raising exception)", error_msg) + raise HookExecutionError(error_msg) + except asyncio.TimeoutError: + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + try: + process.terminate() + await asyncio.wait_for(process.wait(), timeout=5) except asyncio.TimeoutError: - logger.error("Hook timed out after %d seconds", self.timeout) - try: - process.terminate() - await asyncio.wait_for(process.wait(), timeout=5) - except asyncio.TimeoutError: - process.kill() - await process.wait() - return False - - except Exception as e: - logger.error("Error executing hook: %s", e, exc_info=True) - return False - - async def execute_pre_lease_hook(self, context: HookContext) -> bool: - """Execute the pre-lease hook.""" - if not self.config.pre_lease: - logger.debug("No pre-lease hook configured") + process.kill() + await process.wait() + + # Handle timeout according to on_failure setting + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return 
True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return True + else: # on_failure == "block" + raise HookExecutionError(error_msg) + + except HookExecutionError: + # Re-raise HookExecutionError to propagate to exporter + raise + except Exception as e: + error_msg = f"Error executing hook: {e}" + logger.error(error_msg, exc_info=True) + + # Handle exception according to on_failure setting + if on_failure == "pass": + logger.info("%s (on_failure=pass, continuing)", error_msg) + return True + elif on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return True + else: # on_failure == "block" + raise HookExecutionError(error_msg) from e + + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + """Execute the before-lease hook. + + Args: + context: Hook context information + socket_path: Optional Unix socket path to reuse existing session + + Raises: + HookExecutionError: If hook fails and on_failure is set to 'block' + """ + if not self.config.before_lease: + logger.debug("No before-lease hook configured") return True - logger.info("Executing pre-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.pre_lease, context, LogSource.BEFORE_LEASE_HOOK) + logger.info("Executing before-lease hook for lease %s", context.lease_name) + return await self._execute_hook( + self.config.before_lease, + context, + LogSource.BEFORE_LEASE_HOOK, + socket_path, + ) + + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + """Execute the after-lease hook. + + Args: + context: Hook context information + socket_path: Optional Unix socket path to reuse existing session - async def execute_post_lease_hook(self, context: HookContext) -> bool: - """Execute the post-lease hook.""" - if not self.config.post_lease: - logger.debug("No post-lease hook configured") + Raises: + HookExecutionError: If hook fails and on_failure is set to 'block' + """ + if not self.config.after_lease: + logger.debug("No after-lease hook configured") return True - logger.info("Executing post-lease hook for lease %s", context.lease_name) - return await self._execute_hook(self.config.post_lease, context, LogSource.AFTER_LEASE_HOOK) + logger.info("Executing after-lease hook for lease %s", context.lease_name) + return await self._execute_hook( + self.config.after_lease, + context, + LogSource.AFTER_LEASE_HOOK, + socket_path, + ) diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index 576ca6810..bf86d7972 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -4,9 +4,9 @@ import pytest from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.config.exporter import HookConfigV1Alpha1 +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutor +from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor pytestmark = pytest.mark.anyio @@ -34,9 +34,8 @@ def factory(): @pytest.fixture def hook_config(): return HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - post_lease="echo 'Post-lease hook executed'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 
'Pre-lease hook executed'", timeout=10), + after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease hook executed'", timeout=10), ) @@ -60,7 +59,6 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory): assert executor.config == hook_config assert executor.device_factory == mock_device_factory - assert executor.timeout == 10 async def test_empty_hook_execution(self, mock_device_factory, hook_context): empty_config = HookConfigV1Alpha1() @@ -70,13 +68,12 @@ async def test_empty_hook_execution(self, mock_device_factory, hook_context): ) # Both hooks should return True for empty/None commands - assert await executor.execute_pre_lease_hook(hook_context) is True - assert await executor.execute_post_lease_hook(hook_context) is True + assert await executor.execute_before_lease_hook(hook_context) is True + assert await executor.execute_after_lease_hook(hook_context) is True async def test_successful_hook_execution(self, mock_device_factory, hook_context): hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) # Mock the Session and serve_unix_async with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -103,7 +100,7 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + result = await executor.execute_before_lease_hook(hook_context) assert result is True @@ -122,8 +119,9 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context async def test_failed_hook_execution(self, mock_device_factory, hook_context): failed_config = HookConfigV1Alpha1( - pre_lease="exit 1", # Command that will fail - timeout=10, + before_lease=HookInstanceConfigV1Alpha1( + script="exit 1", timeout=10, on_failure="block" + ), # Command that will fail with on_failure="block" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -149,14 +147,15 @@ async def test_failed_hook_execution(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) - - assert result is False + # Should raise HookExecutionError since on_failure="block" + with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + await executor.execute_before_lease_hook(hook_context) async def test_hook_timeout(self, mock_device_factory, hook_context): timeout_config = HookConfigV1Alpha1( - pre_lease="sleep 60", # Command that will timeout - timeout=1, # 1 second timeout + before_lease=HookInstanceConfigV1Alpha1( + script="sleep 60", timeout=1, on_failure="block" + ), # Command that will timeout with on_failure="block" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -180,15 +179,15 @@ async def test_hook_timeout(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + # Should raise HookExecutionError since on_failure="block" + with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): + await executor.execute_before_lease_hook(hook_context) - assert result is False mock_process.terminate.assert_called_once() async def test_hook_environment_variables(self, mock_device_factory, hook_context): hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Pre-lease hook executed'", - 
timeout=10, + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: mock_session = Mock() @@ -211,7 +210,7 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex device_factory=mock_device_factory, ) - await executor.execute_pre_lease_hook(hook_context) + await executor.execute_before_lease_hook(hook_context) # Check that all expected environment variables are set call_args = mock_subprocess.call_args @@ -228,8 +227,9 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex async def test_real_time_output_logging(self, mock_device_factory, hook_context): """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( - pre_lease="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", - timeout=10, + before_lease=HookInstanceConfigV1Alpha1( + script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10 + ), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -260,25 +260,22 @@ async def test_real_time_output_logging(self, mock_device_factory, hook_context) device_factory=mock_device_factory, ) - result = await executor.execute_pre_lease_hook(hook_context) + result = await executor.execute_before_lease_hook(hook_context) assert result is True # Verify that output lines were logged in real-time at INFO level expected_calls = [ - call.info("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call.info("[Hook Output] %s", "Line 1"), - call.info("[Hook Output] %s", "Line 2"), - call.info("[Hook Output] %s", "Line 3"), - call.info("Hook executed successfully"), + call("Executing before-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call("Hook executed successfully with exit code %d", 0), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( - post_lease="echo 'Post-lease cleanup completed'", - timeout=10, + after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -298,22 +295,201 @@ async def test_post_lease_hook_execution_on_completion(self, mock_device_factory mock_process.wait = AsyncMock(return_value=None) # Mock the logger to capture log calls - with patch("jumpstarter.exporter.hooks.logger") as mock_logger, \ - patch("asyncio.create_subprocess_shell", return_value=mock_process): + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): executor = HookExecutor( config=hook_config, device_factory=mock_device_factory, ) - result = await executor.execute_post_lease_hook(hook_context) + result = await executor.execute_after_lease_hook(hook_context) assert result is True # Verify that post-lease hook output was logged expected_calls = [ - call.info("Executing post-lease hook for lease %s", "test-lease-123"), - call.info("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call.info("[Hook Output] %s", "Post-lease cleanup completed"), - call.info("Hook executed successfully"), + call("Executing after-lease hook for lease %s", "test-lease-123"), + call("Executing 
hook: %s", "echo 'Post-lease cleanup completed'"), + call("Hook executed successfully with exit code %d", 0), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_hook_exit_code_matching_success(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code matches expected value.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 0", timeout=10, exit_code=0), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 0 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + + async def test_hook_exit_code_matching_custom(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code matches custom expected value.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 42", timeout=10, exit_code=42), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 42 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + + async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code mismatches but on_failure='pass'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="pass"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify INFO log was created + mock_logger.info.assert_any_call( + "Hook failed: 
expected exit code 0, got 1 (on_failure=pass, continuing)" + ) + + async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): + """Test that hook succeeds when exit code mismatches but on_failure='warn'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="warn"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify WARNING log was created + mock_logger.warning.assert_any_call( + "Hook failed: expected exit code 0, got 1 (on_failure=warn, continuing)" + ) + + async def test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): + """Test that hook raises exception when exit code mismatches and on_failure='block'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="block"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.returncode = 1 + mock_process.stdout.readline.side_effect = [b""] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + await executor.execute_before_lease_hook(hook_context) + + async def test_hook_timeout_with_pass(self, mock_device_factory, hook_context): + """Test that hook succeeds when timeout occurs but on_failure='pass'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="pass"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = 
HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify INFO log was created + assert any("on_failure=pass, continuing" in str(call) for call in mock_logger.info.call_args_list) + + async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): + """Test that hook succeeds when timeout occurs but on_failure='warn'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), + ) + + with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: + mock_session = Mock() + mock_session_class.return_value.__enter__.return_value = mock_session + mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") + mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + + mock_process = AsyncMock() + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(hook_context) + assert result is True + # Verify WARNING log was created + assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) From 7581373734e4c14e67e76dfb45a69c3c8ac50c02 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 3 Nov 2025 17:07:37 -0500 Subject: [PATCH 06/21] Improve hook error handling --- .../jumpstarter/jumpstarter/exporter/hooks.py | 36 +++++++++---------- .../jumpstarter/exporter/hooks_test.py | 8 ++--- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index 63ca84c39..77803b28d 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -76,7 +76,7 @@ async def _execute_hook( context: HookContext, log_source: LogSource, socket_path: str | None = None, - ) -> bool: + ): """Execute a single hook command. 
Args: @@ -89,7 +89,7 @@ async def _execute_hook( command = hook_config.script if not command or not command.strip(): logger.debug("Hook command is empty, skipping") - return True + return logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) @@ -129,7 +129,7 @@ async def _execute_hook_process( log_source: LogSource, hook_env: dict, logging_session: Session, - ) -> bool: + ): """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout @@ -168,22 +168,22 @@ async def read_output(): # Check if exit code matches expected if process.returncode == expected_exit_code: logger.info("Hook executed successfully with exit code %d", process.returncode) - return True + return else: # Exit code mismatch - handle according to on_failure setting error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" logger.error("%s (on_failure=block, raising exception)", error_msg) raise HookExecutionError(error_msg) - except asyncio.TimeoutError: + except asyncio.TimeoutError as e: error_msg = f"Hook timed out after {timeout} seconds" logger.error(error_msg) try: @@ -196,12 +196,12 @@ async def read_output(): # Handle timeout according to on_failure setting if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" - raise HookExecutionError(error_msg) + raise HookExecutionError(error_msg) from e except HookExecutionError: # Re-raise HookExecutionError to propagate to exporter @@ -213,14 +213,14 @@ async def read_output(): # Handle exception according to on_failure setting if on_failure == "pass": logger.info("%s (on_failure=pass, continuing)", error_msg) - return True + return elif on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) - return True + return else: # on_failure == "block" raise HookExecutionError(error_msg) from e - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None): """Execute the before-lease hook. Args: @@ -232,17 +232,17 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str """ if not self.config.before_lease: logger.debug("No before-lease hook configured") - return True + return logger.info("Executing before-lease hook for lease %s", context.lease_name) - return await self._execute_hook( + await self._execute_hook( self.config.before_lease, context, LogSource.BEFORE_LEASE_HOOK, socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> bool: + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None): """Execute the after-lease hook. 
Args: @@ -254,10 +254,10 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str """ if not self.config.after_lease: logger.debug("No after-lease hook configured") - return True + return logger.info("Executing after-lease hook for lease %s", context.lease_name) - return await self._execute_hook( + await self._execute_hook( self.config.after_lease, context, LogSource.AFTER_LEASE_HOOK, diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index bf86d7972..0e18d332c 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -384,9 +384,9 @@ async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_cont executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) assert result is True - # Verify INFO log was created + # Verify INFO log was created (using format string) mock_logger.info.assert_any_call( - "Hook failed: expected exit code 0, got 1 (on_failure=pass, continuing)" + "%s (on_failure=pass, continuing)", "Hook failed: expected exit code 0, got 1" ) async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): @@ -413,9 +413,9 @@ async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_cont executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) assert result is True - # Verify WARNING log was created + # Verify WARNING log was created (using format string) mock_logger.warning.assert_any_call( - "Hook failed: expected exit code 0, got 1 (on_failure=warn, continuing)" + "%s (on_failure=warn, continuing)", "Hook failed: expected exit code 0, got 1" ) async def test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): From aa951a32d80718ff2debfb5e980ddd8b30cc059c Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 24 Nov 2025 13:43:35 -0500 Subject: [PATCH 07/21] Add strongly-typed Protobuf and gRPC codegen and refactor exporter for clarity --- buf.gen.yaml | 4 + .../jumpstarter/client/v1/client_pb2.pyi | 318 ++++++++ .../jumpstarter/client/v1/client_pb2_grpc.pyi | 307 +++++++ .../jumpstarter/v1/common_pb2.pyi | 96 +++ .../jumpstarter/v1/common_pb2_grpc.pyi | 20 + .../jumpstarter/v1/jumpstarter_pb2.pyi | 717 +++++++++++++++++ .../jumpstarter/v1/jumpstarter_pb2_grpc.pyi | 752 ++++++++++++++++++ .../jumpstarter/v1/kubernetes_pb2.pyi | 148 ++++ .../jumpstarter/v1/kubernetes_pb2_grpc.pyi | 20 + .../jumpstarter/v1/router_pb2.pyi | 73 ++ .../jumpstarter/v1/router_pb2_grpc.pyi | 96 +++ .../jumpstarter/config/exporter.py | 13 +- .../jumpstarter/jumpstarter/driver/base.py | 2 +- .../jumpstarter/exporter/exporter.py | 523 ++++++++---- .../jumpstarter/jumpstarter/exporter/hooks.py | 115 +-- .../jumpstarter/exporter/hooks_test.py | 214 +---- 16 files changed, 3036 insertions(+), 382 deletions(-) create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi create mode 100644 
packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi create mode 100644 packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi diff --git a/buf.gen.yaml b/buf.gen.yaml index fce4d534c..78467f082 100644 --- a/buf.gen.yaml +++ b/buf.gen.yaml @@ -6,6 +6,10 @@ plugins: out: ./packages/jumpstarter-protocol/jumpstarter_protocol - remote: buf.build/grpc/python out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy-grpc:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol inputs: - git_repo: https://github.com/jumpstarter-dev/jumpstarter-protocol.git branch: main diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi new file mode 100644 index 000000000..500b13794 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi @@ -0,0 +1,318 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.field_mask_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Exporter(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + name: builtins.str + online: builtins.bool + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ def __init__( + self, + *, + name: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + online: builtins.bool = ..., + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "name", b"name", "online", b"online", "status", b"status"]) -> None: ... + +Global___Exporter: typing_extensions.TypeAlias = Exporter + +@typing.final +class Lease(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + DURATION_FIELD_NUMBER: builtins.int + EFFECTIVE_DURATION_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_END_TIME_FIELD_NUMBER: builtins.int + CLIENT_FIELD_NUMBER: builtins.int + EXPORTER_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + name: builtins.str + selector: builtins.str + client: builtins.str + exporter: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def effective_duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... + def __init__( + self, + *, + name: builtins.str = ..., + selector: builtins.str = ..., + duration: google.protobuf.duration_pb2.Duration | None = ..., + effective_duration: google.protobuf.duration_pb2.Duration | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + client: builtins.str | None = ..., + exporter: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "conditions", b"conditions", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter", "name", b"name", "selector", b"selector"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client", b"_client"]) -> typing.Literal["client"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_duration", b"_duration"]) -> typing.Literal["duration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_begin_time", b"_effective_begin_time"]) -> typing.Literal["effective_begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_end_time", b"_effective_end_time"]) -> typing.Literal["effective_end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter", b"_exporter"]) -> typing.Literal["exporter"] | None: ... + +Global___Lease: typing_extensions.TypeAlias = Lease + +@typing.final +class GetExporterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___GetExporterRequest: typing_extensions.TypeAlias = GetExporterRequest + +@typing.final +class ListExportersRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["filter", b"filter", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + +Global___ListExportersRequest: typing_extensions.TypeAlias = ListExportersRequest + +@typing.final +class ListExportersResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTERS_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def exporters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Exporter]: ... + def __init__( + self, + *, + exporters: collections.abc.Iterable[Global___Exporter] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["exporters", b"exporters", "next_page_token", b"next_page_token"]) -> None: ... + +Global___ListExportersResponse: typing_extensions.TypeAlias = ListExportersResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + ONLY_ACTIVE_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + only_active: builtins.bool + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + only_active: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_only_active", b"_only_active", "only_active", b"only_active"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_only_active", b"_only_active", "filter", b"filter", "only_active", b"only_active", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_only_active", b"_only_active"]) -> typing.Literal["only_active"] | None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASES_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def leases(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Lease]: ... + def __init__( + self, + *, + leases: collections.abc.Iterable[Global___Lease] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["leases", b"leases", "next_page_token", b"next_page_token"]) -> None: ... + +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class CreateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + LEASE_ID_FIELD_NUMBER: builtins.int + LEASE_FIELD_NUMBER: builtins.int + parent: builtins.str + lease_id: builtins.str + @property + def lease(self) -> Global___Lease: ... + def __init__( + self, + *, + parent: builtins.str = ..., + lease_id: builtins.str = ..., + lease: Global___Lease | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "lease_id", b"lease_id", "parent", b"parent"]) -> None: ... 
+ +Global___CreateLeaseRequest: typing_extensions.TypeAlias = CreateLeaseRequest + +@typing.final +class UpdateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_FIELD_NUMBER: builtins.int + UPDATE_MASK_FIELD_NUMBER: builtins.int + @property + def lease(self) -> Global___Lease: ... + @property + def update_mask(self) -> google.protobuf.field_mask_pb2.FieldMask: ... + def __init__( + self, + *, + lease: Global___Lease | None = ..., + update_mask: google.protobuf.field_mask_pb2.FieldMask | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> None: ... + +Global___UpdateLeaseRequest: typing_extensions.TypeAlias = UpdateLeaseRequest + +@typing.final +class DeleteLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___DeleteLeaseRequest: typing_extensions.TypeAlias = DeleteLeaseRequest diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi new file mode 100644 index 000000000..a5aa7937b --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi @@ -0,0 +1,307 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.client.v1.client_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ClientServiceGetExporterType = typing_extensions.TypeVar( + '_ClientServiceGetExporterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], +) + +_ClientServiceListExportersType = typing_extensions.TypeVar( + '_ClientServiceListExportersType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], +) + +_ClientServiceGetLeaseType = typing_extensions.TypeVar( + '_ClientServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceListLeasesType = typing_extensions.TypeVar( + '_ClientServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], +) + +_ClientServiceCreateLeaseType = typing_extensions.TypeVar( + '_ClientServiceCreateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceUpdateLeaseType = typing_extensions.TypeVar( + '_ClientServiceUpdateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceDeleteLeaseType = typing_extensions.TypeVar( + '_ClientServiceDeleteLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + 
google.protobuf.empty_pb2.Empty, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +class ClientServiceStub(typing.Generic[_ClientServiceGetExporterType, _ClientServiceListExportersType, _ClientServiceGetLeaseType, _ClientServiceListLeasesType, _ClientServiceCreateLeaseType, _ClientServiceUpdateLeaseType, _ClientServiceDeleteLeaseType]): + @typing.overload + def __init__(self: ClientServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetExporter: _ClientServiceGetExporterType + + ListExporters: _ClientServiceListExportersType + + GetLease: _ClientServiceGetLeaseType + + ListLeases: _ClientServiceListLeasesType + + CreateLease: _ClientServiceCreateLeaseType + + UpdateLease: _ClientServiceUpdateLeaseType + + DeleteLease: _ClientServiceDeleteLeaseType + +ClientServiceAsyncStub: typing_extensions.TypeAlias = ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +] + +class ClientServiceServicer(metaclass=abc.ABCMeta): + @abc.abstractmethod + def GetExporter( + self, + request: jumpstarter.client.v1.client_pb2.GetExporterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Exporter, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Exporter]]: ... + + @abc.abstractmethod + def ListExporters( + self, + request: jumpstarter.client.v1.client_pb2.ListExportersRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListExportersResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListExportersResponse]]: ... + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.client.v1.client_pb2.GetLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.client.v1.client_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListLeasesResponse]]: ... + + @abc.abstractmethod + def CreateLease( + self, + request: jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def UpdateLease( + self, + request: jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def DeleteLease( + self, + request: jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: ... 
+ +def add_ClientServiceServicer_to_server(servicer: ClientServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi new file mode 100644 index 000000000..f433f1db2 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _ExporterStatus: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ExporterStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExporterStatus.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + EXPORTER_STATUS_UNSPECIFIED: _ExporterStatus.ValueType # 0 + """Unspecified exporter status""" + EXPORTER_STATUS_OFFLINE: _ExporterStatus.ValueType # 1 + """Exporter is offline""" + EXPORTER_STATUS_AVAILABLE: _ExporterStatus.ValueType # 2 + """Exporter is available to be leased""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK: _ExporterStatus.ValueType # 3 + """Exporter is executing before lease hook(s)""" + EXPORTER_STATUS_LEASE_READY: _ExporterStatus.ValueType # 4 + """Exporter is leased and ready to accept commands""" + EXPORTER_STATUS_AFTER_LEASE_HOOK: _ExporterStatus.ValueType # 5 + """Exporter is executing after lease hook(s)""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 6 + """Exporter before lease hook failed""" + EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 7 + """Exporter after lease hook failed""" + +class ExporterStatus(_ExporterStatus, metaclass=_ExporterStatusEnumTypeWrapper): + """Shared types used across multiple Jumpstarter services + + Exporter status information + """ + +EXPORTER_STATUS_UNSPECIFIED: ExporterStatus.ValueType # 0 +"""Unspecified exporter status""" +EXPORTER_STATUS_OFFLINE: ExporterStatus.ValueType # 1 +"""Exporter is offline""" +EXPORTER_STATUS_AVAILABLE: ExporterStatus.ValueType # 2 +"""Exporter is available to be leased""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK: ExporterStatus.ValueType # 3 +"""Exporter is executing before lease hook(s)""" +EXPORTER_STATUS_LEASE_READY: ExporterStatus.ValueType # 4 +"""Exporter is leased and ready to accept commands""" +EXPORTER_STATUS_AFTER_LEASE_HOOK: ExporterStatus.ValueType # 5 +"""Exporter is executing after lease hook(s)""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 6 +"""Exporter before lease hook failed""" +EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 7 +"""Exporter after lease hook failed""" +Global___ExporterStatus: typing_extensions.TypeAlias = ExporterStatus + +class _LogSource: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _LogSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogSource.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LOG_SOURCE_UNSPECIFIED: _LogSource.ValueType # 0 + 
"""Unspecified log source""" + LOG_SOURCE_DRIVER: _LogSource.ValueType # 1 + """Driver/device logs""" + LOG_SOURCE_BEFORE_LEASE_HOOK: _LogSource.ValueType # 2 + """beforeLease hook execution logs""" + LOG_SOURCE_AFTER_LEASE_HOOK: _LogSource.ValueType # 3 + """afterLease hook execution logs""" + LOG_SOURCE_SYSTEM: _LogSource.ValueType # 4 + """System/exporter logs""" + +class LogSource(_LogSource, metaclass=_LogSourceEnumTypeWrapper): + """Source of log stream messages""" + +LOG_SOURCE_UNSPECIFIED: LogSource.ValueType # 0 +"""Unspecified log source""" +LOG_SOURCE_DRIVER: LogSource.ValueType # 1 +"""Driver/device logs""" +LOG_SOURCE_BEFORE_LEASE_HOOK: LogSource.ValueType # 2 +"""beforeLease hook execution logs""" +LOG_SOURCE_AFTER_LEASE_HOOK: LogSource.ValueType # 3 +"""afterLease hook execution logs""" +LOG_SOURCE_SYSTEM: LogSource.ValueType # 4 +"""System/exporter logs""" +Global___LogSource: typing_extensions.TypeAlias = LogSource diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi new file mode 100644 index 000000000..6aac97060 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi new file mode 100644 index 000000000..762c46c62 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -0,0 +1,717 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.struct_pb2 +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class RegisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """additional context: + - token/authentication mechanism + """ + + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + def __init__( + self, + *, + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "reports", b"reports"]) -> None: ... + +Global___RegisterRequest: typing_extensions.TypeAlias = RegisterRequest + +@typing.final +class DriverInstanceReport(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing.final + class MethodsDescriptionEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + UUID_FIELD_NUMBER: builtins.int + PARENT_UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + METHODS_DESCRIPTION_FIELD_NUMBER: builtins.int + uuid: builtins.str + """a unique id within the exporter""" + parent_uuid: builtins.str + """optional, if device has a parent device""" + description: builtins.str + """optional custom driver description for CLI""" + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def methods_description(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """method name -> help text for CLI""" + + def __init__( + self, + *, + uuid: builtins.str = ..., + parent_uuid: builtins.str | None = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str | None = ..., + methods_description: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "parent_uuid", b"parent_uuid"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "labels", b"labels", "methods_description", b"methods_description", "parent_uuid", b"parent_uuid", "uuid", b"uuid"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_description", b"_description"]) -> typing.Literal["description"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_parent_uuid", b"_parent_uuid"]) -> typing.Literal["parent_uuid"] | None: ... + +Global___DriverInstanceReport: typing_extensions.TypeAlias = DriverInstanceReport + +@typing.final +class RegisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + uuid: builtins.str + def __init__( + self, + *, + uuid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["uuid", b"uuid"]) -> None: ... + +Global___RegisterResponse: typing_extensions.TypeAlias = RegisterResponse + +@typing.final +class UnregisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REASON_FIELD_NUMBER: builtins.int + reason: builtins.str + def __init__( + self, + *, + reason: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["reason", b"reason"]) -> None: ... + +Global___UnregisterRequest: typing_extensions.TypeAlias = UnregisterRequest + +@typing.final +class UnregisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___UnregisterResponse: typing_extensions.TypeAlias = UnregisterResponse + +@typing.final +class ListenRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... + +Global___ListenRequest: typing_extensions.TypeAlias = ListenRequest + +@typing.final +class ListenResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___ListenResponse: typing_extensions.TypeAlias = ListenResponse + +@typing.final +class StatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___StatusRequest: typing_extensions.TypeAlias = StatusRequest + +@typing.final +class StatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASED_FIELD_NUMBER: builtins.int + LEASE_NAME_FIELD_NUMBER: builtins.int + CLIENT_NAME_FIELD_NUMBER: builtins.int + leased: builtins.bool + lease_name: builtins.str + client_name: builtins.str + def __init__( + self, + *, + leased: builtins.bool = ..., + lease_name: builtins.str | None = ..., + client_name: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name", "leased", b"leased"]) -> None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client_name", b"_client_name"]) -> typing.Literal["client_name"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lease_name", b"_lease_name"]) -> typing.Literal["lease_name"] | None: ... + +Global___StatusResponse: typing_extensions.TypeAlias = StatusResponse + +@typing.final +class DialRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... + +Global___DialRequest: typing_extensions.TypeAlias = DialRequest + +@typing.final +class DialResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___DialResponse: typing_extensions.TypeAlias = DialResponse + +@typing.final +class AuditStreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTER_UUID_FIELD_NUMBER: builtins.int + DRIVER_INSTANCE_UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + """additional context: + - token/authentication mechanism + """ + driver_instance_uuid: builtins.str + severity: builtins.str + message: builtins.str + def __init__( + self, + *, + exporter_uuid: builtins.str = ..., + driver_instance_uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["driver_instance_uuid", b"driver_instance_uuid", "exporter_uuid", b"exporter_uuid", "message", b"message", "severity", b"severity"]) -> None: ... + +Global___AuditStreamRequest: typing_extensions.TypeAlias = AuditStreamRequest + +@typing.final +class ReportStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + """Optional human-readable status message""" + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___ReportStatusRequest: typing_extensions.TypeAlias = ReportStatusRequest + +@typing.final +class ReportStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___ReportStatusResponse: typing_extensions.TypeAlias = ReportStatusResponse + +@typing.final +class GetReportResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + ALTERNATIVE_ENDPOINTS_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + @property + def alternative_endpoints(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Endpoint]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + alternative_endpoints: collections.abc.Iterable[Global___Endpoint] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["alternative_endpoints", b"alternative_endpoints", "labels", b"labels", "reports", b"reports", "uuid", b"uuid"]) -> None: ... + +Global___GetReportResponse: typing_extensions.TypeAlias = GetReportResponse + +@typing.final +class Endpoint(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENDPOINT_FIELD_NUMBER: builtins.int + CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_PRIVATE_KEY_FIELD_NUMBER: builtins.int + endpoint: builtins.str + certificate: builtins.str + client_certificate: builtins.str + client_private_key: builtins.str + def __init__( + self, + *, + endpoint: builtins.str = ..., + certificate: builtins.str = ..., + client_certificate: builtins.str = ..., + client_private_key: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["certificate", b"certificate", "client_certificate", b"client_certificate", "client_private_key", b"client_private_key", "endpoint", b"endpoint"]) -> None: ... + +Global___Endpoint: typing_extensions.TypeAlias = Endpoint + +@typing.final +class DriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... 
+ +Global___DriverCallRequest: typing_extensions.TypeAlias = DriverCallRequest + +@typing.final +class DriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... + +Global___DriverCallResponse: typing_extensions.TypeAlias = DriverCallResponse + +@typing.final +class StreamingDriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... + +Global___StreamingDriverCallRequest: typing_extensions.TypeAlias = StreamingDriverCallRequest + +@typing.final +class StreamingDriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... + +Global___StreamingDriverCallResponse: typing_extensions.TypeAlias = StreamingDriverCallResponse + +@typing.final +class LogStreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + SOURCE_FIELD_NUMBER: builtins.int + uuid: builtins.str + severity: builtins.str + message: builtins.str + source: jumpstarter.v1.common_pb2.LogSource.ValueType + """New optional field""" + def __init__( + self, + *, + uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + source: jumpstarter.v1.common_pb2.LogSource.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_source", b"_source", "source", b"source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_source", b"_source", "message", b"message", "severity", b"severity", "source", b"source", "uuid", b"uuid"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_source", b"_source"]) -> typing.Literal["source"] | None: ... 
+ +Global___LogStreamResponse: typing_extensions.TypeAlias = LogStreamResponse + +@typing.final +class ResetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetRequest: typing_extensions.TypeAlias = ResetRequest + +@typing.final +class ResetResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetResponse: typing_extensions.TypeAlias = ResetResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class GetLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EXPORTER_UUID_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + exporter_uuid: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "conditions", b"conditions", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter_uuid", b"_exporter_uuid"]) -> typing.Literal["exporter_uuid"] | None: ... 
+ +Global___GetLeaseResponse: typing_extensions.TypeAlias = GetLeaseResponse + +@typing.final +class RequestLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> None: ... + +Global___RequestLeaseRequest: typing_extensions.TypeAlias = RequestLeaseRequest + +@typing.final +class RequestLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___RequestLeaseResponse: typing_extensions.TypeAlias = RequestLeaseResponse + +@typing.final +class ReleaseLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___ReleaseLeaseRequest: typing_extensions.TypeAlias = ReleaseLeaseRequest + +@typing.final +class ReleaseLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ReleaseLeaseResponse: typing_extensions.TypeAlias = ReleaseLeaseResponse + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAMES_FIELD_NUMBER: builtins.int + @property + def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + names: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["names", b"names"]) -> None: ... + +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class GetStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___GetStatusRequest: typing_extensions.TypeAlias = GetStatusRequest + +@typing.final +class GetStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___GetStatusResponse: typing_extensions.TypeAlias = GetStatusResponse diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi new file mode 100644 index 000000000..78c9ffbb2 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi @@ -0,0 +1,752 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.v1.jumpstarter_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ControllerServiceRegisterType = typing_extensions.TypeVar( + '_ControllerServiceRegisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], +) + +_ControllerServiceUnregisterType = typing_extensions.TypeVar( + '_ControllerServiceUnregisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], +) + +_ControllerServiceReportStatusType = typing_extensions.TypeVar( + '_ControllerServiceReportStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], +) + +_ControllerServiceListenType = typing_extensions.TypeVar( + '_ControllerServiceListenType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + 
default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], +) + +_ControllerServiceStatusType = typing_extensions.TypeVar( + '_ControllerServiceStatusType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], +) + +_ControllerServiceDialType = typing_extensions.TypeVar( + '_ControllerServiceDialType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], +) + +_ControllerServiceAuditStreamType = typing_extensions.TypeVar( + '_ControllerServiceAuditStreamType', + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + default=grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +_ControllerServiceGetLeaseType = typing_extensions.TypeVar( + '_ControllerServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], +) + +_ControllerServiceRequestLeaseType = typing_extensions.TypeVar( + '_ControllerServiceRequestLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], +) + +_ControllerServiceReleaseLeaseType = typing_extensions.TypeVar( + '_ControllerServiceReleaseLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], +) + +_ControllerServiceListLeasesType = typing_extensions.TypeVar( + '_ControllerServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + 
grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +) + +class ControllerServiceStub(typing.Generic[_ControllerServiceRegisterType, _ControllerServiceUnregisterType, _ControllerServiceReportStatusType, _ControllerServiceListenType, _ControllerServiceStatusType, _ControllerServiceDialType, _ControllerServiceAuditStreamType, _ControllerServiceGetLeaseType, _ControllerServiceRequestLeaseType, _ControllerServiceReleaseLeaseType, _ControllerServiceListLeasesType]): + """A service where a exporter can connect to make itself available""" + + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], channel: grpc.Channel) -> None: ... 
+ + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... + + Register: _ControllerServiceRegisterType + """Exporter registration""" + + Unregister: _ControllerServiceUnregisterType + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + ReportStatus: _ControllerServiceReportStatusType + """Exporter status report + Allows exporters to report their own status to the controller + """ + + Listen: _ControllerServiceListenType + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + Status: _ControllerServiceStatusType + """Exporter status + Returns lease status for the exporter + """ + + Dial: _ControllerServiceDialType + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are checked before token issuance + """ + + AuditStream: _ControllerServiceAuditStreamType + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + GetLease: _ControllerServiceGetLeaseType + """Get Lease""" + + RequestLease: _ControllerServiceRequestLeaseType + """Request Lease""" + + ReleaseLease: _ControllerServiceReleaseLeaseType + """Release Lease""" + + ListLeases: _ControllerServiceListLeasesType + """List Leases""" + +ControllerServiceAsyncStub: typing_extensions.TypeAlias = ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ 
+ jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +] + +class ControllerServiceServicer(metaclass=abc.ABCMeta): + """A service where a exporter can connect to make itself available""" + + @abc.abstractmethod + def Register( + self, + request: jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RegisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RegisterResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def Unregister( + self, + request: jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse]]: + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + @abc.abstractmethod + def ReportStatus( + self, + request: jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse]]: + """Exporter status report + Allows exporters to report their own status to the controller + """ + + @abc.abstractmethod + def Listen( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListenRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse]]: + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + @abc.abstractmethod + def Status( + self, + request: jumpstarter.v1.jumpstarter_pb2.StatusRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse]]: + """Exporter status + Returns lease status for the exporter + """ + + @abc.abstractmethod + def Dial( + self, + request: jumpstarter.v1.jumpstarter_pb2.DialRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DialResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DialResponse]]: + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are 
checked before token issuance + """ + + @abc.abstractmethod + def AuditStream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest], + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse]]: + """Get Lease""" + + @abc.abstractmethod + def RequestLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse]]: + """Request Lease""" + + @abc.abstractmethod + def ReleaseLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse]]: + """Release Lease""" + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse]]: + """List Leases""" + +def add_ControllerServiceServicer_to_server(servicer: ControllerServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
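As an orientation for how these typed stubs are consumed on the exporter side, a minimal sketch of the Listen flow follows. The import path and the pre-authenticated grpc.aio channel are assumptions; the request/response field names come from the stubs above and from the exporter changes later in this patch.

import grpc

from jumpstarter.v1 import jumpstarter_pb2, jumpstarter_pb2_grpc  # import path assumed from the generated layout


async def watch_listen(channel: grpc.aio.Channel, lease_name: str) -> None:
    # Constructing the stub from an aio channel selects the ControllerServiceAsyncStub
    # specialization, so Listen resolves to grpc.aio.UnaryStreamMultiCallable.
    controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel)
    async for response in controller.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)):
        # Each ListenResponse carries the router endpoint/token for one incoming client connection.
        print(response.router_endpoint)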
+ +_ExporterServiceGetReportType = typing_extensions.TypeVar( + '_ExporterServiceGetReportType', + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], +) + +_ExporterServiceDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceDriverCallType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], +) + +_ExporterServiceStreamingDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceStreamingDriverCallType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], +) + +_ExporterServiceLogStreamType = typing_extensions.TypeVar( + '_ExporterServiceLogStreamType', + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], +) + +_ExporterServiceResetType = typing_extensions.TypeVar( + '_ExporterServiceResetType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], +) + +_ExporterServiceGetStatusType = typing_extensions.TypeVar( + '_ExporterServiceGetStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +) + +class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType]): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the 
right exporter + """ + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetReport: _ExporterServiceGetReportType + """Exporter registration""" + + DriverCall: _ExporterServiceDriverCallType + + StreamingDriverCall: _ExporterServiceStreamingDriverCallType + + LogStream: _ExporterServiceLogStreamType + + Reset: _ExporterServiceResetType + + GetStatus: _ExporterServiceGetStatusType + +ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +] + +class ExporterServiceServicer(metaclass=abc.ABCMeta): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the right exporter + """ + + @abc.abstractmethod + def GetReport( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetReportResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetReportResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def DriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse]]: ... + + @abc.abstractmethod + def StreamingDriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse]]: ... + + @abc.abstractmethod + def LogStream( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse]]: ... + + @abc.abstractmethod + def Reset( + self, + request: jumpstarter.v1.jumpstarter_pb2.ResetRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ResetResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ResetResponse]]: ... + + @abc.abstractmethod + def GetStatus( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse]]: ... + +def add_ExporterServiceServicer_to_server(servicer: ExporterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
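The ExporterService stub is what the exporter talks to locally over the session's Unix socket. Below is a minimal sketch of that pattern, mirroring the registration path in the exporter changes later in this patch; the helper itself and its import path are illustrative assumptions.

import grpc
from google.protobuf import empty_pb2

from jumpstarter.v1 import jumpstarter_pb2_grpc  # import path assumed from the generated layout


async def fetch_report(socket_path: str):
    # Local UDS channel to the in-process Session, as exporter.py does during registration.
    async with grpc.aio.secure_channel(
        f"unix://{socket_path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS)
    ) as channel:
        stub = jumpstarter_pb2_grpc.ExporterServiceStub(channel)
        # GetReport returns the driver report tree that Register later forwards to the controller.
        return await stub.GetReport(empty_pb2.Empty())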
diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi new file mode 100644 index 000000000..e07fb6250 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi @@ -0,0 +1,148 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class LabelSelectorRequirement(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + OPERATOR_FIELD_NUMBER: builtins.int + VALUES_FIELD_NUMBER: builtins.int + key: builtins.str + operator: builtins.str + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + key: builtins.str = ..., + operator: builtins.str = ..., + values: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "operator", b"operator", "values", b"values"]) -> None: ... + +Global___LabelSelectorRequirement: typing_extensions.TypeAlias = LabelSelectorRequirement + +@typing.final +class LabelSelector(google.protobuf.message.Message): + """Reference: https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/label-selector/""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class MatchLabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + MATCH_EXPRESSIONS_FIELD_NUMBER: builtins.int + MATCH_LABELS_FIELD_NUMBER: builtins.int + @property + def match_expressions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___LabelSelectorRequirement]: ... + @property + def match_labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + match_expressions: collections.abc.Iterable[Global___LabelSelectorRequirement] | None = ..., + match_labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["match_expressions", b"match_expressions", "match_labels", b"match_labels"]) -> None: ... 
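For reference, constructing these messages follows the usual generated-protobuf pattern; the selector values below are made up for illustration.

from jumpstarter.v1.kubernetes_pb2 import LabelSelector, LabelSelectorRequirement  # import path assumed

selector = LabelSelector(
    match_labels={"board": "rpi4"},
    match_expressions=[
        LabelSelectorRequirement(key="pool", operator="In", values=["lab-a", "lab-b"]),
    ],
)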
+ +Global___LabelSelector: typing_extensions.TypeAlias = LabelSelector + +@typing.final +class Time(google.protobuf.message.Message): + """Reference: https://github.com/kubernetes/kubernetes/blob/v1.31.1/staging/src/k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SECONDS_FIELD_NUMBER: builtins.int + NANOS_FIELD_NUMBER: builtins.int + seconds: builtins.int + nanos: builtins.int + def __init__( + self, + *, + seconds: builtins.int | None = ..., + nanos: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_nanos", b"_nanos"]) -> typing.Literal["nanos"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_seconds", b"_seconds"]) -> typing.Literal["seconds"] | None: ... + +Global___Time: typing_extensions.TypeAlias = Time + +@typing.final +class Condition(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + OBSERVEDGENERATION_FIELD_NUMBER: builtins.int + LASTTRANSITIONTIME_FIELD_NUMBER: builtins.int + REASON_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + type: builtins.str + status: builtins.str + observedGeneration: builtins.int + reason: builtins.str + message: builtins.str + @property + def lastTransitionTime(self) -> Global___Time: ... + def __init__( + self, + *, + type: builtins.str | None = ..., + status: builtins.str | None = ..., + observedGeneration: builtins.int | None = ..., + lastTransitionTime: Global___Time | None = ..., + reason: builtins.str | None = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lastTransitionTime", b"_lastTransitionTime"]) -> typing.Literal["lastTransitionTime"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_observedGeneration", b"_observedGeneration"]) -> typing.Literal["observedGeneration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_reason", b"_reason"]) -> typing.Literal["reason"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_status", b"_status"]) -> typing.Literal["status"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_type", b"_type"]) -> typing.Literal["type"] | None: ... + +Global___Condition: typing_extensions.TypeAlias = Condition diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi new file mode 100644 index 000000000..6aac97060 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi new file mode 100644 index 000000000..905fc6b88 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi @@ -0,0 +1,73 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FrameType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FrameTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FrameType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FRAME_TYPE_DATA: _FrameType.ValueType # 0 + FRAME_TYPE_RST_STREAM: _FrameType.ValueType # 3 + FRAME_TYPE_PING: _FrameType.ValueType # 6 + FRAME_TYPE_GOAWAY: _FrameType.ValueType # 7 + +class FrameType(_FrameType, metaclass=_FrameTypeEnumTypeWrapper): ... + +FRAME_TYPE_DATA: FrameType.ValueType # 0 +FRAME_TYPE_RST_STREAM: FrameType.ValueType # 3 +FRAME_TYPE_PING: FrameType.ValueType # 6 +FRAME_TYPE_GOAWAY: FrameType.ValueType # 7 +Global___FrameType: typing_extensions.TypeAlias = FrameType + +@typing.final +class StreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... 
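The FrameType enum above maps directly onto the stream messages; a data frame would be built as shown below (payload bytes are illustrative).

from jumpstarter.v1.router_pb2 import FRAME_TYPE_DATA, StreamRequest  # import path assumed

frame = StreamRequest(payload=b"hello", frame_type=FRAME_TYPE_DATA)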
+ +Global___StreamRequest: typing_extensions.TypeAlias = StreamRequest + +@typing.final +class StreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... + +Global___StreamResponse: typing_extensions.TypeAlias = StreamResponse diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi new file mode 100644 index 000000000..032778846 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import jumpstarter.v1.router_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_RouterServiceStreamType = typing_extensions.TypeVar( + '_RouterServiceStreamType', + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + default=grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +) + +class RouterServiceStub(typing.Generic[_RouterServiceStreamType]): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + Stream: _RouterServiceStreamType + """Stream connects caller to another caller of the same stream""" + +RouterServiceAsyncStub: typing_extensions.TypeAlias = RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +] + +class RouterServiceServicer(metaclass=abc.ABCMeta): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @abc.abstractmethod + def Stream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.router_pb2.StreamRequest], + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.router_pb2.StreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.router_pb2.StreamResponse]]: + """Stream connects caller to another caller of the same stream""" + +def add_RouterServiceServicer_to_server(servicer: RouterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py index 3e7b88b21..11f94f1d4 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -25,13 +25,16 @@ class HookInstanceConfigV1Alpha1(BaseModel): script: str = Field(alias="script", description="The j script to execute for this hook") timeout: int = Field(default=120, description="The hook execution timeout in seconds (default: 120s)") - exit_code: int = Field(alias="exitCode", default=0, description="The expected exit code (default: 0)") - on_failure: Literal["pass", "block", "warn"] = Field( - default="pass", + on_failure: Literal[ + "warn", + "endLease", + "exit", + ] = Field( + default="warn", alias="onFailure", description=( - "Action to take when the expected exit code is not returned: 'pass' continues normally, " - "'block' takes the exporter offline and blocks leases, 'warn' continues and prints a warning" + "Action to take when the expected exit code is not returned: 'endLease' to end the lease, " + "'exit' takes the exporter offline and ends the lease, 'warn' continues and prints a warning" ), ) diff --git a/packages/jumpstarter/jumpstarter/driver/base.py b/packages/jumpstarter/jumpstarter/driver/base.py index b735c32b5..b2de25628 100644 --- a/packages/jumpstarter/jumpstarter/driver/base.py +++ b/packages/jumpstarter/jumpstarter/driver/base.py @@ -29,13 +29,13 @@ ) from jumpstarter.common import LogSource, Metadata from jumpstarter.common.resources import ClientStreamResource, PresignedRequestResource, Resource, ResourceMetadata -from jumpstarter.exporter.logging import get_logger from jumpstarter.common.serde import decode_value, encode_value from jumpstarter.common.streams import ( DriverStreamRequest, ResourceStreamRequest, ) from jumpstarter.config.env import JMP_DISABLE_COMPRESSION +from jumpstarter.exporter.logging import get_logger from jumpstarter.streams.aiohttp import AiohttpStreamReaderStream from jumpstarter.streams.common import create_memory_stream from jumpstarter.streams.encoding import Compression, compress_stream diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 301c57bbf..8d9c8cf7d 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -2,7 +2,7 @@ from collections.abc import AsyncGenerator, Awaitable, 
Callable from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Self +from typing import Any, Self import grpc from anyio import ( @@ -34,22 +34,136 @@ @dataclass(kw_only=True) class Exporter(AsyncContextManagerMixin, Metadata): + """Represents a Jumpstarter Exporter runtime instance. + + Inherits from Metadata, which provides: + uuid: Unique identifier for the exporter instance (UUID4) + labels: Key-value labels for exporter identification and selector matching + """ + + # Public Configuration Fields + channel_factory: Callable[[], Awaitable[grpc.aio.Channel]] + """Factory function for creating gRPC channels to communicate with the controller. + + Called multiple times throughout the exporter lifecycle to establish connections. + The factory should handle authentication, credentials, and channel configuration. + Used when creating controller stubs, unregistering, and establishing streams. + """ + device_factory: Callable[[], Driver] - lease_name: str = field(init=False, default="") + """Factory function for creating Driver instances representing the hardware/devices. + + Called when creating Sessions to provide access to the underlying device. + The Driver can contain child drivers in a composite pattern, representing + the full device tree being exported. Typically created from ExporterConfigV1Alpha1. + """ + tls: TLSConfigV1Alpha1 = field(default_factory=TLSConfigV1Alpha1) + """TLS/SSL configuration for secure communication with router and controller. + + Contains certificate authority (ca) and insecure flag for certificate verification. + Passed to connect_router_stream() when handling client connections. + Default creates empty config with ca="" and insecure=False. + """ + grpc_options: dict[str, str] = field(default_factory=dict) + """Custom gRPC channel options that override or supplement default settings. + + Merged with defaults (round_robin load balancing, keepalive settings, etc.). + Configured via YAML as grpcOptions in exporter config. + Passed to connect_router_stream() for client connections. + """ + hook_executor: HookExecutor | None = field(default=None) - registered: bool = field(init=False, default=False) + """Optional executor for lifecycle hooks (before-lease and after-lease). + + When configured, runs custom scripts at key points in the lease lifecycle: + - before-lease: Runs when transitioning to leased state (setup, validation) + - after-lease: Runs when transitioning from leased state (cleanup, reset) + Created when hooks.before_lease or hooks.after_lease are defined in config. + """ + + # Internal State Fields + + _lease_name: str = field(init=False, default="") + """Current lease name assigned by the controller. + + Empty string indicates no active lease. Updated when controller assigns/reassigns + the exporter. Used to detect lease transitions and create hook contexts. + """ + + _registered: bool = field(init=False, default=False) + """Tracks whether exporter has successfully registered with the controller. + + Set to True after successful registration. Used to determine if unregistration + is needed during cleanup. + """ + _unregister: bool = field(init=False, default=False) + """Internal flag indicating whether to actively unregister during shutdown. + + Set when stop(should_unregister=True) is called. When False, relies on + heartbeat timeout for implicit unregistration. + """ + _stop_requested: bool = field(init=False, default=False) + """Internal flag indicating a graceful stop has been requested. 
+ + Set to True when stop(wait_for_lease_exit=True) is called. The exporter + waits for the current lease to exit before stopping. + """ + _started: bool = field(init=False, default=False) + """Internal flag tracking whether the exporter has started serving. + + Set to True when the first lease is assigned. Used to determine immediate + vs graceful stop behavior. + """ + _tg: TaskGroup | None = field(init=False, default=None) + """Reference to the anyio TaskGroup managing concurrent tasks. + + Manages streams and connection handling tasks. Used to cancel all tasks + when stopping. Set during serve() and cleared when done. + """ + _current_client_name: str = field(init=False, default="") - _pre_lease_ready: Event | None = field(init=False, default=None) - _current_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + """Name of the client currently holding the lease. + + Used to create hook contexts with client information and determine if + after-lease hooks should run. Reset when lease is released. + """ + + _before_lease_hook: Event | None = field(init=False, default=None) + """Synchronization event that blocks connection handling until hook completes. + + Created when a new lease starts, waited on before accepting connections, + and set when hook completes or is not configured. + """ + + _exporter_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + """Current status of the exporter. + + Updated via _update_status() and reported to controller and session. + Possible values: OFFLINE, AVAILABLE, BEFORE_LEASE_HOOK, LEASE_READY, + AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. + """ + _current_session: Session | None = field(init=False, default=None) + """Reference to the currently active Session object. + + A Session wraps the root device and provides gRPC service endpoints. + Used to update session status and pass to HookExecutor for logging. + Set in session() context manager and cleared when context exits. + """ + _session_socket_path: str | None = field(init=False, default=None) + """Unix socket path where the current session is serving. + + Passed to hooks so they can communicate with the device via the CLI. + Enables session reuse instead of creating new ones for hooks. + """ def stop(self, wait_for_lease_exit=False, should_unregister=False): """Signal the exporter to stop. @@ -68,57 +182,189 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") - async def _update_status(self, status: ExporterStatus, message: str = ""): - """Update exporter status with the controller and session.""" - self._current_status = status + async def _get_controller_stub(self) -> jumpstarter_pb2_grpc.ControllerServiceStub: + """Create and return a controller service stub.""" + return jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + + async def _retry_stream( + self, + stream_name: str, + stream_factory: Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator], + send_tx, + retries: int = 5, + backoff: float = 3.0, + ): + """Generic retry wrapper for gRPC streaming calls. 
+ + Args: + stream_name: Name of the stream for logging purposes + stream_factory: Function that takes a controller stub and returns an async generator + send_tx: Transmission channel to send stream items to + retries: Maximum number of retry attempts + backoff: Seconds to wait between retries + """ + retries_left = retries + while True: + try: + controller = await self._get_controller_stub() + async for item in stream_factory(controller): + await send_tx.send(item) + except Exception as e: + if retries_left > 0: + retries_left -= 1 + logger.info( + "%s stream interrupted, restarting in %ss, %s retries left: %s", + stream_name, + backoff, + retries_left, + e, + ) + await sleep(backoff) + else: + raise + else: + retries_left = retries + + def _listen_stream_factory( + self, lease_name: str + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.ListenResponse, None]]: + """Create a stream factory for listening to connection requests. + + Args: + lease_name: The lease name to listen for + + Returns: + A factory function that creates a Listen stream when given a ControllerServiceStub + """ + + def factory( + ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.ListenResponse, None]: + return ctrl.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)) + + return factory + + def _status_stream_factory( + self, + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.StatusResponse, None]]: + """Create a stream factory for status updates. + + Returns: + A factory function that creates a Status stream when given a ControllerServiceStub + """ + + def factory( + ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.StatusResponse, None]: + return ctrl.Status(jumpstarter_pb2.StatusRequest()) + + return factory + + def _create_hook_context(self, lease_name: str, client_name: str) -> HookContext: + """Create a standardized hook context. 
+ + Args: + lease_name: Name of the lease + client_name: Name of the client + + Returns: + HookContext object with consistent fields + """ + return HookContext( + lease_name=lease_name, + client_name=client_name, + ) + + async def _register_with_controller(self, channel: grpc.aio.Channel): + """Register the exporter with the controller.""" + response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + logger.info("Registering exporter with controller") + controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await controller.Register( + jumpstarter_pb2.RegisterRequest( + labels=self.labels, + reports=response.reports, + ) + ) + self._registered = True + await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + + async def _report_status(self, status: ExporterStatus, message: str = ""): + """Report the exporter status with the controller and session.""" + self._exporter_status = status # Update session status if available if self._current_session: self._current_session.update_status(status, message) try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - await controller.UpdateStatus( - jumpstarter_pb2.UpdateStatusRequest( + controller = await self._get_controller_stub() + await controller.ReportStatus( + jumpstarter_pb2.ReportStatusRequest( status=status.to_proto(), - status_message=message, + message=message, ) ) logger.info(f"Updated status to {status}: {message}") except Exception as e: logger.error(f"Failed to update status: {e}") + async def _unregister_with_controller(self): + """Safely unregister from controller with timeout and error handling.""" + if not (self._registered and self._unregister): + return + + logger.info("Unregistering exporter with controller") + try: + with move_on_after(10): # 10 second timeout + channel = await self.channel_factory() + try: + controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await self._report_status(ExporterStatus.OFFLINE, "Exporter shutting down") + await controller.Unregister( + jumpstarter_pb2.UnregisterRequest( + reason="Exporter shutdown", + ) + ) + logger.info("Controller unregistration completed successfully") + finally: + with CancelScope(shield=True): + await channel.close() + except Exception as e: + logger.error("Error during controller unregistration: %s", e, exc_info=True) + @asynccontextmanager async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: try: yield self finally: try: - if self.registered and self._unregister: - logger.info("Unregistering exporter with controller") - try: - with move_on_after(10): # 10 second timeout - channel = await self.channel_factory() - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) - await self._update_status(ExporterStatus.OFFLINE, "Exporter shutting down") - await controller.Unregister( - jumpstarter_pb2.UnregisterRequest( - reason="Exporter shutdown", - ) - ) - logger.info("Controller unregistration completed successfully") - finally: - with CancelScope(shield=True): - await channel.close() - except Exception as e: - logger.error("Error during controller unregistration: %s", e, exc_info=True) - + await self._unregister_with_controller() except Exception as e: logger.error("Error during exporter cleanup: %s", e, exc_info=True) # Don't re-raise to avoid masking the original exception - async def __handle(self, path, endpoint, token, tls_config, grpc_options): + async def _handle_client_conn( + self, path: str, endpoint: str, 
token: str, tls_config: TLSConfigV1Alpha1, grpc_options: dict[str, Any] | None + ) -> None: + """Handle a single client connection by proxying between session and router. + + This method establishes a connection from the local session Unix socket to the + router endpoint, creating a bidirectional proxy that allows the client to + communicate with the device through the router infrastructure. + + Args: + path: Unix socket path where the session is serving + endpoint: Router endpoint URL to connect to + token: Authentication token for the router connection + tls_config: TLS configuration for secure router communication + grpc_options: Optional gRPC channel options for the router connection + + Note: + This is a private method spawned as a concurrent task by handle_lease_conn() + for each incoming connection request. It runs until the client disconnects + or an error occurs. + """ try: async with await connect_unix(path) as stream: async with connect_router_stream(endpoint, token, stream, tls_config, grpc_options): @@ -128,78 +374,84 @@ async def __handle(self, path, endpoint, token, tls_config, grpc_options): @asynccontextmanager async def session(self): - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + """Create and manage an exporter Session context.""" with Session( uuid=self.uuid, labels=self.labels, root_device=self.device_factory(), ) as session: - # Store session reference for status updates + # Store session reference outside context for status updates self._current_session = session try: + # Create a Unix socket async with session.serve_unix_async() as path: + # Create a gRPC channel to the controller via the socket async with grpc.aio.secure_channel( f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) ) as channel: - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) - logger.info("Registering exporter with controller") - await controller.Register( - jumpstarter_pb2.RegisterRequest( - labels=self.labels, - reports=response.reports, - ) - ) - self.registered = True - await self._update_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + # Register the exporter with the controller + await self._register_with_controller(channel) yield path finally: - # Clear session reference + # Clear the session reference self._current_session = None - async def handle(self, lease_name, tg): - logger.info("Listening for incoming connection requests on lease %s", lease_name) + async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: + """Handle all incoming client connections for a lease. - listen_tx, listen_rx = create_memory_object_stream() + This method orchestrates the complete lifecycle of managing connections during + a lease period. It listens for connection requests and spawns individual + tasks to handle each client connection. - async def listen(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for request in controller.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)): - await listen_tx.send(request) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Listen stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + The method performs the following steps: + 1. 
Sets up a stream to listen for incoming connection requests + 2. Creates a session with a Unix socket for device access + 3. Waits for the before-lease hook to complete (if configured) + 4. Spawns a new task for each incoming connection request - tg.start_soon(listen) + Args: + lease_name: Name of the lease to handle connections for + tg: TaskGroup for spawning concurrent connection handler tasks - # Create session before hooks run + Note: + This method runs for the entire duration of the lease and is spawned by + the serve() method when a lease is assigned. It terminates when the lease + ends or the exporter stops. + """ + logger.info("Listening for incoming connection requests on lease %s", lease_name) + + listen_tx, listen_rx = create_memory_object_stream[jumpstarter_pb2.ListenResponse]() + + # Start listening for connection requests with retry logic + tg.start_soon( + self._retry_stream, + "Listen", + self._listen_stream_factory(lease_name), + listen_tx, + ) + + # Create a lease session to execute hooks and handle connections async with self.session() as path: # Store socket path for hook execution self._session_socket_path = path - # Wait for before-lease hook to complete before processing connections - if self._pre_lease_ready is not None: + # Wait for before-lease hook to complete before processing client connections + if self._before_lease_hook is not None: logger.info("Waiting for before-lease hook to complete before accepting connections") - await self._pre_lease_ready.wait() - logger.info("before-lease hook completed, now accepting connections") + await self._before_lease_hook.wait() + logger.info("Before-lease hook completed, now accepting connections") # Process client connections + # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( - self.__handle, path, request.router_endpoint, request.router_token, self.tls, self.grpc_options + self._handle_client_conn, + path, + request.router_endpoint, + request.router_token, + self.tls, + self.grpc_options, ) async def serve(self): # noqa: C901 @@ -209,48 +461,51 @@ async def serve(self): # noqa: C901 # initial registration async with self.session(): pass - status_tx, status_rx = create_memory_object_stream() - - async def status(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for status in controller.Status(jumpstarter_pb2.StatusRequest()): - await status_tx.send(status) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Status stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + status_tx, status_rx = create_memory_object_stream[jumpstarter_pb2.StatusResponse]() async with create_task_group() as tg: self._tg = tg - tg.start_soon(status) + # Start status stream with retry logic + tg.start_soon( + self._retry_stream, + "Status", + self._status_stream_factory(), + status_tx, + ) + # Type: status is jumpstarter_pb2.StatusResponse with lease_name and other status fields async for status in status_rx: - if self.lease_name != "" and self.lease_name != status.lease_name: - # After-lease hook for the previous lease + if self._lease_name != "" and self._lease_name != status.lease_name: + # After-lease hook for the 
previous lease (lease name changed) if self.hook_executor and self._current_client_name: - hook_context = HookContext( - lease_name=self.lease_name, - client_name=self._current_client_name, - ) + hook_context = self._create_hook_context(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await self.run_after_lease_hook(hook_context) + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + self.hook_executor.main_session = self._current_session + try: + await self.hook_executor.execute_after_lease_hook( + hook_context, socket_path=self._session_socket_path + ) + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") + except HookExecutionError as e: + logger.error("afterLease hook failed (on_failure=endLease/exit): %s", e) + await self._report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) + logger.error("Shutting down exporter due to afterLease hook failure") + self.stop() + except Exception as e: + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await self._report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) - self.lease_name = status.lease_name + self._lease_name = status.lease_name logger.info("Lease status changed, killing existing connections") # Reset event for next lease - self._pre_lease_ready = None + self._before_lease_hook = None self.stop() break @@ -258,12 +513,12 @@ async def status(retries=5, backoff=3): previous_leased = hasattr(self, "_previous_leased") and self._previous_leased current_leased = status.leased - self.lease_name = status.lease_name - if not self._started and self.lease_name != "": + self._lease_name = status.lease_name + if not self._started and self._lease_name != "": self._started = True # Create event for pre-lease synchronization - self._pre_lease_ready = Event() - tg.start_soon(self.handle, self.lease_name, tg) + self._before_lease_hook = Event() + tg.start_soon(self.handle_lease, self._lease_name, tg) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) @@ -272,39 +527,33 @@ async def status(retries=5, backoff=3): # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: - hook_context = HookContext( - lease_name=status.lease_name, - client_name=status.client_name, - ) - tg.start_soon(self.run_before_lease_hook, self, hook_context) + hook_context = self._create_hook_context(status.lease_name, status.client_name) + tg.start_soon(self.run_before_lease_hook, hook_context) else: # No hook configured, set event immediately - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._pre_lease_ready: - self._pre_lease_ready.set() + await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") + if self._before_lease_hook: + self._before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: - hook_context = HookContext( - lease_name=self.lease_name, - client_name=self._current_client_name, - ) + hook_context = self._create_hook_context(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): - await 
self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session # Use session socket if available, otherwise create new session await self.hook_executor.execute_after_lease_hook( hook_context, socket_path=self._session_socket_path ) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") self._current_client_name = "" # Reset event for next lease - self._pre_lease_ready = None + self._before_lease_hook = None if self._stop_requested: self.stop(should_unregister=True) @@ -321,7 +570,7 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): hook_ctx (HookContext): The current hook execution context """ try: - await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") + await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session @@ -331,12 +580,12 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): # Execute hook with main session socket await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._update_status(ExporterStatus.LEASE_READY, "Ready for commands") + await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") logger.info("beforeLease hook completed successfully") except HookExecutionError as e: # Hook failed with on_failure='block' - end lease and set failed status logger.error("beforeLease hook failed (on_failure=block): %s", e) - await self._update_status( + await self._report_status( ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" ) # Note: We don't take the exporter offline for before_lease hook failures @@ -344,11 +593,11 @@ async def run_before_lease_hook(self, hook_ctx: HookContext): except Exception as e: # Unexpected error during hook execution logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) - await self._update_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") + await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") finally: # Always set the event to unblock connections - if self._pre_lease_ready: - self._pre_lease_ready.set() + if self._before_lease_hook: + self._before_lease_hook.set() async def run_after_lease_hook(self, hook_ctx: HookContext): """ @@ -358,17 +607,17 @@ async def run_after_lease_hook(self, hook_ctx: HookContext): hook_ctx (HookContext): The current hook execution context """ try: - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") # Pass the current session to hook executor for logging self.hook_executor.main_session = self._current_session # Use session socket if available, otherwise create new session await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._update_status(ExporterStatus.AVAILABLE, "Available for new lease") + await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") logger.info("afterLease hook completed 
successfully") except HookExecutionError as e: # Hook failed with on_failure='block' - set failed status and shut down exporter logger.error("afterLease hook failed (on_failure=block): %s", e) - await self._update_status( + await self._report_status( ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" ) # Shut down the exporter after after_lease hook failure with on_failure='block' @@ -377,4 +626,4 @@ async def run_after_lease_hook(self, hook_ctx: HookContext): except Exception as e: # Unexpected error during hook execution logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._update_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") + await self._report_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index 77803b28d..a38275603 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -5,7 +5,7 @@ import os from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Callable +from typing import AsyncGenerator, Callable from jumpstarter.common import LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST @@ -18,7 +18,7 @@ class HookExecutionError(Exception): - """Raised when a hook fails and on_failure is set to 'block'.""" + """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'.""" pass @@ -42,8 +42,34 @@ class HookExecutor: device_factory: Callable[[], Driver] main_session: Session | None = field(default=None) + def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, str]: + """Create standardized hook environment variables. 
+ + Args: + context: Hook context information + socket_path: Path to the Unix socket for JUMPSTARTER_HOST + + Returns: + Dictionary of environment variables for hook execution + """ + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(socket_path), + JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access + "LEASE_NAME": context.lease_name, + "CLIENT_NAME": context.client_name, + "LEASE_DURATION": context.lease_duration, + "EXPORTER_NAME": context.exporter_name, + "EXPORTER_NAMESPACE": context.exporter_namespace, + } + ) + return hook_env + @asynccontextmanager - async def _create_hook_environment(self, context: HookContext): + async def _create_hook_environment( + self, context: HookContext + ) -> AsyncGenerator[tuple[Session, dict[str, str]], None]: """Create a local session and Unix socket for j CLI access.""" with Session( root_device=self.device_factory(), @@ -55,18 +81,7 @@ async def _create_hook_environment(self, context: HookContext): ) as session: async with session.serve_unix_async() as unix_path: # Create environment variables for the hook - hook_env = os.environ.copy() - hook_env.update( - { - JUMPSTARTER_HOST: str(unix_path), - JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, - } - ) + hook_env = self._create_hook_env(context, unix_path) yield session, hook_env @@ -76,11 +91,11 @@ async def _execute_hook( context: HookContext, log_source: LogSource, socket_path: str | None = None, - ): + ) -> None: """Execute a single hook command. Args: - hook_config: Hook configuration including script, timeout, exit_code, and on_failure + hook_config: Hook configuration including script, timeout, and on_failure context: Hook context information log_source: Log source for hook output socket_path: Optional Unix socket path to reuse existing session. 
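To make the hook contract concrete, here is a hypothetical hook body, written in Python for illustration, that consumes only the environment variables _create_hook_env injects; it assumes the JUMPSTARTER_HOST and JMP_DRIVERS_ALLOW constants resolve to environment variables of the same name.

import os

# Injected by _create_hook_env: the session's Unix socket plus lease metadata.
socket_path = os.environ["JUMPSTARTER_HOST"]  # assumed literal value of the constant
lease_name = os.environ["LEASE_NAME"]
client_name = os.environ.get("CLIENT_NAME", "")

# Any `j ...` CLI call made from this process reaches the same exporter session,
# because JUMPSTARTER_HOST points at its socket and JMP_DRIVERS_ALLOW is "UNSAFE".
print(f"hook running for lease {lease_name} (client: {client_name}) via {socket_path}")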
@@ -96,18 +111,7 @@ async def _execute_hook( # If socket_path provided, use existing session; otherwise create new one if socket_path is not None: # Reuse existing session - create environment without session creation - hook_env = os.environ.copy() - hook_env.update( - { - JUMPSTARTER_HOST: str(socket_path), - JMP_DRIVERS_ALLOW: "UNSAFE", - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, - } - ) + hook_env = self._create_hook_env(context, socket_path) # Use main session for logging (must be available when socket_path is provided) logging_session = self.main_session @@ -127,13 +131,12 @@ async def _execute_hook_process( hook_config: HookInstanceConfigV1Alpha1, context: HookContext, log_source: LogSource, - hook_env: dict, + hook_env: dict[str, str], logging_session: Session, - ): + ) -> None: """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout - expected_exit_code = hook_config.exit_code on_failure = hook_config.on_failure try: @@ -165,22 +168,22 @@ async def read_output(): # Run output reading and process waiting concurrently with timeout await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) - # Check if exit code matches expected - if process.returncode == expected_exit_code: - logger.info("Hook executed successfully with exit code %d", process.returncode) + # Check if hook succeeded (exit code 0) + if process.returncode == 0: + logger.info("Hook executed successfully") return else: - # Exit code mismatch - handle according to on_failure setting - error_msg = f"Hook failed: expected exit code {expected_exit_code}, got {process.returncode}" + # Non-zero exit code is a failure - handle according to on_failure setting + error_msg = f"Hook failed with exit code {process.returncode}" - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # on_failure == "block" - logger.error("%s (on_failure=block, raising exception)", error_msg) + elif on_failure == "endLease": + logger.error("%s (on_failure=endLease, raising exception)", error_msg) + raise HookExecutionError(error_msg) + else: # on_failure == "exit" + logger.error("%s (on_failure=exit, raising exception)", error_msg) raise HookExecutionError(error_msg) except asyncio.TimeoutError as e: @@ -194,13 +197,12 @@ async def read_output(): await process.wait() # Handle timeout according to on_failure setting - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # on_failure == "block" + elif on_failure == "endLease": + raise HookExecutionError(error_msg) from e + else: # on_failure == "exit" raise HookExecutionError(error_msg) from e except HookExecutionError: @@ -211,16 +213,15 @@ async def read_output(): logger.error(error_msg, exc_info=True) # Handle exception according to on_failure setting - if on_failure == "pass": - logger.info("%s (on_failure=pass, continuing)", error_msg) - return - elif on_failure == "warn": + if on_failure == "warn": logger.warning("%s (on_failure=warn, continuing)", error_msg) return - else: # 
on_failure == "block" + elif on_failure == "endLease": + raise HookExecutionError(error_msg) from e + else: # on_failure == "exit" raise HookExecutionError(error_msg) from e - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None): + async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: """Execute the before-lease hook. Args: @@ -228,7 +229,7 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str socket_path: Optional Unix socket path to reuse existing session Raises: - HookExecutionError: If hook fails and on_failure is set to 'block' + HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' """ if not self.config.before_lease: logger.debug("No before-lease hook configured") @@ -242,7 +243,7 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None): + async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: """Execute the after-lease hook. Args: @@ -250,7 +251,7 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str socket_path: Optional Unix socket path to reuse existing session Raises: - HookExecutionError: If hook fails and on_failure is set to 'block' + HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' """ if not self.config.after_lease: logger.debug("No after-lease hook configured") diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index 0e18d332c..f31257dff 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -1,4 +1,5 @@ import asyncio +from typing import Callable from unittest.mock import AsyncMock, Mock, call, patch import pytest @@ -16,23 +17,23 @@ class MockDriver(Driver): def client(cls) -> str: return "test.MockClient" - def close(self): + def close(self) -> None: pass - def reset(self): + def reset(self) -> None: pass @pytest.fixture -def mock_device_factory(): - def factory(): +def mock_device_factory() -> Callable[[], MockDriver]: + def factory() -> MockDriver: return MockDriver() return factory @pytest.fixture -def hook_config(): +def hook_config() -> HookConfigV1Alpha1: return HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease hook executed'", timeout=10), @@ -40,7 +41,7 @@ def hook_config(): @pytest.fixture -def hook_context(): +def hook_context() -> HookContext: return HookContext( lease_name="test-lease-123", client_name="test-client", @@ -51,7 +52,7 @@ def hook_context(): class TestHookExecutor: - async def test_hook_executor_creation(self, hook_config, mock_device_factory): + async def test_hook_executor_creation(self, hook_config, mock_device_factory) -> None: executor = HookExecutor( config=hook_config, device_factory=mock_device_factory, @@ -60,18 +61,18 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory): assert executor.config == hook_config assert executor.device_factory == mock_device_factory - async def test_empty_hook_execution(self, mock_device_factory, hook_context): + async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> None: empty_config 
= HookConfigV1Alpha1() executor = HookExecutor( config=empty_config, device_factory=mock_device_factory, ) - # Both hooks should return True for empty/None commands - assert await executor.execute_before_lease_hook(hook_context) is True - assert await executor.execute_after_lease_hook(hook_context) is True + # Both hooks should return None for empty/None commands + assert await executor.execute_before_lease_hook(hook_context) is None + assert await executor.execute_after_lease_hook(hook_context) is None - async def test_successful_hook_execution(self, mock_device_factory, hook_context): + async def test_successful_hook_execution(self, mock_device_factory, hook_context) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) @@ -102,7 +103,7 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context result = await executor.execute_before_lease_hook(hook_context) - assert result is True + assert result is None # Verify subprocess was called with correct environment mock_subprocess.assert_called_once() @@ -117,11 +118,11 @@ async def test_successful_hook_execution(self, mock_device_factory, hook_context assert env["LEASE_NAME"] == "test-lease-123" assert env["CLIENT_NAME"] == "test-client" - async def test_failed_hook_execution(self, mock_device_factory, hook_context): + async def test_failed_hook_execution(self, mock_device_factory, hook_context) -> None: failed_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( - script="exit 1", timeout=10, on_failure="block" - ), # Command that will fail with on_failure="block" + script="exit 1", timeout=10, on_failure="endLease" + ), # Command that will fail with on_failure="endLease" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -147,15 +148,15 @@ async def test_failed_hook_execution(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - # Should raise HookExecutionError since on_failure="block" - with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): + # Should raise HookExecutionError since on_failure="endLease" + with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): await executor.execute_before_lease_hook(hook_context) - async def test_hook_timeout(self, mock_device_factory, hook_context): + async def test_hook_timeout(self, mock_device_factory, hook_context) -> None: timeout_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( - script="sleep 60", timeout=1, on_failure="block" - ), # Command that will timeout with on_failure="block" + script="sleep 60", timeout=1, on_failure="exit" + ), # Command that will timeout with on_failure="exit" ) with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: @@ -179,13 +180,13 @@ async def test_hook_timeout(self, mock_device_factory, hook_context): device_factory=mock_device_factory, ) - # Should raise HookExecutionError since on_failure="block" + # Should raise HookExecutionError since on_failure="exit" with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): await executor.execute_before_lease_hook(hook_context) mock_process.terminate.assert_called_once() - async def test_hook_environment_variables(self, mock_device_factory, hook_context): + async def test_hook_environment_variables(self, mock_device_factory, hook_context) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook 
executed'", timeout=10), ) @@ -224,7 +225,7 @@ async def test_hook_environment_variables(self, mock_device_factory, hook_contex assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - async def test_real_time_output_logging(self, mock_device_factory, hook_context): + async def test_real_time_output_logging(self, mock_device_factory, hook_context) -> None: """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( @@ -262,17 +263,17 @@ async def test_real_time_output_logging(self, mock_device_factory, hook_context) result = await executor.execute_before_lease_hook(hook_context) - assert result is True + assert result is None # Verify that output lines were logged in real-time at INFO level expected_calls = [ call("Executing before-lease hook for lease %s", "test-lease-123"), call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call("Hook executed successfully with exit code %d", 0), + call("Hook executed successfully"), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) - async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context): + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context) -> None: """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), @@ -306,168 +307,17 @@ async def test_post_lease_hook_execution_on_completion(self, mock_device_factory result = await executor.execute_after_lease_hook(hook_context) - assert result is True + assert result is None # Verify that post-lease hook output was logged expected_calls = [ call("Executing after-lease hook for lease %s", "test-lease-123"), call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call("Hook executed successfully with exit code %d", 0), + call("Hook executed successfully"), ] mock_logger.info.assert_has_calls(expected_calls, any_order=False) - async def test_hook_exit_code_matching_success(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code matches expected value.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 0", timeout=10, exit_code=0), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - - async def test_hook_exit_code_matching_custom(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code matches custom expected value.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 42", timeout=10, exit_code=42), - ) - - with patch("jumpstarter.exporter.hooks.Session") as 
mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 42 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - - async def test_hook_exit_code_mismatch_pass(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code mismatches but on_failure='pass'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="pass"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify INFO log was created (using format string) - mock_logger.info.assert_any_call( - "%s (on_failure=pass, continuing)", "Hook failed: expected exit code 0, got 1" - ) - - async def test_hook_exit_code_mismatch_warn(self, mock_device_factory, hook_context): - """Test that hook succeeds when exit code mismatches but on_failure='warn'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="warn"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify WARNING log was created (using format string) - mock_logger.warning.assert_any_call( - "%s (on_failure=warn, continuing)", "Hook failed: expected exit code 0, got 1" - ) - - async def test_hook_exit_code_mismatch_block(self, mock_device_factory, hook_context): - """Test that hook raises exception when exit code mismatches and 
on_failure='block'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="exit 1", timeout=10, exit_code=0, on_failure="block"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 1 - mock_process.stdout.readline.side_effect = [b""] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - with pytest.raises(HookExecutionError, match="expected exit code 0, got 1"): - await executor.execute_before_lease_hook(hook_context) - - async def test_hook_timeout_with_pass(self, mock_device_factory, hook_context): - """Test that hook succeeds when timeout occurs but on_failure='pass'.""" - hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="pass"), - ) - - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.terminate = AsyncMock(return_value=None) - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is True - # Verify INFO log was created - assert any("on_failure=pass, continuing" in str(call) for call in mock_logger.info.call_args_list) - - async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): + async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context) -> None: """Test that hook succeeds when timeout occurs but on_failure='warn'.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), @@ -490,6 +340,6 @@ async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context): ): executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) result = await executor.execute_before_lease_hook(hook_context) - assert result is True + assert result is None # Verify WARNING log was created assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) From c7ca1e570115b2e634dbfd3752e30226809305a2 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 24 Nov 2025 14:07:09 -0500 Subject: [PATCH 08/21] Improve messaging and typing --- .../jumpstarter/exporter/exporter.py | 49 ++++++------------- 1 file changed, 15 insertions(+), 34 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 8d9c8cf7d..ef7398e96 100644 --- 
a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -175,8 +175,12 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: - logger.info("Stopping exporter immediately, unregister from controller=%s", should_unregister) + if should_unregister: + logger.info("Stopping exporter immediately, unregistering from controller") + else: + logger.info("Stopping exporter immediately, will not unregister from controller") self._unregister = should_unregister + # Cancel any ongoing tasks self._tg.cancel_scope.cancel() elif not self._stop_requested: self._stop_requested = True @@ -228,14 +232,7 @@ async def _retry_stream( def _listen_stream_factory( self, lease_name: str ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.ListenResponse, None]]: - """Create a stream factory for listening to connection requests. - - Args: - lease_name: The lease name to listen for - - Returns: - A factory function that creates a Listen stream when given a ControllerServiceStub - """ + """Create a stream factory for listening to connection requests.""" def factory( ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, @@ -247,11 +244,7 @@ def factory( def _status_stream_factory( self, ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.StatusResponse, None]]: - """Create a stream factory for status updates. - - Returns: - A factory function that creates a Status stream when given a ControllerServiceStub - """ + """Create a stream factory for status updates.""" def factory( ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, @@ -260,24 +253,10 @@ def factory( return factory - def _create_hook_context(self, lease_name: str, client_name: str) -> HookContext: - """Create a standardized hook context. 
- - Args: - lease_name: Name of the lease - client_name: Name of the client - - Returns: - HookContext object with consistent fields - """ - return HookContext( - lease_name=lease_name, - client_name=client_name, - ) - async def _register_with_controller(self, channel: grpc.aio.Channel): """Register the exporter with the controller.""" - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) + exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(channel) + response: jumpstarter_pb2.GetReportResponse = await exporter_stub.GetReport(empty_pb2.Empty()) logger.info("Registering exporter with controller") controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) await controller.Register( @@ -286,7 +265,10 @@ async def _register_with_controller(self, channel: grpc.aio.Channel): reports=response.reports, ) ) + # Mark exporter as registered internally self._registered = True + # Report that exporter is available to the controller + # TODO: Determine if the controller should handle this logic internally await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): @@ -472,12 +454,11 @@ async def serve(self): # noqa: C901 self._status_stream_factory(), status_tx, ) - # Type: status is jumpstarter_pb2.StatusResponse with lease_name and other status fields async for status in status_rx: if self._lease_name != "" and self._lease_name != status.lease_name: # After-lease hook for the previous lease (lease name changed) if self.hook_executor and self._current_client_name: - hook_context = self._create_hook_context(self._lease_name, self._current_client_name) + hook_context = HookContext(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") @@ -527,7 +508,7 @@ async def serve(self): # noqa: C901 # Before-lease hook when transitioning from unleased to leased if not previous_leased: if self.hook_executor: - hook_context = self._create_hook_context(status.lease_name, status.client_name) + hook_context = HookContext(status.lease_name, status.client_name) tg.start_soon(self.run_before_lease_hook, hook_context) else: # No hook configured, set event immediately @@ -539,7 +520,7 @@ async def serve(self): # noqa: C901 # After-lease hook when transitioning from leased to unleased if previous_leased and self.hook_executor and self._current_client_name: - hook_context = self._create_hook_context(self._lease_name, self._current_client_name) + hook_context = HookContext(self._lease_name, self._current_client_name) # Shield the after-lease hook from cancellation and await it with CancelScope(shield=True): await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") From 4e4a42bb3d2755bf317c14ac5432521e4cc4046d Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:17:42 -0500 Subject: [PATCH 09/21] Finish refactoring the Exporter class and improve hooks handling --- .../jumpstarter/jumpstarter/common/utils.py | 9 +- .../jumpstarter/config/exporter.py | 8 +- .../jumpstarter/exporter/exporter.py | 286 ++++-------- .../jumpstarter/jumpstarter/exporter/hooks.py | 358 +++++++++----- .../jumpstarter/exporter/hooks_test.py | 442 ++++++++---------- .../jumpstarter/exporter/lease_context.py | 63 +++ .../jumpstarter/exporter/session.py | 10 +- 7 files changed, 626 
insertions(+), 550 deletions(-) create mode 100644 packages/jumpstarter/jumpstarter/exporter/lease_context.py diff --git a/packages/jumpstarter/jumpstarter/common/utils.py b/packages/jumpstarter/jumpstarter/common/utils.py index 8fb3cc67f..fc37ae796 100644 --- a/packages/jumpstarter/jumpstarter/common/utils.py +++ b/packages/jumpstarter/jumpstarter/common/utils.py @@ -2,20 +2,23 @@ import sys from contextlib import ExitStack, asynccontextmanager, contextmanager from subprocess import Popen +from typing import TYPE_CHECKING from anyio.from_thread import BlockingPortal, start_blocking_portal from jumpstarter.client import client_from_path from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.driver import Driver from jumpstarter.exporter import Session from jumpstarter.utils.env import env +if TYPE_CHECKING: + from jumpstarter.driver import Driver + __all__ = ["env"] @asynccontextmanager -async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitStack): +async def serve_async(root_device: "Driver", portal: BlockingPortal, stack: ExitStack): with Session(root_device=root_device) as session: async with session.serve_unix_async() as path: # SAFETY: the root_device instance is constructed locally thus considered trusted @@ -28,7 +31,7 @@ async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitSt @contextmanager -def serve(root_device: Driver): +def serve(root_device: "Driver"): with start_blocking_portal() as portal: with ExitStack() as stack: with portal.wrap_async_context_manager(serve_async(root_device, portal, stack)) as client: diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py index 11f94f1d4..b4998caf4 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -2,7 +2,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from pathlib import Path -from typing import Any, ClassVar, Literal, Optional, Self +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, Self import grpc import yaml @@ -15,7 +15,9 @@ from jumpstarter.common.exceptions import ConfigurationError from jumpstarter.common.grpc import aio_secure_channel, ssl_channel_credentials from jumpstarter.common.importlib import import_class -from jumpstarter.driver import Driver + +if TYPE_CHECKING: + from jumpstarter.driver import Driver class HookInstanceConfigV1Alpha1(BaseModel): @@ -71,7 +73,7 @@ class ExporterConfigV1Alpha1DriverInstance(RootModel): | ExporterConfigV1Alpha1DriverInstanceProxy ) - def instantiate(self) -> Driver: + def instantiate(self) -> "Driver": match self.root: case ExporterConfigV1Alpha1DriverInstanceBase(): driver_class = import_class(self.root.type, [], True) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index ef7398e96..1efc2dff3 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -2,7 +2,7 @@ from collections.abc import AsyncGenerator, Awaitable, Callable from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Any, Self +from typing import TYPE_CHECKING, Any, Self import grpc from anyio import ( @@ -25,10 +25,13 @@ from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import 
TLSConfigV1Alpha1 -from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor +from jumpstarter.exporter.hooks import HookExecutor +from jumpstarter.exporter.lease_context import LeaseContext from jumpstarter.exporter.session import Session +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @@ -51,7 +54,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): Used when creating controller stubs, unregistering, and establishing streams. """ - device_factory: Callable[[], Driver] + device_factory: Callable[[], "Driver"] """Factory function for creating Driver instances representing the hardware/devices. Called when creating Sessions to provide access to the underlying device. @@ -86,13 +89,6 @@ class Exporter(AsyncContextManagerMixin, Metadata): # Internal State Fields - _lease_name: str = field(init=False, default="") - """Current lease name assigned by the controller. - - Empty string indicates no active lease. Updated when controller assigns/reassigns - the exporter. Used to detect lease transitions and create hook contexts. - """ - _registered: bool = field(init=False, default=False) """Tracks whether exporter has successfully registered with the controller. @@ -128,20 +124,6 @@ class Exporter(AsyncContextManagerMixin, Metadata): when stopping. Set during serve() and cleared when done. """ - _current_client_name: str = field(init=False, default="") - """Name of the client currently holding the lease. - - Used to create hook contexts with client information and determine if - after-lease hooks should run. Reset when lease is released. - """ - - _before_lease_hook: Event | None = field(init=False, default=None) - """Synchronization event that blocks connection handling until hook completes. - - Created when a new lease starts, waited on before accepting connections, - and set when hook completes or is not configured. - """ - _exporter_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) """Current status of the exporter. @@ -150,19 +132,22 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ - _current_session: Session | None = field(init=False, default=None) - """Reference to the currently active Session object. + _lease_scope: LeaseContext | None = field(init=False, default=None) + """Encapsulates all resources associated with the current lease. - A Session wraps the root device and provides gRPC service endpoints. - Used to update session status and pass to HookExecutor for logging. - Set in session() context manager and cleared when context exits. - """ + Contains the session, socket path, and synchronization event needed + throughout the lease lifecycle. This replaces the previous individual + _current_session, _session_socket_path, and _before_lease_hook fields. - _session_socket_path: str | None = field(init=False, default=None) - """Unix socket path where the current session is serving. + Lifecycle: + 1. Created in serve() when a lease is assigned (session/socket initially None) + 2. Populated in handle_lease() when the session is created + 3. Accessed by hook execution methods and status reporting + 4. Cleared when lease ends or changes - Passed to hooks so they can communicate with the device via the CLI. - Enables session reuse instead of creating new ones for hooks. 
+ The session and socket are managed by the context manager in handle_lease(), + ensuring proper cleanup when the lease ends. The LeaseScope itself is just + a reference holder and doesn't manage resource lifecycles directly. """ def stop(self, wait_for_lease_exit=False, should_unregister=False): @@ -276,8 +261,8 @@ async def _report_status(self, status: ExporterStatus, message: str = ""): self._exporter_status = status # Update session status if available - if self._current_session: - self._current_session.update_status(status, message) + if self._lease_scope and self._lease_scope.session: + self._lease_scope.session.update_status(status, message) try: controller = await self._get_controller_stub() @@ -356,29 +341,28 @@ async def _handle_client_conn( @asynccontextmanager async def session(self): - """Create and manage an exporter Session context.""" + """Create and manage an exporter Session context. + + Yields: + tuple[Session, str]: A tuple of (session, socket_path) for use in lease handling. + """ with Session( uuid=self.uuid, labels=self.labels, root_device=self.device_factory(), ) as session: - # Store session reference outside context for status updates - self._current_session = session - try: - # Create a Unix socket - async with session.serve_unix_async() as path: - # Create a gRPC channel to the controller via the socket - async with grpc.aio.secure_channel( - f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) - ) as channel: - # Register the exporter with the controller - await self._register_with_controller(channel) - yield path - finally: - # Clear the session reference - self._current_session = None - - async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: + # Create a Unix socket + async with session.serve_unix_async() as path: + # Create a gRPC channel to the controller via the socket + async with grpc.aio.secure_channel( + f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) + ) as channel: + # Register the exporter with the controller + await self._register_with_controller(channel) + # Yield both session and path for creating LeaseScope + yield session, path + + async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseContext) -> None: """Handle all incoming client connections for a lease. This method orchestrates the complete lifecycle of managing connections during @@ -386,14 +370,16 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: tasks to handle each client connection. The method performs the following steps: - 1. Sets up a stream to listen for incoming connection requests - 2. Creates a session with a Unix socket for device access - 3. Waits for the before-lease hook to complete (if configured) - 4. Spawns a new task for each incoming connection request + 1. Creates a session for the lease duration + 2. Populates the lease_scope with session and socket path + 3. Sets up a stream to listen for incoming connection requests + 4. Waits for the before-lease hook to complete (if configured) + 5. 
Spawns a new task for each incoming connection request Args: lease_name: Name of the lease to handle connections for tg: TaskGroup for spawning concurrent connection handler tasks + lease_scope: LeaseScope with before_lease_hook event (session/socket set here) Note: This method runs for the entire duration of the lease and is spawned by @@ -412,16 +398,16 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: listen_tx, ) - # Create a lease session to execute hooks and handle connections - async with self.session() as path: - # Store socket path for hook execution - self._session_socket_path = path + # Create session for the lease duration and populate lease_scope + async with self.session() as (session, path): + # Populate the lease scope with session and socket path + lease_scope.session = session + lease_scope.socket_path = path # Wait for before-lease hook to complete before processing client connections - if self._before_lease_hook is not None: - logger.info("Waiting for before-lease hook to complete before accepting connections") - await self._before_lease_hook.wait() - logger.info("Before-lease hook completed, now accepting connections") + logger.info("Waiting for before-lease hook to complete before accepting connections") + await lease_scope.before_lease_hook.wait() + logger.info("Before-lease hook completed, now accepting connections") # Process client connections # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields @@ -429,7 +415,7 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup) -> None: logger.info("Handling new connection request on lease %s", lease_name) tg.start_soon( self._handle_client_conn, - path, + lease_scope.socket_path, request.router_endpoint, request.router_token, self.tls, @@ -455,38 +441,25 @@ async def serve(self): # noqa: C901 status_tx, ) async for status in status_rx: - if self._lease_name != "" and self._lease_name != status.lease_name: + # Check if lease name changed (and there was a previous active lease) + lease_changed = ( + self._lease_scope + and self._lease_scope.is_active() + and self._lease_scope.lease_name != status.lease_name + ) + if lease_changed: # After-lease hook for the previous lease (lease name changed) - if self.hook_executor and self._current_client_name: - hook_context = HookContext(self._lease_name, self._current_client_name) - # Shield the after-lease hook from cancellation and await it + if self.hook_executor and self._lease_scope.has_client(): with CancelScope(shield=True): - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - self.hook_executor.main_session = self._current_session - try: - await self.hook_executor.execute_after_lease_hook( - hook_context, socket_path=self._session_socket_path - ) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - except HookExecutionError as e: - logger.error("afterLease hook failed (on_failure=endLease/exit): %s", e) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, - f"afterLease hook failed: {e}", - ) - logger.error("Shutting down exporter due to afterLease hook failure") - self.stop() - except Exception as e: - logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, - f"afterLease hook failed: {e}", - ) - - self._lease_name = status.lease_name + await self.hook_executor.run_after_lease_hook( + self._lease_scope, + self._report_status, 
+ self.stop, + ) + logger.info("Lease status changed, killing existing connections") - # Reset event for next lease - self._before_lease_hook = None + # Clear lease scope for next lease + self._lease_scope = None self.stop() break @@ -494,47 +467,52 @@ async def serve(self): # noqa: C901 previous_leased = hasattr(self, "_previous_leased") and self._previous_leased current_leased = status.leased - self._lease_name = status.lease_name - if not self._started and self._lease_name != "": + # Check if this is a new lease assignment (first time or lease name changed) + if not self._started and status.lease_name != "": self._started = True - # Create event for pre-lease synchronization - self._before_lease_hook = Event() - tg.start_soon(self.handle_lease, self._lease_name, tg) + # Create lease scope and start handling the lease + # The session will be created inside handle_lease and stay open for the lease duration + lease_scope = LeaseContext( + lease_name=status.lease_name, + before_lease_hook=Event(), + ) + self._lease_scope = lease_scope + tg.start_soon(self.handle_lease, status.lease_name, tg, lease_scope) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) - self._current_client_name = status.client_name + if self._lease_scope: + self._lease_scope.update_client(status.client_name) # Before-lease hook when transitioning from unleased to leased if not previous_leased: - if self.hook_executor: - hook_context = HookContext(status.lease_name, status.client_name) - tg.start_soon(self.run_before_lease_hook, hook_context) + if self.hook_executor and self._lease_scope: + tg.start_soon( + self.hook_executor.run_before_lease_hook, + self._lease_scope, + self._report_status, + self.stop, # Pass shutdown callback + ) else: # No hook configured, set event immediately await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._before_lease_hook: - self._before_lease_hook.set() + if self._lease_scope: + self._lease_scope.before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - if previous_leased and self.hook_executor and self._current_client_name: - hook_context = HookContext(self._lease_name, self._current_client_name) - # Shield the after-lease hook from cancellation and await it + if previous_leased and self.hook_executor and self._lease_scope and self._lease_scope.has_client(): + # Shield the after-lease hook from cancellation with CancelScope(shield=True): - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - # Use session socket if available, otherwise create new session - await self.hook_executor.execute_after_lease_hook( - hook_context, socket_path=self._session_socket_path + await self.hook_executor.run_after_lease_hook( + self._lease_scope, + self._report_status, + self.stop, ) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - self._current_client_name = "" - # Reset event for next lease - self._before_lease_hook = None + # Clear lease scope for next lease + self._lease_scope = None if self._stop_requested: self.stop(should_unregister=True) @@ -542,69 +520,3 @@ async def serve(self): # noqa: C901 self._previous_leased = current_leased self._tg = None - - async def run_before_lease_hook(self, hook_ctx: HookContext): - """ - Execute the before-lease hook for the 
current exporter session. - - Args: - hook_ctx (HookContext): The current hook execution context - """ - try: - await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - - # Wait for socket path to be available - while self._session_socket_path is None: - await sleep(0.1) - - # Execute hook with main session socket - await self.hook_executor.execute_before_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - logger.info("beforeLease hook completed successfully") - except HookExecutionError as e: - # Hook failed with on_failure='block' - end lease and set failed status - logger.error("beforeLease hook failed (on_failure=block): %s", e) - await self._report_status( - ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed (on_failure=block): {e}" - ) - # Note: We don't take the exporter offline for before_lease hook failures - # The lease is simply not ready, and the exporter remains available for future leases - except Exception as e: - # Unexpected error during hook execution - logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, f"beforeLease hook failed: {e}") - finally: - # Always set the event to unblock connections - if self._before_lease_hook: - self._before_lease_hook.set() - - async def run_after_lease_hook(self, hook_ctx: HookContext): - """ - Execute the after-lease hook for the current exporter session. - - Args: - hook_ctx (HookContext): The current hook execution context - """ - try: - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") - # Pass the current session to hook executor for logging - self.hook_executor.main_session = self._current_session - # Use session socket if available, otherwise create new session - await self.hook_executor.execute_after_lease_hook(hook_ctx, socket_path=self._session_socket_path) - await self._report_status(ExporterStatus.AVAILABLE, "Available for new lease") - logger.info("afterLease hook completed successfully") - except HookExecutionError as e: - # Hook failed with on_failure='block' - set failed status and shut down exporter - logger.error("afterLease hook failed (on_failure=block): %s", e) - await self._report_status( - ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed (on_failure=block): {e}" - ) - # Shut down the exporter after after_lease hook failure with on_failure='block' - logger.error("Shutting down exporter due to afterLease hook failure") - self.stop() - except Exception as e: - # Unexpected error during hook execution - logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) - await self._report_status(ExporterStatus.AFTER_LEASE_HOOK_FAILED, f"afterLease hook failed: {e}") diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index a38275603..3a76fda77 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -3,35 +3,47 @@ import asyncio import logging import os -from contextlib import asynccontextmanager -from dataclasses import dataclass, field -from typing import AsyncGenerator, Callable +from collections.abc import Awaitable +from dataclasses import dataclass +from typing import 
TYPE_CHECKING, Callable, Literal -from jumpstarter.common import LogSource +from jumpstarter.common import ExporterStatus, LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 -from jumpstarter.driver import Driver from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session +if TYPE_CHECKING: + from jumpstarter.driver import Driver + from jumpstarter.exporter.lease_context import LeaseContext + logger = logging.getLogger(__name__) +@dataclass class HookExecutionError(Exception): - """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'.""" + """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'. - pass + Attributes: + message: Error message describing the failure + on_failure: The on_failure mode that triggered this error ('endLease' or 'exit') + hook_type: The type of hook that failed ('before_lease' or 'after_lease') + """ + message: str + on_failure: Literal["endLease", "exit"] + hook_type: Literal["before_lease", "after_lease"] -@dataclass(kw_only=True) -class HookContext: - """Context information passed to hooks.""" + def __str__(self) -> str: + return self.message - lease_name: str - client_name: str = "" - lease_duration: str = "" - exporter_name: str = "" - exporter_namespace: str = "" + def should_shutdown_exporter(self) -> bool: + """Returns True if the exporter should be shut down entirely.""" + return self.on_failure == "exit" + + def should_end_lease(self) -> bool: + """Returns True if the lease should be ended.""" + return self.on_failure in ("endLease", "exit") @dataclass(kw_only=True) @@ -39,15 +51,13 @@ class HookExecutor: """Executes lifecycle hooks with access to the j CLI.""" config: HookConfigV1Alpha1 - device_factory: Callable[[], Driver] - main_session: Session | None = field(default=None) + device_factory: Callable[[], "Driver"] - def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, str]: + def _create_hook_env(self, lease_scope: "LeaseContext") -> dict[str, str]: """Create standardized hook environment variables. 
Args: - context: Hook context information - socket_path: Path to the Unix socket for JUMPSTARTER_HOST + lease_scope: LeaseScope containing lease metadata and socket path Returns: Dictionary of environment variables for hook execution @@ -55,51 +65,26 @@ def _create_hook_env(self, context: HookContext, socket_path: str) -> dict[str, hook_env = os.environ.copy() hook_env.update( { - JUMPSTARTER_HOST: str(socket_path), + JUMPSTARTER_HOST: str(lease_scope.socket_path), JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access - "LEASE_NAME": context.lease_name, - "CLIENT_NAME": context.client_name, - "LEASE_DURATION": context.lease_duration, - "EXPORTER_NAME": context.exporter_name, - "EXPORTER_NAMESPACE": context.exporter_namespace, + "LEASE_NAME": lease_scope.lease_name, + "CLIENT_NAME": lease_scope.client_name, } ) return hook_env - @asynccontextmanager - async def _create_hook_environment( - self, context: HookContext - ) -> AsyncGenerator[tuple[Session, dict[str, str]], None]: - """Create a local session and Unix socket for j CLI access.""" - with Session( - root_device=self.device_factory(), - # Use hook context for metadata - labels={ - "jumpstarter.dev/hook-context": "true", - "jumpstarter.dev/lease": context.lease_name, - }, - ) as session: - async with session.serve_unix_async() as unix_path: - # Create environment variables for the hook - hook_env = self._create_hook_env(context, unix_path) - - yield session, hook_env - async def _execute_hook( self, hook_config: HookInstanceConfigV1Alpha1, - context: HookContext, + lease_scope: "LeaseContext", log_source: LogSource, - socket_path: str | None = None, ) -> None: """Execute a single hook command. Args: hook_config: Hook configuration including script, timeout, and on_failure - context: Hook context information + lease_scope: LeaseScope containing lease metadata and session log_source: Log source for hook output - socket_path: Optional Unix socket path to reuse existing session. - If provided, hooks will access the main session instead of creating their own. """ command = hook_config.script if not command or not command.strip(): @@ -108,37 +93,70 @@ async def _execute_hook( logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) - # If socket_path provided, use existing session; otherwise create new one - if socket_path is not None: - # Reuse existing session - create environment without session creation - hook_env = self._create_hook_env(context, socket_path) + # Determine hook type from log source + hook_type = "before_lease" if log_source == LogSource.BEFORE_LEASE_HOOK else "after_lease" - # Use main session for logging (must be available when socket_path is provided) - logging_session = self.main_session - if logging_session is None: - raise ValueError("main_session must be set when reusing socket_path") + # Use existing session from lease_scope + hook_env = self._create_hook_env(lease_scope) + + return await self._execute_hook_process( + hook_config, lease_scope, log_source, hook_env, lease_scope.session, hook_type + ) + + def _handle_hook_failure( + self, + error_msg: str, + on_failure: Literal["warn", "endLease", "exit"], + hook_type: Literal["before_lease", "after_lease"], + cause: Exception | None = None, + ) -> None: + """Handle hook failure according to on_failure setting. 
+ + Args: + error_msg: Error message describing the failure + on_failure: The on_failure mode ('warn', 'endLease', or 'exit') + hook_type: The type of hook that failed + cause: Optional exception that caused the failure + + Raises: + HookExecutionError: If on_failure is 'endLease' or 'exit' + """ + if on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return - return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + logger.error("%s (on_failure=%s, raising exception)", error_msg, on_failure) + + error = HookExecutionError( + message=error_msg, + on_failure=on_failure, + hook_type=hook_type, + ) + + # Properly handle exception chaining + if cause is not None: + raise error from cause else: - # Create new session for hook execution (fallback/standalone mode) - async with self._create_hook_environment(context) as (session, hook_env): - # Determine which session to use for logging - logging_session = self.main_session if self.main_session is not None else session - return await self._execute_hook_process(hook_config, context, log_source, hook_env, logging_session) + raise error async def _execute_hook_process( self, hook_config: HookInstanceConfigV1Alpha1, - context: HookContext, + lease_scope: "LeaseContext", log_source: LogSource, hook_env: dict[str, str], logging_session: Session, + hook_type: Literal["before_lease", "after_lease"], ) -> None: """Execute the hook process with the given environment and logging session.""" command = hook_config.script timeout = hook_config.timeout on_failure = hook_config.on_failure + # Exception handling + error_msg: str | None = None + cause: Exception | None = None + try: # Execute the hook command using shell process = await asyncio.create_subprocess_shell( @@ -150,7 +168,7 @@ async def _execute_hook_process( try: # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{context.lease_name}", log_source, logging_session) + hook_logger = get_logger(f"hook.{lease_scope.lease_name}", log_source, logging_session) # Stream output line-by-line for real-time logging output_lines = [] @@ -172,61 +190,37 @@ async def read_output(): if process.returncode == 0: logger.info("Hook executed successfully") return - else: - # Non-zero exit code is a failure - handle according to on_failure setting - error_msg = f"Hook failed with exit code {process.returncode}" - - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - logger.error("%s (on_failure=endLease, raising exception)", error_msg) - raise HookExecutionError(error_msg) - else: # on_failure == "exit" - logger.error("%s (on_failure=exit, raising exception)", error_msg) - raise HookExecutionError(error_msg) + + # Non-zero exit code is a failure + error_msg = f"Hook failed with exit code {process.returncode}" except asyncio.TimeoutError as e: error_msg = f"Hook timed out after {timeout} seconds" + cause = e logger.error(error_msg) try: + # Attempt to gracefully terminate the process process.terminate() await asyncio.wait_for(process.wait(), timeout=5) except asyncio.TimeoutError: + # Force kill if it didn't terminate in time process.kill() await process.wait() - # Handle timeout according to on_failure setting - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - raise HookExecutionError(error_msg) from e - else: # on_failure == "exit" - raise 
HookExecutionError(error_msg) from e - - except HookExecutionError: - # Re-raise HookExecutionError to propagate to exporter - raise except Exception as e: error_msg = f"Error executing hook: {e}" + cause = e logger.error(error_msg, exc_info=True) - # Handle exception according to on_failure setting - if on_failure == "warn": - logger.warning("%s (on_failure=warn, continuing)", error_msg) - return - elif on_failure == "endLease": - raise HookExecutionError(error_msg) from e - else: # on_failure == "exit" - raise HookExecutionError(error_msg) from e + # Handle failure if one occurred + if error_msg is not None: + self._handle_hook_failure(error_msg, on_failure, hook_type, cause) - async def execute_before_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: + async def execute_before_lease_hook(self, lease_scope: "LeaseContext") -> None: """Execute the before-lease hook. Args: - context: Hook context information - socket_path: Optional Unix socket path to reuse existing session + lease_scope: LeaseScope with lease metadata and session Raises: HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' @@ -235,20 +229,18 @@ async def execute_before_lease_hook(self, context: HookContext, socket_path: str logger.debug("No before-lease hook configured") return - logger.info("Executing before-lease hook for lease %s", context.lease_name) + logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name) await self._execute_hook( self.config.before_lease, - context, + lease_scope, LogSource.BEFORE_LEASE_HOOK, - socket_path, ) - async def execute_after_lease_hook(self, context: HookContext, socket_path: str | None = None) -> None: + async def execute_after_lease_hook(self, lease_scope: "LeaseContext") -> None: """Execute the after-lease hook. Args: - context: Hook context information - socket_path: Optional Unix socket path to reuse existing session + lease_scope: LeaseScope with lease metadata and session Raises: HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit' @@ -257,10 +249,154 @@ async def execute_after_lease_hook(self, context: HookContext, socket_path: str logger.debug("No after-lease hook configured") return - logger.info("Executing after-lease hook for lease %s", context.lease_name) + logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name) await self._execute_hook( self.config.after_lease, - context, + lease_scope, LogSource.AFTER_LEASE_HOOK, - socket_path, ) + + async def run_before_lease_hook( + self, + lease_scope: "LeaseContext", + report_status: Callable[["ExporterStatus", str], Awaitable[None]], + shutdown: Callable[[], None], + ) -> None: + """Execute before-lease hook with full orchestration. 
+ + This method handles the complete lifecycle of running a before-lease hook: + - Waits for the lease scope to be ready (session/socket populated) + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Always signals the before_lease_hook event to unblock connections + + Args: + lease_scope: LeaseScope containing session, socket_path, and sync event + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown on critical failures + """ + try: + # Wait for lease scope to be fully populated by handle_lease + assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running before-lease hooks" + + # Check if hook is configured + if not self.config.before_lease: + logger.debug("No before-lease hook configured") + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + return + + await report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hook") + + # Execute hook with lease scope + logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.before_lease, + lease_scope, + LogSource.BEFORE_LEASE_HOOK, + ) + + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("beforeLease hook failed with on_failure='exit': %s", e) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to beforeLease hook failure with on_failure='exit'") + shutdown() + else: + # on_failure='endLease' - just block this lease, exporter stays available + logger.error("beforeLease hook failed with on_failure='endLease': %s", e) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=endLease): {e}", + ) + # TODO: We need to implement a controller-side mechanism to end the lease here + + except Exception as e: + logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - just block the lease + + finally: + # Always set the event to unblock connections + lease_scope.before_lease_hook.set() + + async def run_after_lease_hook( + self, + lease_scope: "LeaseContext", + report_status: Callable[["ExporterStatus", str], Awaitable[None]], + shutdown: Callable[[], None], + ) -> None: + """Execute after-lease hook with full orchestration. 
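+
+        Typically awaited from the exporter's serve() loop inside a shielded
+        cancel scope so cleanup still runs during lease teardown (illustrative
+        sketch; names are the caller's own objects)::
+
+            with CancelScope(shield=True):
+                await hook_executor.run_after_lease_hook(
+                    lease_context,
+                    report_status,
+                    shutdown,
+                )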
+ + This method handles the complete lifecycle of running an after-lease hook: + - Validates that the lease scope is ready + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Triggers shutdown on critical failures (HookExecutionError) + + Args: + lease_scope: LeaseScope containing session, socket_path, and client info + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown on critical failures + """ + try: + # Verify lease scope is ready + assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running after-lease hooks" + + # Check if hook is configured + if not self.config.after_lease: + logger.debug("No after-lease hook configured") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return + + await report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + + # Execute hook with lease scope + logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.after_lease, + lease_scope, + LogSource.AFTER_LEASE_HOOK, + ) + + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + logger.info("afterLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("afterLease hook failed with on_failure='exit': %s", e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to afterLease hook failure with on_failure='exit'") + shutdown() + else: + # on_failure='endLease' - lease already ended, just report the failure + # The exporter remains available for new leases + logger.error("afterLease hook failed with on_failure='endLease': %s", e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=endLease): {e}", + ) + # Note: Lease has already ended - no shutdown needed, exporter remains available + + except Exception as e: + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - exporter remains available diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py index f31257dff..d39a6ecca 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -7,7 +7,7 @@ from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 from jumpstarter.driver import Driver -from jumpstarter.exporter.hooks import HookContext, HookExecutionError, HookExecutor +from jumpstarter.exporter.hooks import HookExecutionError, HookExecutor pytestmark = pytest.mark.anyio @@ -41,14 +41,21 @@ def hook_config() -> HookConfigV1Alpha1: @pytest.fixture -def hook_context() -> HookContext: - return HookContext( +def lease_scope(): + from anyio import Event + + from jumpstarter.exporter.lease_context import LeaseContext + + lease_scope = LeaseContext( lease_name="test-lease-123", + before_lease_hook=Event(), client_name="test-client", - 
lease_duration="30m", - exporter_name="test-exporter", - exporter_namespace="default", ) + # Add mock session to lease_scope + mock_session = Mock() + lease_scope.session = mock_session + lease_scope.socket_path = "/tmp/test_socket" + return lease_scope class TestHookExecutor: @@ -61,7 +68,7 @@ async def test_hook_executor_creation(self, hook_config, mock_device_factory) -> assert executor.config == hook_config assert executor.device_factory == mock_device_factory - async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> None: + async def test_empty_hook_execution(self, mock_device_factory, lease_scope) -> None: empty_config = HookConfigV1Alpha1() executor = HookExecutor( config=empty_config, @@ -69,277 +76,228 @@ async def test_empty_hook_execution(self, mock_device_factory, hook_context) -> ) # Both hooks should return None for empty/None commands - assert await executor.execute_before_lease_hook(hook_context) is None - assert await executor.execute_after_lease_hook(hook_context) is None + assert await executor.execute_before_lease_hook(lease_scope) is None + assert await executor.execute_after_lease_hook(lease_scope) is None - async def test_successful_hook_execution(self, mock_device_factory, hook_context) -> None: + async def test_successful_hook_execution(self, mock_device_factory, lease_scope) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) - # Mock the Session and serve_unix_async - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - # Mock the async context manager for serve_unix_async - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - # Mock asyncio.create_subprocess_shell to simulate successful execution - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock stdout.readline to simulate line-by-line output - mock_process.stdout.readline.side_effect = [ - b"Pre-lease hook executed\n", - b"", # EOF - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_before_lease_hook(hook_context) - - assert result is None - - # Verify subprocess was called with correct environment - mock_subprocess.assert_called_once() - call_args = mock_subprocess.call_args - command = call_args[0][0] - env = call_args[1]["env"] - - assert command == "echo 'Pre-lease hook executed'" - assert JUMPSTARTER_HOST in env - assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" - assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - assert env["LEASE_NAME"] == "test-lease-123" - assert env["CLIENT_NAME"] == "test-client" - - async def test_failed_hook_execution(self, mock_device_factory, hook_context) -> None: + # Mock asyncio.create_subprocess_shell to simulate successful execution + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline to simulate line-by-line output + mock_process.stdout.readline.side_effect = [ + b"Pre-lease hook executed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as 
mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify subprocess was called with correct environment + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + command = call_args[0][0] + env = call_args[1]["env"] + + assert command == "echo 'Pre-lease hook executed'" + assert JUMPSTARTER_HOST in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + + async def test_failed_hook_execution(self, mock_device_factory, lease_scope) -> None: failed_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( script="exit 1", timeout=10, on_failure="endLease" ), # Command that will fail with on_failure="endLease" ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - # Mock failed process - mock_process = AsyncMock() - mock_process.returncode = 1 - # Mock stdout.readline for failed process - mock_process.stdout.readline.side_effect = [ - b"Command failed\n", - b"", # EOF - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process): - executor = HookExecutor( - config=failed_config, - device_factory=mock_device_factory, - ) - - # Should raise HookExecutionError since on_failure="endLease" - with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): - await executor.execute_before_lease_hook(hook_context) - - async def test_hook_timeout(self, mock_device_factory, hook_context) -> None: + # Mock failed process + mock_process = AsyncMock() + mock_process.returncode = 1 + # Mock stdout.readline for failed process + mock_process.stdout.readline.side_effect = [ + b"Command failed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=failed_config, + device_factory=mock_device_factory, + ) + + # Should raise HookExecutionError since on_failure="endLease" + with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): + await executor.execute_before_lease_hook(lease_scope) + + async def test_hook_timeout(self, mock_device_factory, lease_scope) -> None: timeout_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1( script="sleep 60", timeout=1, on_failure="exit" ), # Command that will timeout with on_failure="exit" ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) + # Mock process that times out + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) - # Mock process that times out - 
mock_process = AsyncMock() - mock_process.terminate.return_value = None - mock_process.wait.return_value = None + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + ): + executor = HookExecutor( + config=timeout_config, + device_factory=mock_device_factory, + ) - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - ): - executor = HookExecutor( - config=timeout_config, - device_factory=mock_device_factory, - ) + # Should raise HookExecutionError since on_failure="exit" + with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): + await executor.execute_before_lease_hook(lease_scope) - # Should raise HookExecutionError since on_failure="exit" - with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): - await executor.execute_before_lease_hook(hook_context) + mock_process.terminate.assert_called_once() - mock_process.terminate.assert_called_once() - - async def test_hook_environment_variables(self, mock_device_factory, hook_context) -> None: + async def test_hook_environment_variables(self, mock_device_factory, lease_scope) -> None: hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock stdout.readline for environment test - mock_process.stdout.readline.side_effect = [ - b"", # EOF (no output) - ] - mock_process.wait = AsyncMock(return_value=None) - - with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - await executor.execute_before_lease_hook(hook_context) - - # Check that all expected environment variables are set - call_args = mock_subprocess.call_args - env = call_args[1]["env"] - - assert env["LEASE_NAME"] == "test-lease-123" - assert env["CLIENT_NAME"] == "test-client" - assert env["LEASE_DURATION"] == "30m" - assert env["EXPORTER_NAME"] == "test-exporter" - assert env["EXPORTER_NAMESPACE"] == "default" - assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" - assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" - - async def test_real_time_output_logging(self, mock_device_factory, hook_context) -> None: + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline for environment test + mock_process.stdout.readline.side_effect = [ + b"", # EOF (no output) + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + await executor.execute_before_lease_hook(lease_scope) + + # Check that expected environment variables are set (unused fields removed) + call_args = mock_subprocess.call_args + env = call_args[1]["env"] + + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + # These fields are no longer set: + assert 
"LEASE_DURATION" not in env + assert "EXPORTER_NAME" not in env + assert "EXPORTER_NAMESPACE" not in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + + async def test_real_time_output_logging(self, mock_device_factory, lease_scope) -> None: """Test that hook output is logged in real-time at INFO level.""" hook_config = HookConfigV1Alpha1( - before_lease=HookInstanceConfigV1Alpha1( - script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10 - ), + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock multiple lines of output to verify streaming - mock_process.stdout.readline.side_effect = [ - b"Line 1\n", - b"Line 2\n", - b"Line 3\n", - b"", # EOF + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock multiple lines of output to verify streaming + mock_process.stdout.readline.side_effect = [ + b"Line 1\n", + b"Line 2\n", + b"Line 3\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify that output lines were logged in real-time at INFO level + expected_calls = [ + call("Executing before-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call("Hook executed successfully"), ] - mock_process.wait = AsyncMock(return_value=None) - - # Mock the logger to capture log calls - with ( - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - patch("asyncio.create_subprocess_shell", return_value=mock_process), - ): - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_before_lease_hook(hook_context) - - assert result is None - - # Verify that output lines were logged in real-time at INFO level - expected_calls = [ - call("Executing before-lease hook for lease %s", "test-lease-123"), - call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), - call("Hook executed successfully"), - ] - mock_logger.info.assert_has_calls(expected_calls, any_order=False) - - async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, hook_context) -> None: + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, lease_scope) -> None: """Test that post-lease hook executes when called directly.""" hook_config = HookConfigV1Alpha1( after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - - 
mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.returncode = 0 - # Mock post-lease hook output - mock_process.stdout.readline.side_effect = [ - b"Post-lease cleanup completed\n", - b"", # EOF + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock post-lease hook output + mock_process.stdout.readline.side_effect = [ + b"Post-lease cleanup completed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_after_lease_hook(lease_scope) + + assert result is None + + # Verify that post-lease hook output was logged + expected_calls = [ + call("Executing after-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call("Hook executed successfully"), ] - mock_process.wait = AsyncMock(return_value=None) - - # Mock the logger to capture log calls - with ( - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - patch("asyncio.create_subprocess_shell", return_value=mock_process), - ): - executor = HookExecutor( - config=hook_config, - device_factory=mock_device_factory, - ) - - result = await executor.execute_after_lease_hook(hook_context) - - assert result is None - - # Verify that post-lease hook output was logged - expected_calls = [ - call("Executing after-lease hook for lease %s", "test-lease-123"), - call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), - call("Hook executed successfully"), - ] - mock_logger.info.assert_has_calls(expected_calls, any_order=False) - - async def test_hook_timeout_with_warn(self, mock_device_factory, hook_context) -> None: + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_hook_timeout_with_warn(self, mock_device_factory, lease_scope) -> None: """Test that hook succeeds when timeout occurs but on_failure='warn'.""" hook_config = HookConfigV1Alpha1( before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), ) - with patch("jumpstarter.exporter.hooks.Session") as mock_session_class: - mock_session = Mock() - mock_session_class.return_value.__enter__.return_value = mock_session - mock_session.serve_unix_async.return_value.__aenter__ = AsyncMock(return_value="/tmp/test_socket") - mock_session.serve_unix_async.return_value.__aexit__ = AsyncMock(return_value=None) - - mock_process = AsyncMock() - mock_process.terminate = AsyncMock(return_value=None) - mock_process.wait = AsyncMock(return_value=None) - - with ( - patch("asyncio.create_subprocess_shell", return_value=mock_process), - patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), - patch("jumpstarter.exporter.hooks.logger") as mock_logger, - ): - executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) - result = await executor.execute_before_lease_hook(hook_context) - assert result is None - # Verify WARNING log was created - assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + 
mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(lease_scope) + assert result is None + # Verify WARNING log was created + assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) diff --git a/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/packages/jumpstarter/jumpstarter/exporter/lease_context.py new file mode 100644 index 000000000..f9caad72e --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -0,0 +1,63 @@ +"""LeaseScope: Context manager for lease-related resources. + +This module provides a clean abstraction for managing the lifecycle of resources +associated with a lease, including the session, socket path, and synchronization events. +""" + +from dataclasses import dataclass, field + +from anyio import Event + +from jumpstarter.exporter.session import Session + + +@dataclass +class LeaseContext: + """Encapsulates all resources associated with an active lease. + + This class bundles together the session, socket path, synchronization event, + and lease identity information that are needed throughout the lease lifecycle. + By grouping these resources, we make their relationships and lifecycles explicit. + + Attributes: + lease_name: Name of the current lease assigned by the controller + session: The Session object managing the device and gRPC services (set in handle_lease) + socket_path: Unix socket path where the session is serving (set in handle_lease) + before_lease_hook: Event that signals when before-lease hook completes + client_name: Name of the client currently holding the lease (empty if unleased) + """ + + lease_name: str + before_lease_hook: Event + session: Session | None = None + socket_path: str = "" + client_name: str = field(default="") + + def __post_init__(self): + """Validate that required resources are present.""" + assert self.before_lease_hook is not None, "LeaseScope requires a before_lease_hook event" + assert self.lease_name, "LeaseScope requires a non-empty lease_name" + + def is_ready(self) -> bool: + """Check if the lease scope has been fully initialized with session and socket. + + Note: This checks for resource initialization (session/socket), not lease activity. + Use is_active() to check if the lease itself is active. 
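+
+        Illustrative sketch (assuming the attributes defined above; ``session``
+        and ``path`` stand in for values normally set by handle_lease)::
+
+            scope = LeaseContext(lease_name="lease-1", before_lease_hook=Event())
+            scope.is_ready()          # False: session and socket not set yet
+            scope.session = session
+            scope.socket_path = path
+            scope.is_ready()          # True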
+ """ + return self.session is not None and self.socket_path != "" + + def is_active(self) -> bool: + """Check if this lease is active (has a non-empty lease name).""" + return bool(self.lease_name) + + def has_client(self) -> bool: + """Check if a client is currently holding the lease.""" + return bool(self.client_name) + + def update_client(self, client_name: str): + """Update the client name for this lease.""" + self.client_name = client_name + + def clear_client(self): + """Clear the client name when the lease is no longer held.""" + self.client_name = "" diff --git a/packages/jumpstarter/jumpstarter/exporter/session.py b/packages/jumpstarter/jumpstarter/exporter/session.py index 13d1a462a..63d1e9a0d 100644 --- a/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/packages/jumpstarter/jumpstarter/exporter/session.py @@ -4,7 +4,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from dataclasses import dataclass, field from logging.handlers import QueueHandler -from typing import Self +from typing import TYPE_CHECKING, Self from uuid import UUID import grpc @@ -19,11 +19,13 @@ from .logging import LogHandler from jumpstarter.common import ExporterStatus, LogSource, Metadata, TemporarySocket from jumpstarter.common.streams import StreamRequestMetadata -from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream from jumpstarter.streams.metadata import MetadataStreamAttributes from jumpstarter.streams.router import RouterStream +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @@ -34,8 +36,8 @@ class Session( Metadata, ContextManagerMixin, ): - root_device: Driver - mapping: dict[UUID, Driver] + root_device: "Driver" + mapping: dict[UUID, "Driver"] _logging_queue: deque = field(init=False) _logging_handler: QueueHandler = field(init=False) From aa13aeabf733e11bb81bd7433ed5d80c9b55d2f4 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:34:28 -0500 Subject: [PATCH 10/21] Fix broken tests due to field name change and status not being correct --- .../jumpstarter-testing/jumpstarter_testing/pytest_test.py | 3 +++ packages/jumpstarter/jumpstarter/client/core.py | 2 +- packages/jumpstarter/jumpstarter/common/utils.py | 4 ++++ packages/jumpstarter/jumpstarter/config/exporter.py | 3 +++ packages/jumpstarter/jumpstarter/exporter/session.py | 2 +- 5 files changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py b/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py index f1697cc2c..073bdb89c 100644 --- a/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py +++ b/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py @@ -1,6 +1,7 @@ from jumpstarter_driver_power.driver import MockPower from pytest import Pytester +from jumpstarter.common import ExporterStatus from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.exporter import Session @@ -18,6 +19,8 @@ def test_simple(self, client): with Session(root_device=MockPower()) as session: with session.serve_unix() as path: + # For local testing, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) monkeypatch.setenv(JUMPSTARTER_HOST, str(path)) monkeypatch.setenv(JMP_DRIVERS_ALLOW, "UNSAFE") result = pytester.runpytest() diff --git a/packages/jumpstarter/jumpstarter/client/core.py b/packages/jumpstarter/jumpstarter/client/core.py index 
2f6491db9..2fdffcb38 100644 --- a/packages/jumpstarter/jumpstarter/client/core.py +++ b/packages/jumpstarter/jumpstarter/client/core.py @@ -89,7 +89,7 @@ async def check_exporter_status(self): status = ExporterStatus.from_proto(response.status) if status != ExporterStatus.LEASE_READY: - raise ExporterNotReady(f"Exporter status is {status}: {response.status_message}") + raise ExporterNotReady(f"Exporter status is {status}: {response.message}") except AioRpcError as e: # If GetStatus is not implemented, assume ready for backward compatibility diff --git a/packages/jumpstarter/jumpstarter/common/utils.py b/packages/jumpstarter/jumpstarter/common/utils.py index fc37ae796..2ed94720a 100644 --- a/packages/jumpstarter/jumpstarter/common/utils.py +++ b/packages/jumpstarter/jumpstarter/common/utils.py @@ -19,8 +19,12 @@ @asynccontextmanager async def serve_async(root_device: "Driver", portal: BlockingPortal, stack: ExitStack): + from jumpstarter.common import ExporterStatus + with Session(root_device=root_device) as session: async with session.serve_unix_async() as path: + # For local testing, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) # SAFETY: the root_device instance is constructed locally thus considered trusted async with client_from_path(path, portal, stack, allow=[], unsafe=True) as client: try: diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py index b4998caf4..4122cd377 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -183,6 +183,7 @@ def delete(cls, alias: str) -> Path: @asynccontextmanager async def serve_unix_async(self): # dynamic import to avoid circular imports + from jumpstarter.common import ExporterStatus from jumpstarter.exporter import Session with Session( @@ -193,6 +194,8 @@ async def serve_unix_async(self): ).instantiate(), ) as session: async with session.serve_unix_async() as path: + # For local usage, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) yield path @contextmanager diff --git a/packages/jumpstarter/jumpstarter/exporter/session.py b/packages/jumpstarter/jumpstarter/exporter/session.py index 63d1e9a0d..663c39e03 100644 --- a/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/packages/jumpstarter/jumpstarter/exporter/session.py @@ -174,5 +174,5 @@ async def GetStatus(self, request, context): logger.debug("GetStatus() -> %s", self._current_status) return jumpstarter_pb2.GetStatusResponse( status=self._current_status.to_proto(), - status_message=self._status_message, + message=self._status_message, ) From d140613d86db21a7110375afdd74bcb9fdcd1105 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 14:53:21 -0500 Subject: [PATCH 11/21] Fix typing issues --- .../jumpstarter/jumpstarter/client/core.py | 2 ++ .../jumpstarter/jumpstarter/client/grpc.py | 3 +-- .../jumpstarter/common/__init__.py | 12 ++++++++- .../jumpstarter/jumpstarter/common/types.py | 25 +++++++++++++++++++ .../jumpstarter/jumpstarter/config/client.py | 2 +- .../jumpstarter/config/exporter.py | 2 +- 6 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 packages/jumpstarter/jumpstarter/common/types.py diff --git a/packages/jumpstarter/jumpstarter/client/core.py b/packages/jumpstarter/jumpstarter/client/core.py index 2fdffcb38..4f859f230 100644 --- a/packages/jumpstarter/jumpstarter/client/core.py 
+++ b/packages/jumpstarter/jumpstarter/client/core.py @@ -2,6 +2,8 @@ Base classes for drivers and driver clients """ +from __future__ import annotations + import logging from contextlib import asynccontextmanager from dataclasses import dataclass, field diff --git a/packages/jumpstarter/jumpstarter/client/grpc.py b/packages/jumpstarter/jumpstarter/client/grpc.py index 295e4b974..ecf80d6f7 100644 --- a/packages/jumpstarter/jumpstarter/client/grpc.py +++ b/packages/jumpstarter/jumpstarter/client/grpc.py @@ -5,7 +5,6 @@ from dataclasses import InitVar, dataclass, field from datetime import datetime, timedelta from types import SimpleNamespace -from typing import Any from google.protobuf import duration_pb2, field_mask_pb2, json_format, timestamp_pb2 from grpc import ChannelConnectivity @@ -487,7 +486,7 @@ class MultipathExporterStub: channels: InitVar[list[Channel]] - __stubs: dict[Channel, Any] = field(init=False, default_factory=OrderedDict) + __stubs: dict[Channel, SimpleNamespace] = field(init=False, default_factory=OrderedDict) def __post_init__(self, channels): for channel in channels: diff --git a/packages/jumpstarter/jumpstarter/common/__init__.py b/packages/jumpstarter/jumpstarter/common/__init__.py index 08645b471..8d6ba38bd 100644 --- a/packages/jumpstarter/jumpstarter/common/__init__.py +++ b/packages/jumpstarter/jumpstarter/common/__init__.py @@ -1,12 +1,22 @@ from .enums import ExporterStatus, LogSource from .metadata import Metadata from .tempfile import TemporarySocket, TemporaryTcpListener, TemporaryUnixListener +from .types import ( + AsyncChannel, + ControllerStub, + ExporterStub, + RouterStub, +) __all__ = [ + "AsyncChannel", + "ControllerStub", "ExporterStatus", + "ExporterStub", "LogSource", "Metadata", + "RouterStub", "TemporarySocket", - "TemporaryUnixListener", "TemporaryTcpListener", + "TemporaryUnixListener", ] diff --git a/packages/jumpstarter/jumpstarter/common/types.py b/packages/jumpstarter/jumpstarter/common/types.py new file mode 100644 index 000000000..fb1104920 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/common/types.py @@ -0,0 +1,25 @@ +"""Type aliases for gRPC and Protobuf types.""" + +from typing import TYPE_CHECKING, TypeAlias + +from grpc.aio import Channel +from jumpstarter_protocol import jumpstarter_pb2_grpc, router_pb2_grpc + +# Stub type aliases (the generic Stub classes work for both sync and async) +ExporterStub: TypeAlias = jumpstarter_pb2_grpc.ExporterServiceStub +RouterStub: TypeAlias = router_pb2_grpc.RouterServiceStub +ControllerStub: TypeAlias = jumpstarter_pb2_grpc.ControllerServiceStub + +# Channel type alias +AsyncChannel: TypeAlias = Channel + +# Async stub type aliases are only available for type checking (defined in .pyi files) +if TYPE_CHECKING: + pass + +__all__ = [ + "AsyncChannel", + "ControllerStub", + "ExporterStub", + "RouterStub", +] diff --git a/packages/jumpstarter/jumpstarter/config/client.py b/packages/jumpstarter/jumpstarter/config/client.py index 2ffee7a6f..94da54e4b 100644 --- a/packages/jumpstarter/jumpstarter/config/client.py +++ b/packages/jumpstarter/jumpstarter/config/client.py @@ -120,7 +120,7 @@ class ClientConfigV1Alpha1(BaseSettings): leases: ClientConfigV1Alpha1Lease = Field(default_factory=ClientConfigV1Alpha1Lease) - async def channel(self): + async def channel(self) -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in client config") diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py 
b/packages/jumpstarter/jumpstarter/config/exporter.py index 4122cd377..e70b00d57 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -212,7 +212,7 @@ async def create_exporter(self): from jumpstarter.exporter import Exporter - async def channel_factory(): + async def channel_factory() -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in exporter config") credentials = grpc.composite_channel_credentials( From 3b71d7a1254a35542f70c588efb453eef71b955f Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 22:18:01 -0500 Subject: [PATCH 12/21] Fix controller registration issue --- .../jumpstarter/jumpstarter/exporter/exporter.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 1efc2dff3..e80ff1863 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -238,12 +238,19 @@ def factory( return factory - async def _register_with_controller(self, channel: grpc.aio.Channel): - """Register the exporter with the controller.""" - exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(channel) + async def _register_with_controller(self, local_channel: grpc.aio.Channel): + """Register the exporter with the controller. + + Args: + local_channel: The local Unix socket channel to get device reports from + """ + # Get device reports from the local session + exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(local_channel) response: jumpstarter_pb2.GetReportResponse = await exporter_stub.GetReport(empty_pb2.Empty()) + + # Register with the REMOTE controller (not the local session) logger.info("Registering exporter with controller") - controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + controller = await self._get_controller_stub() await controller.Register( jumpstarter_pb2.RegisterRequest( labels=self.labels, @@ -253,7 +260,6 @@ async def _register_with_controller(self, channel: grpc.aio.Channel): # Mark exporter as registered internally self._registered = True # Report that exporter is available to the controller - # TODO: Determine if the controller should handle this logic internally await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): From cab55c90c328547c1b7a93c6cfd1bb6b06eb0639 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 22:50:59 -0500 Subject: [PATCH 13/21] Add status field to jmp admin get exporter --- .../jumpstarter_cli_admin/get_test.py | 50 ++++++++++++++++--- .../jumpstarter_kubernetes/exporters.py | 9 ++++ 2 files changed, 52 insertions(+), 7 deletions(-) diff --git a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py index 4b12a807b..a79ff7d0f 100644 --- a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py +++ b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py @@ -301,7 +301,10 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): kind="Exporter", metadata=V1ObjectMeta(name="test", namespace="testing", creation_timestamp="2024-01-01T21:00:00Z"), status=V1Alpha1ExporterStatus( - endpoint="grpc://example.com:443", 
credential=V1ObjectReference(name="test-credential"), devices=[] + endpoint="grpc://example.com:443", + credential=V1ObjectReference(name="test-credential"), + devices=[], + exporter_status="Available", ), ) @@ -318,7 +321,9 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -334,6 +339,8 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): name: test-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -348,6 +355,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): result = runner.invoke(get, ["exporter", "test"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output get_exporter_mock.reset_mock() @@ -396,6 +404,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1"), V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ) @@ -425,7 +434,9 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -447,6 +458,8 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -460,6 +473,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo result = runner.invoke(get, ["exporter", "test", "--devices"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output @@ -510,6 +524,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="test-credential"), devices=[], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -520,6 +535,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="another-credential"), devices=[], + exporter_status="Available", ), ), ] @@ -541,7 +557,9 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -557,7 +575,9 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo "name": "another-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -578,6 +598,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: test-credential devices: [] endpoint: 
grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -589,6 +611,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: another-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ -609,6 +633,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output list_exporters_mock.reset_mock() # List exporters JSON output @@ -655,6 +680,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1") ], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -667,6 +693,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ), ] @@ -695,7 +722,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -718,7 +747,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -742,6 +773,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: 82a8ac0d-d7ff-4009-8948-18a3c5c607b1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -756,6 +789,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ -774,6 +809,7 @@ def test_get_exporters_devices(_load_kube_config_mock, list_exporters_mock: Asyn assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output assert "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" in result.output diff --git a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py index 004c47ffb..1ea45006f 100644 --- a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py +++ b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py @@ -26,6 +26,8 @@ class V1Alpha1ExporterStatus(JsonBaseModel): credential: SerializeV1ObjectReference devices: list[V1Alpha1ExporterDevice] endpoint: str + exporter_status: str | None = Field(alias="exporterStatus", default=None) + status_message: str | None = Field(alias="statusMessage", default=None) class V1Alpha1Exporter(JsonBaseModel): @@ -55,6 +57,8 @@ def from_dict(dict: dict): devices=[V1Alpha1ExporterDevice(labels=d["labels"], uuid=d["uuid"]) for d in 
dict["status"]["devices"]] if "devices" in dict["status"] else [], + exporter_status=dict["status"].get("exporterStatus"), + status_message=dict["status"].get("statusMessage"), ), ) @@ -62,17 +66,20 @@ def from_dict(dict: dict): def rich_add_columns(cls, table, devices: bool = False): if devices: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("AGE") table.add_column("LABELS") table.add_column("UUID") else: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("DEVICES") table.add_column("AGE") def rich_add_rows(self, table, devices: bool = False): + status = self.status.exporter_status if self.status else "Unknown" if devices: if self.status is not None: for d in self.status.devices: @@ -82,6 +89,7 @@ def rich_add_rows(self, table, devices: bool = False): labels.append(f"{label}:{str(d.labels[label])}") table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, time_since(self.metadata.creation_timestamp), ",".join(labels), @@ -91,6 +99,7 @@ def rich_add_rows(self, table, devices: bool = False): else: table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, str(len(self.status.devices) if self.status and self.status.devices else 0), time_since(self.metadata.creation_timestamp), From cc446649dc440d75359002d579d9953ef2bda548 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:01:37 -0500 Subject: [PATCH 14/21] Fix lease status race condition causing E2E tests to fail --- .../jumpstarter/exporter/exporter.py | 50 +++++++++++-------- .../jumpstarter/exporter/lease_context.py | 25 +++++++++- 2 files changed, 53 insertions(+), 22 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index e80ff1863..948de4202 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -132,7 +132,7 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ - _lease_scope: LeaseContext | None = field(init=False, default=None) + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. 
Contains the session, socket path, and synchronization event needed @@ -266,9 +266,10 @@ async def _report_status(self, status: ExporterStatus, message: str = ""): """Report the exporter status with the controller and session.""" self._exporter_status = status - # Update session status if available - if self._lease_scope and self._lease_scope.session: - self._lease_scope.session.update_status(status, message) + # Update status in lease context (handles session update internally) + # This ensures status is stored even before session is created + if self._lease_context: + self._lease_context.update_status(status, message) try: controller = await self._get_controller_stub() @@ -409,6 +410,10 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # Populate the lease scope with session and socket path lease_scope.session = session lease_scope.socket_path = path + # Sync current status to the newly created session + # This ensures the session has the correct status even if _report_status + # was called before the session was created (race condition fix) + session.update_status(lease_scope.current_status, lease_scope.status_message) # Wait for before-lease hook to complete before processing client connections logger.info("Waiting for before-lease hook to complete before accepting connections") @@ -449,23 +454,23 @@ async def serve(self): # noqa: C901 async for status in status_rx: # Check if lease name changed (and there was a previous active lease) lease_changed = ( - self._lease_scope - and self._lease_scope.is_active() - and self._lease_scope.lease_name != status.lease_name + self._lease_context + and self._lease_context.is_active() + and self._lease_context.lease_name != status.lease_name ) if lease_changed: # After-lease hook for the previous lease (lease name changed) - if self.hook_executor and self._lease_scope.has_client(): + if self.hook_executor and self._lease_context.has_client(): with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( - self._lease_scope, + self._lease_context, self._report_status, self.stop, ) logger.info("Lease status changed, killing existing connections") # Clear lease scope for next lease - self._lease_scope = None + self._lease_context = None self.stop() break @@ -482,43 +487,48 @@ async def serve(self): # noqa: C901 lease_name=status.lease_name, before_lease_hook=Event(), ) - self._lease_scope = lease_scope + self._lease_context = lease_scope tg.start_soon(self.handle_lease, status.lease_name, tg, lease_scope) if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) - if self._lease_scope: - self._lease_scope.update_client(status.client_name) + if self._lease_context: + self._lease_context.update_client(status.client_name) # Before-lease hook when transitioning from unleased to leased if not previous_leased: - if self.hook_executor and self._lease_scope: + if self.hook_executor and self._lease_context: tg.start_soon( self.hook_executor.run_before_lease_hook, - self._lease_scope, + self._lease_context, self._report_status, self.stop, # Pass shutdown callback ) else: # No hook configured, set event immediately await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") - if self._lease_scope: - self._lease_scope.before_lease_hook.set() + if self._lease_context: + self._lease_context.before_lease_hook.set() else: logger.info("Currently not leased") # After-lease hook when transitioning from leased to unleased - if previous_leased and 
self.hook_executor and self._lease_scope and self._lease_scope.has_client(): + if ( + previous_leased + and self.hook_executor + and self._lease_context + and self._lease_context.has_client() + ): # Shield the after-lease hook from cancellation with CancelScope(shield=True): await self.hook_executor.run_after_lease_hook( - self._lease_scope, + self._lease_context, self._report_status, self.stop, ) # Clear lease scope for next lease - self._lease_scope = None + self._lease_context = None if self._stop_requested: self.stop(should_unregister=True) diff --git a/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/packages/jumpstarter/jumpstarter/exporter/lease_context.py index f9caad72e..9e8878392 100644 --- a/packages/jumpstarter/jumpstarter/exporter/lease_context.py +++ b/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -5,10 +5,14 @@ """ from dataclasses import dataclass, field +from typing import TYPE_CHECKING from anyio import Event -from jumpstarter.exporter.session import Session +from jumpstarter.common import ExporterStatus + +if TYPE_CHECKING: + from jumpstarter.exporter.session import Session @dataclass @@ -25,13 +29,17 @@ class LeaseContext: socket_path: Unix socket path where the session is serving (set in handle_lease) before_lease_hook: Event that signals when before-lease hook completes client_name: Name of the client currently holding the lease (empty if unleased) + current_status: Current exporter status (stored here for access before session is created) + status_message: Message describing the current status """ lease_name: str before_lease_hook: Event - session: Session | None = None + session: "Session | None" = None socket_path: str = "" client_name: str = field(default="") + current_status: ExporterStatus = field(default=ExporterStatus.AVAILABLE) + status_message: str = field(default="") def __post_init__(self): """Validate that required resources are present.""" @@ -61,3 +69,16 @@ def update_client(self, client_name: str): def clear_client(self): """Clear the client name when the lease is no longer held.""" self.client_name = "" + + def update_status(self, status: ExporterStatus, message: str = ""): + """Update the current status in the lease context. + + This stores the status in the LeaseContext so it's available even before + the session is created, fixing the race condition where GetStatus is called + before the session can be updated. 
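+
+        Illustrative sketch (assuming the fields defined above)::
+
+            ctx = LeaseContext(lease_name="lease-1", before_lease_hook=Event())
+            ctx.update_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running hook")
+            # Before handle_lease assigns ctx.session, only the context fields
+            # change; once ctx.session is set, the same call also forwards the
+            # status to the session.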
+ """ + self.current_status = status + self.status_message = message + # Also update session if it exists + if self.session: + self.session.update_status(status, message) From 0cb3cab3d3d9c763ae48be36181b94b44bd2cf52 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:25:21 -0500 Subject: [PATCH 15/21] Fix additional status update race conditions breaking E2E --- .../jumpstarter/jumpstarter/exporter/exporter.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 948de4202..716fe635d 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -259,8 +259,11 @@ async def _register_with_controller(self, local_channel: grpc.aio.Channel): ) # Mark exporter as registered internally self._registered = True - # Report that exporter is available to the controller - await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + # Only report AVAILABLE status during initial registration (no lease context) + # During per-lease registration, status is managed by serve() to avoid + # overwriting LEASE_READY with AVAILABLE + if self._lease_context is None: + await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") async def _report_status(self, status: ExporterStatus, message: str = ""): """Report the exporter status with the controller and session.""" @@ -410,16 +413,16 @@ async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseC # Populate the lease scope with session and socket path lease_scope.session = session lease_scope.socket_path = path - # Sync current status to the newly created session - # This ensures the session has the correct status even if _report_status - # was called before the session was created (race condition fix) - session.update_status(lease_scope.current_status, lease_scope.status_message) # Wait for before-lease hook to complete before processing client connections logger.info("Waiting for before-lease hook to complete before accepting connections") await lease_scope.before_lease_hook.wait() logger.info("Before-lease hook completed, now accepting connections") + # Sync status to session AFTER hook completes - this ensures we have LEASE_READY + # status from serve() rather than the default AVAILABLE + session.update_status(lease_scope.current_status, lease_scope.status_message) + # Process client connections # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: From a129ff7dc0bd6f8dabd3a2c60b500a8940fdfe9a Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Tue, 25 Nov 2025 23:49:13 -0500 Subject: [PATCH 16/21] Fix unit test failures --- .../jumpstarter_cli_admin/create_test.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py index 7331c3b5b..5d4c2d9c8 100644 --- a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py +++ b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py @@ -234,7 +234,9 @@ def test_create_client( "name": "{name}-credential" }}, "devices": [], - "endpoint": "{endpoint}" + "endpoint": "{endpoint}", + "exporterStatus": null, + "statusMessage": null }} }} """.format(name=EXPORTER_NAME, 
endpoint=EXPORTER_ENDPOINT) @@ -250,6 +252,8 @@ def test_create_client( name: {name}-credential devices: [] endpoint: {endpoint} + exporterStatus: null + statusMessage: null """.format(name=EXPORTER_NAME, endpoint=EXPORTER_ENDPOINT) From 6aa6a1fdd468c286a6b141d4a773dc9d1a8b8e0d Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Wed, 26 Nov 2025 14:07:20 -0500 Subject: [PATCH 17/21] Fix broken unit tests --- .../jumpstarter_kubernetes/exporters_test.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py index 1792a0f3b..683f23547 100644 --- a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py +++ b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py @@ -47,7 +47,9 @@ def test_exporter_dump_json(): "uuid": "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" } ], - "endpoint": "https://test-exporter" + "endpoint": "https://test-exporter", + "exporterStatus": null, + "statusMessage": null } }""" ) @@ -73,6 +75,8 @@ def test_exporter_dump_yaml(): test: label uuid: f4cf49ab-fc64-46c6-94e7-a40502eb77b1 endpoint: https://test-exporter + exporterStatus: null + statusMessage: null """ ) @@ -113,8 +117,9 @@ def test_exporter_rich_add_columns_without_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("DEVICES") mock_table.add_column.assert_any_call("AGE") @@ -128,8 +133,9 @@ def test_exporter_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=True) - assert mock_table.add_column.call_count == 5 + assert mock_table.add_column.call_count == 6 mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("AGE") mock_table.add_column.assert_any_call("LABELS") @@ -146,9 +152,10 @@ def test_exporter_rich_add_rows_without_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "1" # Number of devices - assert args[3] == "5m" # Age + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "1" # Number of devices + assert args[4] == "5m" # Age def test_exporter_rich_add_rows_with_devices(): @@ -161,10 +168,11 @@ def test_exporter_rich_add_rows_with_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "5m" # Age - assert args[3] == "test:label" # Labels - assert args[4] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "5m" # Age + assert args[4] == "test:label" # Labels + assert args[5] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID def test_exporter_rich_add_names(): @@ -212,7 
+220,7 @@ def test_exporter_list_rich_add_columns(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 def test_exporter_list_rich_add_columns_with_devices(): @@ -223,7 +231,7 @@ def test_exporter_list_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=True) - assert mock_table.add_column.call_count == 5 + assert mock_table.add_column.call_count == 6 def test_exporter_list_rich_add_rows(): From 4a391cefbf5d04eca4f43df0468849061e788ba4 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Thu, 27 Nov 2025 08:29:09 -0500 Subject: [PATCH 18/21] Fix CodeRabbit warnings for previous_leased --- packages/jumpstarter/jumpstarter/exporter/exporter.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index 716fe635d..ed11e6f2f 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -132,6 +132,13 @@ class Exporter(AsyncContextManagerMixin, Metadata): AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. """ + _previous_leased: bool = field(init=False, default=False) + """Previous lease state used to detect lease state transitions. + + Tracks whether the exporter was leased in the previous status check to + determine when to trigger before-lease and after-lease hooks. + """ + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. @@ -478,7 +485,7 @@ async def serve(self): # noqa: C901 break # Check for lease state transitions - previous_leased = hasattr(self, "_previous_leased") and self._previous_leased + previous_leased = self._previous_leased current_leased = status.leased # Check if this is a new lease assignment (first time or lease name changed) From a302f24c4e938e6f6d837ce0e1a3477aa390fa77 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Sun, 4 Jan 2026 18:22:20 -0500 Subject: [PATCH 19/21] Fix hooks race condition --- .../jumpstarter/jumpstarter/exporter/hooks.py | 128 +++++++++++------- 1 file changed, 79 insertions(+), 49 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index 3a76fda77..1f37ace2c 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -1,16 +1,18 @@ """Lifecycle hooks for Jumpstarter exporters.""" -import asyncio import logging import os +import subprocess from collections.abc import Awaitable from dataclasses import dataclass from typing import TYPE_CHECKING, Callable, Literal +import anyio +from anyio import open_process + from jumpstarter.common import ExporterStatus, LogSource from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 -from jumpstarter.exporter.logging import get_logger from jumpstarter.exporter.session import Session if TYPE_CHECKING: @@ -148,7 +150,12 @@ async def _execute_hook_process( logging_session: Session, hook_type: Literal["before_lease", "after_lease"], ) -> None: - """Execute the hook process with the given environment and logging session.""" + """Execute the hook process with the given environment and logging 
session. + + Uses anyio for subprocess execution to be compatible with the anyio-based exporter. + """ + + command = hook_config.script timeout = hook_config.timeout on_failure = hook_config.on_failure @@ -156,56 +163,60 @@ async def _execute_hook_process( # Exception handling error_msg: str | None = None cause: Exception | None = None + timed_out = False try: - # Execute the hook command using shell - process = await asyncio.create_subprocess_shell( + # Execute the hook command using shell via anyio + # Pass the command as a string to use shell mode + async with await open_process( command, env=hook_env, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.STDOUT, - ) - - try: - # Create a logger with automatic source registration - hook_logger = get_logger(f"hook.{lease_scope.lease_name}", log_source, logging_session) - - # Stream output line-by-line for real-time logging - output_lines = [] - - async def read_output(): - while True: - line = await process.stdout.readline() - if not line: - break - line_decoded = line.decode().rstrip() - output_lines.append(line_decoded) - # Route hook output through the logging system - hook_logger.info(line_decoded) - - # Run output reading and process waiting concurrently with timeout - await asyncio.wait_for(asyncio.gather(read_output(), process.wait()), timeout=timeout) + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) as process: + output_lines: list[str] = [] + + async def read_output() -> None: + """Read stdout line by line.""" + assert process.stdout is not None + buffer = b"" + async for chunk in process.stdout: + buffer += chunk + while b"\n" in buffer: + line, buffer = buffer.split(b"\n", 1) + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + # Handle any remaining data without newline + if buffer: + line_decoded = buffer.decode().rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + + # Use move_on_after for timeout + with anyio.move_on_after(timeout) as cancel_scope: + await read_output() + await process.wait() - # Check if hook succeeded (exit code 0) - if process.returncode == 0: + if cancel_scope.cancelled_caught: + timed_out = True + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + # Terminate the process + process.terminate() + # Give it a moment to terminate gracefully + with anyio.move_on_after(5): + await process.wait() + # Force kill if still running + if process.returncode is None: + process.kill() + + elif process.returncode == 0: logger.info("Hook executed successfully") return - - # Non-zero exit code is a failure - error_msg = f"Hook failed with exit code {process.returncode}" - - except asyncio.TimeoutError as e: - error_msg = f"Hook timed out after {timeout} seconds" - cause = e - logger.error(error_msg) - try: - # Attempt to gracefully terminate the process - process.terminate() - await asyncio.wait_for(process.wait(), timeout=5) - except asyncio.TimeoutError: - # Force kill if it didn't terminate in time - process.kill() - await process.wait() + else: + error_msg = f"Hook failed with exit code {process.returncode}" except Exception as e: error_msg = f"Error executing hook: {e}" @@ -214,6 +225,9 @@ async def read_output(): # Handle failure if one occurred if error_msg is not None: + # For timeout, create a TimeoutError as the cause + if timed_out and cause is None: + cause = TimeoutError(error_msg) self._handle_hook_failure(error_msg, 
on_failure, hook_type, cause) async def execute_before_lease_hook(self, lease_scope: "LeaseContext") -> None: @@ -278,7 +292,19 @@ async def run_before_lease_hook( """ try: # Wait for lease scope to be fully populated by handle_lease - assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running before-lease hooks" + # This is necessary because handle_lease and run_before_lease_hook run concurrently + timeout = 30 # seconds + interval = 0.1 # seconds + elapsed = 0.0 + while not lease_scope.is_ready(): + if elapsed >= timeout: + error_msg = "Timeout waiting for lease scope to be ready" + logger.error(error_msg) + await report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, error_msg) + lease_scope.before_lease_hook.set() + return + await anyio.sleep(interval) + elapsed += interval # Check if hook is configured if not self.config.before_lease: @@ -351,8 +377,12 @@ async def run_after_lease_hook( shutdown: Callback to trigger exporter shutdown on critical failures """ try: - # Verify lease scope is ready - assert lease_scope.is_ready(), "LeaseScope must be fully initialized before running after-lease hooks" + # Verify lease scope is ready - for after-lease this should always be true + # since we've already processed the lease, but check defensively + if not lease_scope.is_ready(): + logger.warning("LeaseScope not ready for after-lease hook, skipping") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return # Check if hook is configured if not self.config.after_lease: From ce4f2aa4bb27370eb6eba83cd11d999fbdeaecc8 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 5 Jan 2026 22:33:57 -0500 Subject: [PATCH 20/21] Fix exit on hook failure and exit code handling --- .../jumpstarter-cli/jumpstarter_cli/run.py | 15 ++++++++++-- .../jumpstarter/exporter/exporter.py | 23 ++++++++++++++++++- .../jumpstarter/jumpstarter/exporter/hooks.py | 14 ++++++----- 3 files changed, 43 insertions(+), 9 deletions(-) diff --git a/packages/jumpstarter-cli/jumpstarter_cli/run.py b/packages/jumpstarter-cli/jumpstarter_cli/run.py index 50f9606e1..3650b63ab 100644 --- a/packages/jumpstarter-cli/jumpstarter_cli/run.py +++ b/packages/jumpstarter-cli/jumpstarter_cli/run.py @@ -76,11 +76,22 @@ async def signal_handler(): except* Exception as excgroup: _handle_exporter_exceptions(excgroup) + # Check if exporter set an exit code (e.g., from hook failure with on_failure='exit') + exporter_exit_code = exporter.exit_code + # Cancel the signal handler after exporter completes signal_tg.cancel_scope.cancel() - # Return signal number if received, otherwise 0 for immediate restart - return received_signal if received_signal else 0 + # Return exit code in priority order: + # 1. Signal number if received (for signal-based termination) + # 2. Exporter's exit code if set (for hook failure with on_failure='exit') + # 3. 0 for immediate restart (normal exit without signal or explicit exit code) + if received_signal: + return received_signal + elif exporter_exit_code is not None: + return exporter_exit_code + else: + return 0 sys.exit(anyio.run(serve_with_graceful_shutdown)) diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index ed11e6f2f..15e8c37d0 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -139,6 +139,14 @@ class Exporter(AsyncContextManagerMixin, Metadata): determine when to trigger before-lease and after-lease hooks. 
""" + _exit_code: int | None = field(init=False, default=None) + """Exit code to use when the exporter shuts down. + + When set to a non-zero value, the exporter should terminate permanently + (not restart). This is used by hooks with on_failure='exit' to signal + that the exporter should shut down and not be restarted by the CLI. + """ + _lease_context: LeaseContext | None = field(init=False, default=None) """Encapsulates all resources associated with the current lease. @@ -157,13 +165,17 @@ class Exporter(AsyncContextManagerMixin, Metadata): a reference holder and doesn't manage resource lifecycles directly. """ - def stop(self, wait_for_lease_exit=False, should_unregister=False): + def stop(self, wait_for_lease_exit=False, should_unregister=False, exit_code: int | None = None): """Signal the exporter to stop. Args: wait_for_lease_exit (bool): If True, wait for the current lease to exit before stopping. should_unregister (bool): If True, unregister from controller. Otherwise rely on heartbeat. + exit_code (int | None): If set, the exporter will exit with this code (non-zero means no restart). """ + # Set exit code if provided + if exit_code is not None: + self._exit_code = exit_code # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: @@ -178,6 +190,15 @@ def stop(self, wait_for_lease_exit=False, should_unregister=False): self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") + @property + def exit_code(self) -> int | None: + """Get the exit code for the exporter. + + Returns: + The exit code if set, or None if the exporter should restart. + """ + return self._exit_code + async def _get_controller_stub(self) -> jumpstarter_pb2_grpc.ControllerServiceStub: """Create and return a controller service stub.""" return jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py index 1f37ace2c..d3ec0e36c 100644 --- a/packages/jumpstarter/jumpstarter/exporter/hooks.py +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -274,7 +274,7 @@ async def run_before_lease_hook( self, lease_scope: "LeaseContext", report_status: Callable[["ExporterStatus", str], Awaitable[None]], - shutdown: Callable[[], None], + shutdown: Callable[..., None], ) -> None: """Execute before-lease hook with full orchestration. 
@@ -288,7 +288,7 @@ async def run_before_lease_hook( Args: lease_scope: LeaseScope containing session, socket_path, and sync event report_status: Async callback to report status changes to controller - shutdown: Callback to trigger exporter shutdown on critical failures + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) """ try: # Wait for lease scope to be fully populated by handle_lease @@ -334,7 +334,8 @@ async def run_before_lease_hook( f"beforeLease hook failed (on_failure=exit, shutting down): {e}", ) logger.error("Shutting down exporter due to beforeLease hook failure with on_failure='exit'") - shutdown() + # Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) else: # on_failure='endLease' - just block this lease, exporter stays available logger.error("beforeLease hook failed with on_failure='endLease': %s", e) @@ -360,7 +361,7 @@ async def run_after_lease_hook( self, lease_scope: "LeaseContext", report_status: Callable[["ExporterStatus", str], Awaitable[None]], - shutdown: Callable[[], None], + shutdown: Callable[..., None], ) -> None: """Execute after-lease hook with full orchestration. @@ -374,7 +375,7 @@ async def run_after_lease_hook( Args: lease_scope: LeaseScope containing session, socket_path, and client info report_status: Async callback to report status changes to controller - shutdown: Callback to trigger exporter shutdown on critical failures + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) """ try: # Verify lease scope is ready - for after-lease this should always be true @@ -412,7 +413,8 @@ async def run_after_lease_hook( f"afterLease hook failed (on_failure=exit, shutting down): {e}", ) logger.error("Shutting down exporter due to afterLease hook failure with on_failure='exit'") - shutdown() + # Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) else: # on_failure='endLease' - lease already ended, just report the failure # The exporter remains available for new leases From 75236e2430891623f28358250407add9a223eac2 Mon Sep 17 00:00:00 2001 From: Kirk Brauer Date: Mon, 5 Jan 2026 22:58:01 -0500 Subject: [PATCH 21/21] Enable executing j commands within hooks --- packages/jumpstarter/jumpstarter/client/core.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/packages/jumpstarter/jumpstarter/client/core.py b/packages/jumpstarter/jumpstarter/client/core.py index 4f859f230..85a7267e5 100644 --- a/packages/jumpstarter/jumpstarter/client/core.py +++ b/packages/jumpstarter/jumpstarter/client/core.py @@ -85,12 +85,24 @@ def __post_init__(self): self.logger.addHandler(handler) async def check_exporter_status(self): - """Check if the exporter is ready to accept driver calls""" + """Check if the exporter is ready to accept driver calls. + + Allows driver commands during hook execution (BEFORE_LEASE_HOOK, AFTER_LEASE_HOOK) + in addition to the normal LEASE_READY status. This enables hooks to interact + with drivers via the `j` CLI for automation use cases. 
+ """ + # Statuses that allow driver commands + ALLOWED_STATUSES = { + ExporterStatus.LEASE_READY, + ExporterStatus.BEFORE_LEASE_HOOK, + ExporterStatus.AFTER_LEASE_HOOK, + } + try: response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) status = ExporterStatus.from_proto(response.status) - if status != ExporterStatus.LEASE_READY: + if status not in ALLOWED_STATUSES: raise ExporterNotReady(f"Exporter status is {status}: {response.message}") except AioRpcError as e:
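Taken together, the hook-runner changes in the "Fix hooks race condition" patch above replace asyncio subprocess handling with anyio primitives: run the script through the shell with open_process, stream the merged stdout/stderr line by line, and bound the whole execution with move_on_after, terminating and then killing the process on timeout. A standalone sketch of that pattern; run_hook and its print-based output handling are illustrative and not part of the codebase:

import subprocess

import anyio
from anyio import open_process


async def run_hook(command: str, timeout: float) -> int | None:
    """Run a shell command, streaming output, with a hard timeout.

    Returns the exit code, or None if the command timed out and was terminated.
    """
    async with await open_process(
        command,                   # a str command is executed through the shell
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # merge stderr into stdout
    ) as process:
        with anyio.move_on_after(timeout) as scope:
            assert process.stdout is not None
            buffer = b""
            async for chunk in process.stdout:
                buffer += chunk
                while b"\n" in buffer:
                    line, buffer = buffer.split(b"\n", 1)
                    print("[hook output]", line.decode().rstrip())
            # Flush any trailing data that lacked a newline
            if buffer:
                print("[hook output]", buffer.decode().rstrip())
            await process.wait()

        if scope.cancelled_caught:
            # Timed out: ask nicely first, then force kill after a short grace period.
            process.terminate()
            with anyio.move_on_after(5):
                await process.wait()
            if process.returncode is None:
                process.kill()
            return None

    return process.returncode


if __name__ == "__main__":
    print(anyio.run(run_hook, 'echo "hello from hook"', 10.0))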